From 8dcc2eabff6a39c47f4fae519cc9de658039312d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Feb 2022 22:24:37 +0100 Subject: [PATCH 001/207] flame: adding batch utils for creating batch in desktop --- openpype/hosts/flame/api/batch_utils.py | 37 +++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 openpype/hosts/flame/api/batch_utils.py diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py new file mode 100644 index 0000000000..3a155c4b8d --- /dev/null +++ b/openpype/hosts/flame/api/batch_utils.py @@ -0,0 +1,37 @@ +import flame + + +def create_batch(name, frame_start, frame_end, **kwargs): + schematicReels = ['LoadedReel1'] + shelfReels = ['ShelfReel1'] + + handle_start = kwargs.get("handleStart") + handle_end = kwargs.get("handleEnd") + + if handle_start: + frame_start -= handle_start + if handle_end: + frame_end += handle_end + + # Create batch group with name, start_frame value, duration value, + # set of schematic reel names, set of shelf reel names + flame.batch.create_batch_group( + name, + start_frame=frame_start, + duration=frame_end, + reels=schematicReels, + shelf_reels=shelfReels + ) + + if kwargs.get("switch_batch_tab"): + # use this command to switch to the batch tab + flame.batch.go_to() + + comp = flame.batch.create_node("Comp") + writeFile = flame.batch.create_node("Write File") + + # connect nodes + flame.batch.connect_nodes(comp, "Result", writeFile, "Front") + + # sort batch nodes + flame.batch.organize() From 5391f1fff3abe36dde6289778ab2823e79616ab7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Feb 2022 22:46:27 +0100 Subject: [PATCH 002/207] flame: adding write node to batch utils --- openpype/hosts/flame/api/batch_utils.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index 3a155c4b8d..2c80834928 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -5,6 +5,7 @@ def create_batch(name, frame_start, frame_end, **kwargs): schematicReels = ['LoadedReel1'] shelfReels = ['ShelfReel1'] + write_pref = kwargs["write_pref"] handle_start = kwargs.get("handleStart") handle_end = kwargs.get("handleEnd") @@ -27,11 +28,23 @@ def create_batch(name, frame_start, frame_end, **kwargs): # use this command to switch to the batch tab flame.batch.go_to() - comp = flame.batch.create_node("Comp") - writeFile = flame.batch.create_node("Write File") + comp_node = flame.batch.create_node("Comp") + + # create write node + write_node = flame.batch.create_node('Write File') + write_node.media_path = write_pref["media_path"] + write_node.media_path_pattern = write_pref["media_path_pattern"] + write_node.create_clip = write_pref["create_clip"] + write_node.include_setup = write_pref["include_setup"] + write_node.create_clip_path = write_pref["create_clip_path"] + write_node.include_setup_path = write_pref["include_setup_path"] + write_node.file_type = write_pref["file_type"] + write_node.bit_depth = write_pref["bit_depth"] + write_node.frame_index_mode = write_pref["frame_index_mode"] + write_node.frame_padding = int(write_pref["frame_padding"]) # connect nodes - flame.batch.connect_nodes(comp, "Result", writeFile, "Front") + flame.batch.connect_nodes(comp_node, "Result", write_node, "Front") # sort batch nodes flame.batch.organize() From cdc3d0be792e718b17fb1290f3f05a5ea8c4380f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Feb 2022 
12:49:11 +0100 Subject: [PATCH 003/207] flame: batch utils to api --- openpype/hosts/flame/api/__init__.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 56bbadd2fc..98a1a23e89 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -70,6 +70,9 @@ from .render_utils import ( export_clip, get_preset_path_by_xml_name ) +from .batch_utils import ( + create_batch +) __all__ = [ # constants @@ -140,5 +143,8 @@ __all__ = [ # render utils "export_clip", - "get_preset_path_by_xml_name" + "get_preset_path_by_xml_name", + + # batch utils + "create_batch" ] From 5d8e3e293f46860f9de52f93736f9646023de501 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Feb 2022 12:49:33 +0100 Subject: [PATCH 004/207] flame: adding docstrigs to create_batch --- openpype/hosts/flame/api/batch_utils.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index 2c80834928..a1fe7961c4 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -2,8 +2,15 @@ import flame def create_batch(name, frame_start, frame_end, **kwargs): - schematicReels = ['LoadedReel1'] - shelfReels = ['ShelfReel1'] + """Create Batch Group in active project's Desktop + + Args: + name (str): name of batch group to be created + frame_start (int): start frame of batch + frame_end (int): end frame of batch + """ + schematic_reels = kwargs.get("shematic_reels") or ['LoadedReel1'] + shelf_reels = kwargs.get("shelf_reels") or ['ShelfReel1'] write_pref = kwargs["write_pref"] handle_start = kwargs.get("handleStart") @@ -20,8 +27,8 @@ def create_batch(name, frame_start, frame_end, **kwargs): name, start_frame=frame_start, duration=frame_end, - reels=schematicReels, - shelf_reels=shelfReels + reels=schematic_reels, + shelf_reels=shelf_reels ) if kwargs.get("switch_batch_tab"): From 162df8c0aca975da19778cd05e22771f651458a9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Feb 2022 13:00:14 +0100 Subject: [PATCH 005/207] flame: itegrator wip --- .../flame/plugins/publish/integrate_batch_group.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/integrate_batch_group.py diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py new file mode 100644 index 0000000000..fd88ed318e --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -0,0 +1,14 @@ +import pyblish +import openpype.hosts.flame.api as opfapi + +@pyblish.api.log +class IntegrateBatchGroup(pyblish.api.InstancePlugin): + """Integrate published shot to batch group""" + + order = pyblish.api.IntegratorOrder + 0.45 + label = "Integrate Batch Groups" + hosts = ["flame"] + families = ["clip"] + + def process(self, instance): + opfapi.create_batch \ No newline at end of file From c120135f1fa467587a1b41f0e3c6282a2285b200 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 21:08:35 +0100 Subject: [PATCH 006/207] Removed submodule openpype/modules/default_modules/ftrack/python2_vendor/arrow --- openpype/modules/default_modules/ftrack/python2_vendor/arrow | 1 - 1 file changed, 1 deletion(-) delete mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/arrow diff --git 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow b/openpype/modules/default_modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf72..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 From d1a733cf885e8955746ef6e71db01b76fe7c96be Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 21:08:43 +0100 Subject: [PATCH 007/207] Removed submodule openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api --- .../default_modules/ftrack/python2_vendor/ftrack-python-api | 1 - 1 file changed, 1 deletion(-) delete mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From fdb9f0da77f1a996be4160f52acdc1036238bc39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 21:08:50 +0100 Subject: [PATCH 008/207] Removed submodule repos/avalon-unreal-integration --- repos/avalon-unreal-integration | 1 - 1 file changed, 1 deletion(-) delete mode 160000 repos/avalon-unreal-integration diff --git a/repos/avalon-unreal-integration b/repos/avalon-unreal-integration deleted file mode 160000 index 43f6ea9439..0000000000 --- a/repos/avalon-unreal-integration +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 43f6ea943980b29c02a170942b566ae11f2b7080 From e7c5aa16df8e46090140572cc77c607e6da4e707 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 18 Mar 2022 17:23:26 +0300 Subject: [PATCH 009/207] Fixes comparing against render filename Fixes comparison against AOV pattern match to produce correct "review" flags of the "beauty" pass, for proper generation of review burnin and thumbnail. --- .../modules/deadline/plugins/publish/submit_publish_job.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 19d504b6c9..2c8bcdf4fc 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -448,7 +448,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): preview = False if app in self.aov_filter.keys(): for aov_pattern in self.aov_filter[app]: - if re.match(aov_pattern, aov): + # Matching against the AOV pattern in the render files + # In order to match the AOV name, we must compare against the render filename string + # We are grabbing the render filename string from the collection that we have grabbed from expected files (exp_files) + render_file_name = os.path.basename(col[0]) + if re.match(aov_pattern, render_file_name): preview = True break From 509f3289418a297ac17e3a0de93a987390cd8370 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 18 Mar 2022 17:25:34 +0300 Subject: [PATCH 010/207] Fix regex in global OpenPype Deadline settings Fixes the "beauty" regex for the "reviewable subsets filter" in the Publish Deadline settings. 
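The fix above boils down to matching the configured AOV regex against the basename of the first expected render file instead of the bare AOV key. A minimal, self-contained sketch of that matching step, using the simplified beauty pattern introduced by the later settings patches and a hypothetical filename (neither the dict nor the path below comes from the repository):

import os
import re

# Hypothetical stand-in for the aov_filter plugin setting (per-host regex lists).
aov_filter = {"maya": [".*([Bb]eauty).*"]}

# Hypothetical first file of a collected sequence from exp_files.
first_file = "/renders/sh010/sh010_beauty_v001.1001.exr"

app = "maya"
preview = False
# Compare the pattern against the filename itself, not the AOV token,
# so "beauty" is still found when it only appears inside the file name.
render_file_name = os.path.basename(first_file)
for aov_pattern in aov_filter.get(app, []):
    if re.match(aov_pattern, render_file_name):
        preview = True
        break

print(preview)  # True, so the beauty pass gets the review/burnin/thumbnail flags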
--- .../defaults/project_settings/deadline.json | 54 +++++++++---------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 5bb0a4022e..1859b480a1 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -15,33 +15,6 @@ "deadline" ] }, - "ProcessSubmittedJobOnFarm": { - "enabled": true, - "deadline_department": "", - "deadline_pool": "", - "deadline_group": "", - "deadline_chunk_size": 1, - "deadline_priority": 50, - "publishing_script": "", - "skip_integration_repre_list": [], - "aov_filter": { - "maya": [ - ".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*" - ], - "nuke": [ - ".*" - ], - "aftereffects": [ - ".*" - ], - "celaction": [ - ".*" - ], - "harmony": [ - ".*" - ] - } - }, "MayaSubmitDeadline": { "enabled": true, "optional": false, @@ -95,6 +68,33 @@ "group": "", "department": "", "multiprocess": true + }, + "ProcessSubmittedJobOnFarm": { + "enabled": true, + "deadline_department": "", + "deadline_pool": "", + "deadline_group": "", + "deadline_chunk_size": 1, + "deadline_priority": 50, + "publishing_script": "", + "skip_integration_repre_list": [], + "aov_filter": { + "maya": [ + ".*(?:[\\._-])*([Bb]eauty)(?:[\\.|_])*.*" + ], + "nuke": [ + ".*" + ], + "aftereffects": [ + ".*" + ], + "celaction": [ + ".*" + ], + "harmony": [ + ".*" + ] + } } } } \ No newline at end of file From b59100025e22cdc211615e2a868e902dbf8ad832 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 21 Mar 2022 14:31:58 +0300 Subject: [PATCH 011/207] Fix hound comment warning line length for comments adjusted --- .../modules/deadline/plugins/publish/submit_publish_job.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 2c8bcdf4fc..2ad1dcd691 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -449,8 +449,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if app in self.aov_filter.keys(): for aov_pattern in self.aov_filter[app]: # Matching against the AOV pattern in the render files - # In order to match the AOV name, we must compare against the render filename string - # We are grabbing the render filename string from the collection that we have grabbed from expected files (exp_files) + # In order to match the AOV name + # we must compare against the render filename string + # We are grabbing the render filename string + # from the collection that we have grabbed from expected files (exp_files) render_file_name = os.path.basename(col[0]) if re.match(aov_pattern, render_file_name): preview = True From 15b1c86e88bd055b815d663047856f019c54f779 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 21 Mar 2022 14:31:58 +0300 Subject: [PATCH 012/207] Fix hound comment warning line length for comments adjusted --- .../modules/deadline/plugins/publish/submit_publish_job.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 2c8bcdf4fc..2ad1dcd691 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ 
-449,8 +449,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if app in self.aov_filter.keys(): for aov_pattern in self.aov_filter[app]: # Matching against the AOV pattern in the render files - # In order to match the AOV name, we must compare against the render filename string - # We are grabbing the render filename string from the collection that we have grabbed from expected files (exp_files) + # In order to match the AOV name + # we must compare against the render filename string + # We are grabbing the render filename string + # from the collection that we have grabbed from expected files (exp_files) render_file_name = os.path.basename(col[0]) if re.match(aov_pattern, render_file_name): preview = True From b517fa0ce068ac9ec9f5d52f1430e31e2a69ea63 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 21 Mar 2022 15:13:40 +0300 Subject: [PATCH 013/207] Fix hound warning, again --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 2ad1dcd691..5dd1be1f54 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -452,7 +452,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # In order to match the AOV name # we must compare against the render filename string # We are grabbing the render filename string - # from the collection that we have grabbed from expected files (exp_files) + # from the collection that we have grabbed from exp_files render_file_name = os.path.basename(col[0]) if re.match(aov_pattern, render_file_name): preview = True From 8efa09c6d2d4fc2558ed6e814a00d76981f78a42 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 21 Mar 2022 15:18:21 +0300 Subject: [PATCH 014/207] Simplify regex for "beauty" pass Simplifies the "regex" used to sift for the reviewable pass for thumbnail and burnin --- openpype/settings/defaults/project_settings/deadline.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 1859b480a1..b2104a04eb 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -80,7 +80,7 @@ "skip_integration_repre_list": [], "aov_filter": { "maya": [ - ".*(?:[\\._-])*([Bb]eauty)(?:[\\.|_])*.*" + ".*([Bb]eauty).*" ], "nuke": [ ".*" From 80e08d43ce3617d57280a04ae426c0d42a595709 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 21 Mar 2022 15:53:39 +0300 Subject: [PATCH 015/207] Test if col[0] is remainder or list for file_name --- .../modules/deadline/plugins/publish/submit_publish_job.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 5dd1be1f54..b1f6f9a485 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -453,7 +453,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # we must compare against the render filename string # We are grabbing the render filename string # from the collection that we have grabbed from exp_files - render_file_name = 
os.path.basename(col[0]) + if isinstance(col, list): + render_file_name = os.path.basename(col[0]) + else: + render_file_name = os.path.basename(col) if re.match(aov_pattern, render_file_name): preview = True break From db03b47b8ee5615bb19ee90f1dc52f8f05cf379b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Mar 2022 15:14:38 +0100 Subject: [PATCH 016/207] hound fix --- openpype/hosts/flame/api/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 7c03186ff0..97f83ccf07 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -146,7 +146,6 @@ __all__ = [ "export_clip", "get_preset_path_by_xml_name", "modify_preset_file", - # batch utils "create_batch" From aab2ed17f8d582056f5835613845d13ea7205b24 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Mar 2022 15:15:04 +0100 Subject: [PATCH 017/207] flame: ingegrate batch wip --- .../plugins/publish/integrate_batch_group.py | 66 ++++++++++++++++++- 1 file changed, 65 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index fd88ed318e..aaa405343c 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -1,6 +1,7 @@ import pyblish import openpype.hosts.flame.api as opfapi + @pyblish.api.log class IntegrateBatchGroup(pyblish.api.InstancePlugin): """Integrate published shot to batch group""" @@ -11,4 +12,67 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): families = ["clip"] def process(self, instance): - opfapi.create_batch \ No newline at end of file + frame_start = instance.data["frameStart"] + frame_end = instance.data["frameEnd"] + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + + asset_name = instance.data["asset"] + write_pref_data = self._get_write_prefs(instance) + + batch_data = { + "shematic_reels": [ + "OP_LoadedReel" + ], + "write_pref": write_pref_data, + "handleStart": handle_start, + "handleEnd": handle_end + } + + opfapi.create_batch(asset_name, frame_start, frame_end, batch_data) + + def _get_write_prefs(self, instance): + # The path attribute where the rendered clip is exported + # /path/to/file.[0001-0010].exr + media_path = "{render_path}".format() + # name of file represented by tokens + media_path_pattern = "_v." + # The Create Open Clip attribute of the Write File node. \ + # Determines if an Open Clip is created by the Write File node. + create_clip = True + # The Include Setup attribute of the Write File node. + # Determines if a Batch Setup file is created by the Write File node. + include_setup = True + # The path attribute where the Open Clip file is exported by + # the Write File node. + create_clip_path = "" + include_setup_path = None + # The file type for the files written by the Write File node. + # Setting this attribute also overwrites format_extension, + # bit_depth and compress_mode to match the defaults for + # this file type. + file_type = "OpenEXR" + # The bit depth for the files written by the Write File node. + # This attribute resets to match file_type whenever file_type is set. + bit_depth = "16" + frame_index_mode = None + frame_padding = 0 + # The versioning mode of the Open Clip exported by the Write File node. + # Only available if create_clip = True. 
+ version_mode = "Follow Iteration" + version_name = "v" + + return { + "media_path": media_path, + "media_path_pattern": media_path_pattern, + "create_clip": create_clip, + "include_setup": include_setup, + "create_clip_path": create_clip_path, + "include_setup_path": include_setup_path, + "file_type": file_type, + "bit_depth": bit_depth, + "frame_index_mode": frame_index_mode, + "frame_padding": frame_padding, + "version_mode": version_mode, + "version_name": version_name + } From 12aeb88e0a2cd7ccfa6dae9dd7f20d83bf4577b5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Mar 2022 17:13:11 +0100 Subject: [PATCH 018/207] flame: integrate batch [wip] --- .../plugins/publish/integrate_batch_group.py | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index aaa405343c..780531287b 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -1,3 +1,4 @@ +import os import pyblish import openpype.hosts.flame.api as opfapi @@ -32,9 +33,12 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): opfapi.create_batch(asset_name, frame_start, frame_end, batch_data) def _get_write_prefs(self, instance): + shot_path = instance.data[""] + render_dir_path = os.path.join( + shot_path, "work", task, "render", "flame") # The path attribute where the rendered clip is exported # /path/to/file.[0001-0010].exr - media_path = "{render_path}".format() + media_path = render_dir_path # name of file represented by tokens media_path_pattern = "_v." # The Create Open Clip attribute of the Write File node. \ @@ -46,17 +50,33 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # The path attribute where the Open Clip file is exported by # the Write File node. create_clip_path = "" - include_setup_path = None + # The path attribute where the Batch setup file + # is exported by the Write File node. + include_setup_path = "./_v" # The file type for the files written by the Write File node. # Setting this attribute also overwrites format_extension, # bit_depth and compress_mode to match the defaults for # this file type. file_type = "OpenEXR" + # The file extension for the files written by the Write File node. + # This attribute resets to match file_type whenever file_type + # is set. If you require a specific extension, you must + # set format_extension after setting file_type. + format_extension = "exr" # The bit depth for the files written by the Write File node. # This attribute resets to match file_type whenever file_type is set. bit_depth = "16" - frame_index_mode = None - frame_padding = 0 + # The compressing attribute for the files exported by the Write + # File node. Only relevant when file_type in 'OpenEXR', 'Sgi', 'Tiff' + compress = True + # The compression format attribute for the specific File Types + # export by the Write File node. You must set compress_mode + # after setting file_type. + compress_mode = "DWAB" + # The frame index mode attribute of the Write File node. + # Value range: `Use Timecode` or `Use Start Frame` + frame_index_mode = "Use Start Frame" + frame_padding = 6 # The versioning mode of the Open Clip exported by the Write File node. # Only available if create_clip = True. 
version_mode = "Follow Iteration" @@ -70,7 +90,10 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): "create_clip_path": create_clip_path, "include_setup_path": include_setup_path, "file_type": file_type, + "format_extension": format_extension, "bit_depth": bit_depth, + "compress": compress, + "compress_mode": compress_mode, "frame_index_mode": frame_index_mode, "frame_padding": frame_padding, "version_mode": version_mode, From 7dd0c86a17f0a95ac6e16416473978169cf793a7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Mar 2022 17:11:48 +0100 Subject: [PATCH 019/207] flame: collect timeline instances settings --- .../defaults/project_settings/flame.json | 31 +++++++ .../projects_schema/schema_project_flame.json | 81 +++++++++++++++++++ 2 files changed, 112 insertions(+) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index c7188b10b5..939752c778 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -20,6 +20,37 @@ } }, "publish": { + "CollectTimelineInstances": { + "xml_preset_attrs_from_comments": [ + { + "name": "width", + "type": "number" + }, + { + "name": "height", + "type": "number" + }, + { + "name": "pixelRatio", + "type": "float" + }, + { + "name": "resizeType", + "type": "string" + }, + { + "name": "resizeFilter", + "type": "string" + } + ], + "add_tasks": [ + { + "name": "compositing", + "type": "Compositing", + "create_batch_group": true + } + ] + }, "ExtractSubsetResources": { "keep_original_representation": false, "export_presets_mapping": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index e352f8b132..8057b07d9c 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -136,6 +136,87 @@ "key": "publish", "label": "Publish plugins", "children": [ + { + "type": "dict", + "collapsible": true, + "key": "CollectTimelineInstances", + "label": "Collect Timeline Instances", + "is_group": true, + "children": [ + { + "type": "collapsible-wrap", + "label": "XML presets attributes parsable from segment comments", + "collapsible": true, + "collapsed": true, + "children": [ + { + "type": "list", + "key": "xml_preset_attrs_from_comments", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "name", + "label": "Attribute name" + }, + { + "key": "type", + "label": "Attribute type", + "type": "enum", + "default": "number", + "enum_items": [ + { + "number": "number" + }, + { + "float": "float" + }, + { + "string": "string" + } + ] + } + ] + } + } + ] + }, + { + "type": "collapsible-wrap", + "label": "Add tasks", + "collapsible": true, + "collapsed": true, + "children": [ + { + "type": "list", + "key": "add_tasks", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "name", + "label": "Task name" + }, + { + "key": "type", + "label": "Task type", + "multiselection": false, + "type": "task-types-enum" + }, + { + "type": "boolean", + "key": "create_batch_group", + "label": "Create batch group" + } + ] + } + } + ] + } + ] + }, { "type": "dict", "collapsible": true, From 074703f8cff5b4432a98d1fb28f9a6f42943c694 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Mar 2022 17:13:09 +0100 Subject: [PATCH 020/207] flame: use settings 
in collect timeline instances --- .../publish/collect_timeline_instances.py | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 70340ad7a2..94348601b2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -21,15 +21,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): audio_track_items = [] - # TODO: add to settings # settings - xml_preset_attrs_from_comments = { - "width": "number", - "height": "number", - "pixelRatio": "float", - "resizeType": "string", - "resizeFilter": "string" - } + xml_preset_attrs_from_comments = [] + add_tasks = [] def process(self, context): project = context.data["flameProject"] @@ -106,7 +100,11 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "fps": self.fps, "flameSourceClip": source_clip, "sourceFirstFrame": int(first_frame), - "path": file_path + "path": file_path, + "flameAddTasks": self.add_tasks, + "tasks": { + task["name"]: {"type": task["type"]} + for task in self.add_tasks} }) # get otio clip data @@ -181,14 +179,17 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # split to key and value key, value = split.split(":") - for a_name, a_type in self.xml_preset_attrs_from_comments.items(): + for attr_data in self.xml_preset_attrs_from_comments: + a_name = attr_data["name"] + a_type = attr_data["type"] + # exclude all not related attributes if a_name.lower() not in key.lower(): continue # get pattern defined by type pattern = TXT_PATERN - if a_type in ("number" , "float"): + if a_type in ("number", "float"): pattern = NUM_PATERN res_goup = pattern.findall(value) From 029135acf5513756ffc3e01fafa66e8ddd565d32 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Mar 2022 20:38:46 +0100 Subject: [PATCH 021/207] flame: integrator bath group [finishing] --- openpype/hosts/flame/api/batch_utils.py | 12 +++- .../plugins/publish/integrate_batch_group.py | 65 +++++++++++++++---- 2 files changed, 61 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index a1fe7961c4..d309c5985d 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -37,8 +37,12 @@ def create_batch(name, frame_start, frame_end, **kwargs): comp_node = flame.batch.create_node("Comp") + # TODO: convert this to iterational processing, + # so it could be driven from `imageio` settigns # create write node write_node = flame.batch.create_node('Write File') + # assign attrs + write_node.name = write_pref["name"] write_node.media_path = write_pref["media_path"] write_node.media_path_pattern = write_pref["media_path_pattern"] write_node.create_clip = write_pref["create_clip"] @@ -46,11 +50,15 @@ def create_batch(name, frame_start, frame_end, **kwargs): write_node.create_clip_path = write_pref["create_clip_path"] write_node.include_setup_path = write_pref["include_setup_path"] write_node.file_type = write_pref["file_type"] + write_node.format_extension = write_pref["format_extension"] write_node.bit_depth = write_pref["bit_depth"] + write_node.compress = write_pref["compress"] + write_node.compress_mode = write_pref["compress_mode"] write_node.frame_index_mode = write_pref["frame_index_mode"] - write_node.frame_padding = int(write_pref["frame_padding"]) + 
write_node.frame_padding = write_pref["frame_padding"] + write_node.version_mode = write_pref["version_mode"] + write_node.version_name = write_pref["version_name"] - # connect nodes flame.batch.connect_nodes(comp_node, "Result", write_node, "Front") # sort batch nodes diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 780531287b..808c059816 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -1,5 +1,7 @@ import os +from pprint import pformat import pyblish +from openpype.lib import get_workdir import openpype.hosts.flame.api as opfapi @@ -17,25 +19,52 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): frame_end = instance.data["frameEnd"] handle_start = instance.data["handleStart"] handle_end = instance.data["handleEnd"] - asset_name = instance.data["asset"] - write_pref_data = self._get_write_prefs(instance) + add_tasks = instance.data["flameAddTasks"] - batch_data = { - "shematic_reels": [ - "OP_LoadedReel" - ], - "write_pref": write_pref_data, - "handleStart": handle_start, - "handleEnd": handle_end - } + # iterate all tasks from settings + for task_data in add_tasks: + # exclude batch group + if not task_data["create_batch_group"]: + continue + task_name = task_data["name"] + batchgroup_name = "{}_{}".format(asset_name, task_name) + write_pref_data = self._get_write_prefs(instance, task_data) - opfapi.create_batch(asset_name, frame_start, frame_end, batch_data) + batch_data = { + "shematic_reels": [ + "OP_LoadedReel" + ], + "write_pref": write_pref_data, + "handleStart": handle_start, + "handleEnd": handle_end + } + self.log.debug( + "__ batch_data: {}".format(pformat(batch_data))) - def _get_write_prefs(self, instance): - shot_path = instance.data[""] + # create batch with utils + opfapi.create_batch( + batchgroup_name, + frame_start, + frame_end, + batch_data + ) + + def _get_write_prefs(self, instance, task_data): + anatomy_data = instance.data["anatomyData"] + + task_workfile_path = self._get_shot_task_dir_path(instance, task_data) + self.log.debug("__ task_workfile_path: {}".format(task_workfile_path)) + + # TODO: this might be done with template in settings render_dir_path = os.path.join( - shot_path, "work", task, "render", "flame") + task_workfile_path, "render", "flame") + + # TODO: add most of these to `imageio/flame/batch/write_node` + name = "{project[code]}_{asset}_{task[name]}".format( + **anatomy_data + ) + # The path attribute where the rendered clip is exported # /path/to/file.[0001-0010].exr media_path = render_dir_path @@ -83,6 +112,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): version_name = "v" return { + "name": name, "media_path": media_path, "media_path_pattern": media_path_pattern, "create_clip": create_clip, @@ -99,3 +129,10 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): "version_mode": version_mode, "version_name": version_name } + + def _get_shot_task_dir_path(self, instance, task_data): + project_doc = instance.data["projectEntity"] + asset_entity = instance.data["assetEntity"] + + return get_workdir( + project_doc, asset_entity, task_data["name"], "flame") From 9fb6d7a7230149b9d1c7b273eb8fcc532a84a3fb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 12:04:29 +0100 Subject: [PATCH 022/207] flame: batchgroup uses duration --- openpype/hosts/flame/api/batch_utils.py | 14 ++++++-------- 
.../flame/plugins/publish/integrate_batch_group.py | 5 +++-- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index d309c5985d..a47d62a10e 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -1,7 +1,7 @@ import flame -def create_batch(name, frame_start, frame_end, **kwargs): +def create_batch(name, frame_start, frame_duration, **kwargs): """Create Batch Group in active project's Desktop Args: @@ -13,20 +13,18 @@ def create_batch(name, frame_start, frame_end, **kwargs): shelf_reels = kwargs.get("shelf_reels") or ['ShelfReel1'] write_pref = kwargs["write_pref"] - handle_start = kwargs.get("handleStart") - handle_end = kwargs.get("handleEnd") + handle_start = kwargs.get("handleStart") or 0 + handle_end = kwargs.get("handleEnd") or 0 - if handle_start: - frame_start -= handle_start - if handle_end: - frame_end += handle_end + frame_start -= handle_start + frame_duration += handle_start + handle_end # Create batch group with name, start_frame value, duration value, # set of schematic reel names, set of shelf reel names flame.batch.create_batch_group( name, start_frame=frame_start, - duration=frame_end, + duration=frame_duration, reels=schematic_reels, shelf_reels=shelf_reels ) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 808c059816..0a21d6ca2d 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -19,6 +19,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): frame_end = instance.data["frameEnd"] handle_start = instance.data["handleStart"] handle_end = instance.data["handleEnd"] + frame_duration = (frame_end - frame_start) + 1 asset_name = instance.data["asset"] add_tasks = instance.data["flameAddTasks"] @@ -46,8 +47,8 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): opfapi.create_batch( batchgroup_name, frame_start, - frame_end, - batch_data + frame_duration, + **batch_data ) def _get_write_prefs(self, instance, task_data): From a87f778f1e95d16d09097d923ca3f1d519e86126 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 12:56:18 +0100 Subject: [PATCH 023/207] flame: reuse batch groups --- openpype/hosts/flame/api/__init__.py | 4 ++- openpype/hosts/flame/api/lib.py | 9 +++++++ .../plugins/publish/integrate_batch_group.py | 26 ++++++++++++++----- 3 files changed, 31 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 97f83ccf07..561aaab3de 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -29,7 +29,8 @@ from .lib import ( get_frame_from_filename, get_padding_from_filename, maintained_object_duplication, - get_clip_segment + get_clip_segment, + get_batch_group_from_desktop ) from .utils import ( setup, @@ -105,6 +106,7 @@ __all__ = [ "get_padding_from_filename", "maintained_object_duplication", "get_clip_segment", + "get_batch_group_from_desktop", # pipeline "install", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 74d9e7607a..9a6b86209d 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -708,3 +708,12 @@ def get_clip_segment(flame_clip): raise ValueError("Clip `{}` has too many segments!".format(name)) return segments[0] + + +def 
get_batch_group_from_desktop(name): + project = get_current_project() + project_desktop = project.current_workspace.desktop + + for bgroup in project_desktop.batch_groups: + if bgroup.name.get_value() == name: + return bgroup diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 0a21d6ca2d..3a8173791a 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -43,13 +43,25 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): self.log.debug( "__ batch_data: {}".format(pformat(batch_data))) - # create batch with utils - opfapi.create_batch( - batchgroup_name, - frame_start, - frame_duration, - **batch_data - ) + # check if the batch group already exists + bgroup = opfapi.get_batch_group_from_desktop(batchgroup_name) + + if not bgroup: + self.log.info( + "Creating new batch group: {}".format(batchgroup_name)) + # create batch with utils + opfapi.create_batch( + batchgroup_name, + frame_start, + frame_duration, + **batch_data + ) + else: + self.log.info( + "Updating batch group: {}".format(batchgroup_name)) + # update already created batch group + bgroup.start_frame = frame_start + bgroup.duration = frame_duration def _get_write_prefs(self, instance, task_data): anatomy_data = instance.data["anatomyData"] From c26ff2ab544fadc7121d54ec49a2b35433e6122a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 12:56:36 +0100 Subject: [PATCH 024/207] flame: fix task name on write file node --- openpype/hosts/flame/plugins/publish/integrate_batch_group.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 3a8173791a..af2b0fad65 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -65,6 +65,8 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): def _get_write_prefs(self, instance, task_data): anatomy_data = instance.data["anatomyData"] + # update task data in anatomy data + anatomy_data.update(task_data) task_workfile_path = self._get_shot_task_dir_path(instance, task_data) self.log.debug("__ task_workfile_path: {}".format(task_workfile_path)) From 590e966a7d18a4c1f7dea0e08a3056a202607670 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 14:29:49 +0100 Subject: [PATCH 025/207] flame: updating anatomy data with correct task data --- .../plugins/publish/integrate_batch_group.py | 25 ++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index af2b0fad65..c54eeec05c 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -1,4 +1,5 @@ import os +import copy from pprint import pformat import pyblish from openpype.lib import get_workdir @@ -63,10 +64,28 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): bgroup.start_frame = frame_start bgroup.duration = frame_duration - def _get_write_prefs(self, instance, task_data): - anatomy_data = instance.data["anatomyData"] + def _get_anamoty_data_with_current_task(self, instance, task_data): + anatomy_data = copy.deepcopy(instance.data["anatomyData"]) + task_name = task_data["name"] + 
task_type = task_data["type"] + anatomy_obj = instance.context.data["anatomy"] + # update task data in anatomy data - anatomy_data.update(task_data) + project_task_types = anatomy_obj["tasks"] + task_code = project_task_types.get(task_type, {}).get("short_name") + anatomy_data.update({ + "task": { + "name": task_name, + "type": task_type, + "short": task_code + } + }) + return anatomy_data + + def _get_write_prefs(self, instance, task_data): + # update task in anatomy data + anatomy_data = self._get_anamoty_data_with_current_task( + instance, task_data) task_workfile_path = self._get_shot_task_dir_path(instance, task_data) self.log.debug("__ task_workfile_path: {}".format(task_workfile_path)) From c6cfdfbd3aac60dd8469048354b423f39edd4e9a Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 24 Mar 2022 16:41:39 +0300 Subject: [PATCH 026/207] Added new file `flagging.py` to new farm directory --- openpype/pipeline/farm/flagging.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 openpype/pipeline/farm/flagging.py diff --git a/openpype/pipeline/farm/flagging.py b/openpype/pipeline/farm/flagging.py new file mode 100644 index 0000000000..e69de29bb2 From d5521ae8407a9f9bb4d1f05e8a1ef048700acf45 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 15:37:05 +0100 Subject: [PATCH 027/207] flame: adding loading of plate into integrator --- openpype/hosts/flame/api/batch_utils.py | 4 +- .../plugins/publish/integrate_batch_group.py | 87 +++++++++++-------- 2 files changed, 53 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index a47d62a10e..99e053faf1 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -21,7 +21,7 @@ def create_batch(name, frame_start, frame_duration, **kwargs): # Create batch group with name, start_frame value, duration value, # set of schematic reel names, set of shelf reel names - flame.batch.create_batch_group( + bgroup = flame.batch.create_batch_group( name, start_frame=frame_start, duration=frame_duration, @@ -61,3 +61,5 @@ def create_batch(name, frame_start, frame_duration, **kwargs): # sort batch nodes flame.batch.organize() + + return bgroup diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index c54eeec05c..97b456c18c 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -16,12 +16,6 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): families = ["clip"] def process(self, instance): - frame_start = instance.data["frameStart"] - frame_end = instance.data["frameEnd"] - handle_start = instance.data["handleStart"] - handle_end = instance.data["handleEnd"] - frame_duration = (frame_end - frame_start) + 1 - asset_name = instance.data["asset"] add_tasks = instance.data["flameAddTasks"] # iterate all tasks from settings @@ -29,40 +23,59 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # exclude batch group if not task_data["create_batch_group"]: continue - task_name = task_data["name"] - batchgroup_name = "{}_{}".format(asset_name, task_name) - write_pref_data = self._get_write_prefs(instance, task_data) - batch_data = { - "shematic_reels": [ - "OP_LoadedReel" - ], - "write_pref": write_pref_data, - "handleStart": handle_start, - "handleEnd": handle_end - } - self.log.debug( - "__ batch_data: {}".format(pformat(batch_data))) + # 
create or get already created batch group + bgroup = self._get_batch_group(instance, task_data) - # check if the batch group already exists - bgroup = opfapi.get_batch_group_from_desktop(batchgroup_name) + # load plate to batch group + self.log.info("Loading subset `{}` into batch `{}`".format( + instance.data["subset"], bgroup.name.get_value() + )) - if not bgroup: - self.log.info( - "Creating new batch group: {}".format(batchgroup_name)) - # create batch with utils - opfapi.create_batch( - batchgroup_name, - frame_start, - frame_duration, - **batch_data - ) - else: - self.log.info( - "Updating batch group: {}".format(batchgroup_name)) - # update already created batch group - bgroup.start_frame = frame_start - bgroup.duration = frame_duration + def _get_batch_group(self, instance, task_data): + frame_start = instance.data["frameStart"] + frame_end = instance.data["frameEnd"] + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + frame_duration = (frame_end - frame_start) + 1 + asset_name = instance.data["asset"] + + task_name = task_data["name"] + batchgroup_name = "{}_{}".format(asset_name, task_name) + write_pref_data = self._get_write_prefs(instance, task_data) + + batch_data = { + "shematic_reels": [ + "OP_LoadedReel" + ], + "write_pref": write_pref_data, + "handleStart": handle_start, + "handleEnd": handle_end + } + self.log.debug( + "__ batch_data: {}".format(pformat(batch_data))) + + # check if the batch group already exists + bgroup = opfapi.get_batch_group_from_desktop(batchgroup_name) + + if not bgroup: + self.log.info( + "Creating new batch group: {}".format(batchgroup_name)) + # create batch with utils + bgroup = opfapi.create_batch( + batchgroup_name, + frame_start, + frame_duration, + **batch_data + ) + else: + self.log.info( + "Updating batch group: {}".format(batchgroup_name)) + # update already created batch group + bgroup.start_frame = frame_start + bgroup.duration = frame_duration + + return bgroup def _get_anamoty_data_with_current_task(self, instance, task_data): anatomy_data = copy.deepcopy(instance.data["anatomyData"]) From 38268bc83102964c22db15129516a32ba5d5f455 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 16:28:46 +0100 Subject: [PATCH 028/207] flame: let extractor drive loading to batch group --- .../settings/defaults/project_settings/flame.json | 4 +++- .../projects_schema/schema_project_flame.json | 14 ++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index 939752c778..a2b9bef103 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -62,7 +62,9 @@ "ignore_comment_attrs": false, "colorspace_out": "ACES - ACEScg", "representation_add_range": true, - "representation_tags": [] + "representation_tags": [], + "load_to_batch_group": true, + "batch_group_loader_name": "LoadClip" } } } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 8057b07d9c..c991577799 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -302,6 +302,20 @@ "type": "text", "multiline": false } + }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "load_to_batch_group", + 
"label": "Load to batch group reel", + "default": false + }, + { + "type": "text", + "key": "batch_group_loader_name", + "label": "Use loader name" } ] } From 0407465ee1a2438d8a84d9d0704bb38dd56c1a2c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 17:24:21 +0100 Subject: [PATCH 029/207] flame: add loadable arguments to extracted repres --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 32f6b9508f..7c29bcf944 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -211,7 +211,11 @@ class ExtractSubsetResources(openpype.api.Extractor): "tags": repre_tags, "data": { "colorspace": color_out - } + }, + "load_to_batch_group": preset_config.get( + "load_to_batch_group"), + "batch_group_loader_name": preset_config.get( + "batch_group_loader_name") } # collect all available content of export dir From 638864150493e0f46b2fd41a6fbe0609434dc536 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 17:24:43 +0100 Subject: [PATCH 030/207] flame: finalize loading procedure in batch integrator --- .../plugins/publish/integrate_batch_group.py | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 97b456c18c..62211d7ace 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -4,6 +4,7 @@ from pprint import pformat import pyblish from openpype.lib import get_workdir import openpype.hosts.flame.api as opfapi +import openpype.pipeline as op_pipeline @pyblish.api.log @@ -15,6 +16,9 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): hosts = ["flame"] families = ["clip"] + # settings + default_loader = "LoadClip" + def process(self, instance): add_tasks = instance.data["flameAddTasks"] @@ -31,6 +35,77 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): self.log.info("Loading subset `{}` into batch `{}`".format( instance.data["subset"], bgroup.name.get_value() )) + self._load_clip_to_context(instance, bgroup) + + def _load_clip_to_context(self, instance, bgroup): + # get all loaders for host + loaders = op_pipeline.discover_loader_plugins() + + # get all published representations + published_representations = instance.data["published_representations"] + + # get all loadable representations + representations = instance.data["representations"] + + # get repre_id for the loadable representations + loadable_representations = [ + { + "name": _repr["name"], + "loader": _repr.get("batch_group_loader_name"), + # match loader to the loadable representation + "_id": next( + ( + id + for id, repr in published_representations.items() + if repr["representation"]["name"] == _repr["name"] + ), + None + ) + } + for _repr in representations + if _repr.get("load_to_batch_group") is not None + ] + + # get representation context from the repre_id + representation_ids = [ + repre["_id"] + for repre in loadable_representations + if repre["_id"] is not None + ] + repre_contexts = op_pipeline.load.get_repres_contexts( + representation_ids) + + # loop all returned repres from repre_context dict + for repre_id, repre_context in repre_contexts.items(): + 
# get loader name by representation id + loader_name = next( + ( + repr["loader"] + for repr in loadable_representations + if repr["_id"] == repre_id + ), + self.default_loader + ) + # get loader plugin + Loader = next( + ( + loader_plugin + for loader_plugin in loaders + if loader_plugin.__name__ == loader_name + ), + None + ) + if Loader: + # load to flame by representation context + op_pipeline.load.load_with_repre_context(Loader, repre_context) + else: + self.log.warning( + "Something got wrong and there is not Loader found for " + "following data: {}".format( + pformat(loadable_representations)) + ) + + def _get_batch_group(self, instance, task_data): frame_start = instance.data["frameStart"] From cde1caaa9180fcc7e4165995e9760429f2a55e07 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 17:40:27 +0100 Subject: [PATCH 031/207] flame: clean args types --- .../publish/extract_subset_resources.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 7c29bcf944..00b87c05a0 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -108,6 +108,18 @@ class ExtractSubsetResources(openpype.api.Extractor): ignore_comment_attrs = preset_config["ignore_comment_attrs"] color_out = preset_config["colorspace_out"] + # get attribures related loading in integrate_batch_group + load_to_batch_group = preset_config.get( + "load_to_batch_group") + batch_group_loader_name = preset_config.get( + "batch_group_loader_name") + + # convert to None if empty string + if batch_group_loader_name: + batch_group_loader_name = str(batch_group_loader_name) + if batch_group_loader_name == "": + batch_group_loader_name = None + # get frame range with handles for representation range frame_start_handle = frame_start - handle_start source_duration_handles = ( @@ -212,10 +224,8 @@ class ExtractSubsetResources(openpype.api.Extractor): "data": { "colorspace": color_out }, - "load_to_batch_group": preset_config.get( - "load_to_batch_group"), - "batch_group_loader_name": preset_config.get( - "batch_group_loader_name") + "load_to_batch_group": load_to_batch_group, + "batch_group_loader_name": batch_group_loader_name } # collect all available content of export dir From 4cfd22b6393b3a4d7e5c18046f7d0340ce124e27 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 17:47:06 +0100 Subject: [PATCH 032/207] flame: improving loading with exception --- .../hosts/flame/plugins/publish/integrate_batch_group.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 62211d7ace..08632c3018 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -97,7 +97,13 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): ) if Loader: # load to flame by representation context - op_pipeline.load.load_with_repre_context(Loader, repre_context) + try: + op_pipeline.load.load_with_repre_context( + Loader, repre_context) + except op_pipeline.load.IncompatibleLoaderError as msg: + self.log.error( + "Check allowed representations for Loader `{}` " + "in settings > error: {}".format(Loader.__name__, msg)) else: self.log.warning( 
"Something got wrong and there is not Loader found for " From a4f8cdb76962f6f4c9e4efef49079c3926e486e6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 17:51:22 +0100 Subject: [PATCH 033/207] flame: better logging for loading fail --- .../hosts/flame/plugins/publish/integrate_batch_group.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 08632c3018..f1049e4697 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -103,7 +103,14 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): except op_pipeline.load.IncompatibleLoaderError as msg: self.log.error( "Check allowed representations for Loader `{}` " - "in settings > error: {}".format(Loader.__name__, msg)) + "in settings > error: {}".format( + Loader.__name__, msg)) + self.log.error( + "Representaton context >>{}<< is not compatible " + "with loader `{}`".format( + pformat(repre_context), Loader.__name__ + ) + ) else: self.log.warning( "Something got wrong and there is not Loader found for " From c210d3914efd9cd9d3657b054c01ec3dd28ce4d1 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 25 Mar 2022 14:51:59 +0300 Subject: [PATCH 034/207] Refactor function for matching AOV into new file --- openpype/pipeline/farm/patterning.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 openpype/pipeline/farm/patterning.py diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py new file mode 100644 index 0000000000..3138dd6873 --- /dev/null +++ b/openpype/pipeline/farm/patterning.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +import os +import re + +def match_aov_pattern(self, app, render_file_name): + """Matching against a AOV pattern in the render files + In order to match the AOV name + we must compare against the render filename string + that we are grabbing the render filename string + from the collection that we have grabbed from exp_files. 
+ """ + + if app in self.aov_filter.keys(): + for aov_pattern in self.aov_filter[app]: + if re.match(aov_pattern, render_file_name): + preview = True + return preview \ No newline at end of file From b9f5bb3a7be9c0f167694115c51ad8fdc84bd357 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 25 Mar 2022 14:54:32 +0300 Subject: [PATCH 035/207] Cleanup placement --- .../plugins/publish/submit_publish_job.py | 37 +++++++------------ 1 file changed, 13 insertions(+), 24 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index b1f6f9a485..7f65011864 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -8,6 +8,7 @@ from copy import copy, deepcopy import requests import clique import openpype.api +from openpype.pipeline.farm.patterning import match_aov_pattern from avalon import api, io @@ -446,21 +447,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): app = os.environ.get("AVALON_APP", "") preview = False - if app in self.aov_filter.keys(): - for aov_pattern in self.aov_filter[app]: - # Matching against the AOV pattern in the render files - # In order to match the AOV name - # we must compare against the render filename string - # We are grabbing the render filename string - # from the collection that we have grabbed from exp_files - if isinstance(col, list): - render_file_name = os.path.basename(col[0]) - else: - render_file_name = os.path.basename(col) - if re.match(aov_pattern, render_file_name): - preview = True - break + if isinstance(col, list): + render_file_name = os.path.basename(col[0]) + else: + render_file_name = os.path.basename(col) + + preview = match_aov_pattern(self, app, render_file_name) + + + if instance_data.get("multipartExr"): preview = True @@ -532,18 +528,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): for collection in collections: ext = collection.tail.lstrip(".") preview = False + render_file_name = list(collection[0]) + app = os.environ.get("AVALON_APP", "") # if filtered aov name is found in filename, toggle it for # preview video rendering - for app in self.aov_filter.keys(): - if os.environ.get("AVALON_APP", "") == app: - for aov in self.aov_filter[app]: - if re.match( - aov, - list(collection)[0] - ): - preview = True - break - + preview = match_aov_pattern(self, app, render_file_name) # toggle preview on if multipart is on if instance.get("multipartExr", False): preview = True From f175d77d006d145b1d2a88cdb71e78b26882c9af Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 25 Mar 2022 14:57:50 +0300 Subject: [PATCH 036/207] remove unused import --- openpype/pipeline/farm/patterning.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 3138dd6873..e62362b0ba 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -import os import re def match_aov_pattern(self, app, render_file_name): @@ -9,7 +8,8 @@ def match_aov_pattern(self, app, render_file_name): that we are grabbing the render filename string from the collection that we have grabbed from exp_files. 
""" - + + if app in self.aov_filter.keys(): for aov_pattern in self.aov_filter[app]: if re.match(aov_pattern, render_file_name): From d40429d5e7d6a70a6402bc89e2d8616d1abaf2d2 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 25 Mar 2022 14:58:30 +0300 Subject: [PATCH 037/207] remove empty lines --- openpype/pipeline/farm/patterning.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index e62362b0ba..7e717a9fff 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -8,8 +8,6 @@ def match_aov_pattern(self, app, render_file_name): that we are grabbing the render filename string from the collection that we have grabbed from exp_files. """ - - if app in self.aov_filter.keys(): for aov_pattern in self.aov_filter[app]: if re.match(aov_pattern, render_file_name): From 911cfb2a94f1b330531b2905782500c5e8ecc5e2 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 25 Mar 2022 15:04:03 +0300 Subject: [PATCH 038/207] adds empty line --- openpype/pipeline/farm/patterning.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 7e717a9fff..0ee8499e73 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -1,12 +1,13 @@ # -*- coding: utf-8 -*- import re + def match_aov_pattern(self, app, render_file_name): """Matching against a AOV pattern in the render files In order to match the AOV name we must compare against the render filename string that we are grabbing the render filename string - from the collection that we have grabbed from exp_files. + from the collection that we have grabbed from exp_files. """ if app in self.aov_filter.keys(): for aov_pattern in self.aov_filter[app]: From 2429e5c07f792d998fc4def930e6b8288e3ed340 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 25 Mar 2022 15:24:03 +0300 Subject: [PATCH 039/207] style fixes --- openpype/pipeline/farm/patterning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 0ee8499e73..308546a1c9 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -5,7 +5,7 @@ import re def match_aov_pattern(self, app, render_file_name): """Matching against a AOV pattern in the render files In order to match the AOV name - we must compare against the render filename string + we must compare against the render filename string that we are grabbing the render filename string from the collection that we have grabbed from exp_files. 
""" From 6e70c412e9866da08b859751088f6265371cd3ef Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Fri, 25 Mar 2022 16:12:16 +0300 Subject: [PATCH 040/207] Remove unused file --- openpype/pipeline/farm/flagging.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 openpype/pipeline/farm/flagging.py diff --git a/openpype/pipeline/farm/flagging.py b/openpype/pipeline/farm/flagging.py deleted file mode 100644 index e69de29bb2..0000000000 From 585d53deee223f359e6732620ea0188f8d00ec5c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Mar 2022 15:21:22 +0100 Subject: [PATCH 041/207] flame: improving loading in integrate batch plugin --- .../plugins/publish/extract_subset_resources.py | 6 ++---- .../flame/plugins/publish/integrate_batch_group.py | 14 +++++++++----- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 00b87c05a0..31f7b6d574 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -115,10 +115,8 @@ class ExtractSubsetResources(openpype.api.Extractor): "batch_group_loader_name") # convert to None if empty string - if batch_group_loader_name: - batch_group_loader_name = str(batch_group_loader_name) - if batch_group_loader_name == "": - batch_group_loader_name = None + if batch_group_loader_name == "": + batch_group_loader_name = None # get frame range with handles for representation range frame_start_handle = frame_start - handle_start diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index f1049e4697..81b304ff0b 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -66,6 +66,9 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): if _repr.get("load_to_batch_group") is not None ] + self.log.debug("__ loadable_representations: {}".format(pformat( + loadable_representations))) + # get representation context from the repre_id representation_ids = [ repre["_id"] @@ -75,17 +78,20 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): repre_contexts = op_pipeline.load.get_repres_contexts( representation_ids) + self.log.debug("__ repre_contexts: {}".format(pformat( + repre_contexts))) + # loop all returned repres from repre_context dict for repre_id, repre_context in repre_contexts.items(): + self.log.debug("__ repre_id: {}".format(repre_id)) # get loader name by representation id loader_name = next( ( repr["loader"] for repr in loadable_representations if repr["_id"] == repre_id - ), - self.default_loader - ) + )) or self.default_loader + # get loader plugin Loader = next( ( @@ -118,8 +124,6 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): pformat(loadable_representations)) ) - - def _get_batch_group(self, instance, task_data): frame_start = instance.data["frameStart"] frame_end = instance.data["frameEnd"] From ed4388184ad768dbf00ce050efff8eaf11d3cf7c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Mar 2022 15:56:12 +0100 Subject: [PATCH 042/207] flame: adding clip loader to current batch --- .../flame/plugins/load/load_clip_batch.py | 135 ++++++++++++++++++ .../defaults/project_settings/flame.json | 22 +++ .../projects_schema/schema_project_flame.json | 42 ++++++ 3 files changed, 199 insertions(+) create mode 100644 
openpype/hosts/flame/plugins/load/load_clip_batch.py diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py new file mode 100644 index 0000000000..81af34744e --- /dev/null +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -0,0 +1,135 @@ +import os +import flame +from pprint import pformat +import openpype.hosts.flame.api as opfapi + + +class LoadClipBatch(opfapi.ClipLoader): + """Load a subset to timeline as clip + + Place clip to timeline on its asset origin timings collected + during conforming to project + """ + + families = ["render2d", "source", "plate", "render", "review"] + representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264"] + + label = "Load as clip to current batch" + order = -10 + icon = "code-fork" + color = "orange" + + # settings + reel_name = "OP_LoadedReel" + clip_name_template = "{asset}_{subset}_{representation}" + + def load(self, context, name, namespace, options): + + # get flame objects + self.batch = flame.batch + + # load clip to timeline and get main variables + namespace = namespace + version = context['version'] + version_data = version.get("data", {}) + version_name = version.get("name", None) + colorspace = version_data.get("colorspace", None) + clip_name = self.clip_name_template.format( + **context["representation"]["context"]) + + # todo: settings in imageio + # convert colorspace with ocio to flame mapping + # in imageio flame section + colorspace = colorspace + + # create workfile path + workfile_dir = os.environ["AVALON_WORKDIR"] + openclip_dir = os.path.join( + workfile_dir, clip_name + ) + openclip_path = os.path.join( + openclip_dir, clip_name + ".clip" + ) + if not os.path.exists(openclip_dir): + os.makedirs(openclip_dir) + + # prepare clip data from context ad send it to openClipLoader + loading_context = { + "path": self.fname.replace("\\", "/"), + "colorspace": colorspace, + "version": "v{:0>3}".format(version_name), + "logger": self.log + + } + self.log.debug(pformat( + loading_context + )) + self.log.debug(openclip_path) + + # make openpype clip file + opfapi.OpenClipSolver(openclip_path, loading_context).make() + + # prepare Reel group in actual desktop + opc = self._get_clip( + clip_name, + openclip_path + ) + + # add additional metadata from the version to imprint Avalon knob + add_keys = [ + "frameStart", "frameEnd", "source", "author", + "fps", "handleStart", "handleEnd" + ] + + # move all version data keys to tag data + data_imprint = { + key: version_data.get(key, str(None)) + for key in add_keys + } + # add variables related to version context + data_imprint.update({ + "version": version_name, + "colorspace": colorspace, + "objectName": clip_name + }) + + # TODO: finish the containerisation + # opc_segment = opfapi.get_clip_segment(opc) + + # return opfapi.containerise( + # opc_segment, + # name, namespace, context, + # self.__class__.__name__, + # data_imprint) + + return opc + + def _get_clip(self, name, clip_path): + reel = self._get_reel() + + # with maintained openclip as opc + matching_clip = next( + ( + cl for cl in reel.clips + if cl.name.get_value() == name + ) + ) + + if not matching_clip: + created_clips = flame.import_clips(str(clip_path), reel) + return created_clips.pop() + + return matching_clip + + def _get_reel(self): + + matching_reel = [ + rg for rg in self.batch.reels + if rg.name.get_value() == self.reel_name + ] + + return ( + matching_reel.pop() + if matching_reel + else self.batch.create_reel(str(self.reel_name)) + ) diff --git 
a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index a2b9bef103..afd0834c9d 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -92,6 +92,28 @@ "reel_group_name": "OpenPype_Reels", "reel_name": "Loaded", "clip_name_template": "{asset}_{subset}_{representation}" + }, + "LoadClipBatch": { + "enabled": true, + "families": [ + "render2d", + "source", + "plate", + "render", + "review" + ], + "representations": [ + "exr", + "dpx", + "jpg", + "jpeg", + "png", + "h264", + "mov", + "mp4" + ], + "reel_name": "OP_LoadedReel", + "clip_name_template": "{asset}_{subset}_{representation}" } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index c991577799..fe11d63ac2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -376,6 +376,48 @@ "label": "Clip name template" } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "LoadClipBatch", + "label": "Load as clip to current batch", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "families", + "label": "Families", + "object_type": "text" + }, + { + "type": "list", + "key": "representations", + "label": "Representations", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "reel_name", + "label": "Reel name" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "clip_name_template", + "label": "Clip name template" + } + ] } ] } From 267a3e04ed4fda8c3837af28379e3a8812312fb2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Mar 2022 16:52:03 +0100 Subject: [PATCH 043/207] flame: improving batch attributes --- openpype/hosts/flame/api/batch_utils.py | 1 + openpype/hosts/flame/plugins/load/load_clip_batch.py | 11 +++++------ .../flame/plugins/publish/integrate_batch_group.py | 4 +++- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index 99e053faf1..43742c6e4f 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -56,6 +56,7 @@ def create_batch(name, frame_start, frame_duration, **kwargs): write_node.frame_padding = write_pref["frame_padding"] write_node.version_mode = write_pref["version_mode"] write_node.version_name = write_pref["version_name"] + write_node.version_padding = write_pref["version_padding"] flame.batch.connect_nodes(comp_node, "Result", write_node, "Front") diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 81af34744e..bf0bbb5168 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -108,12 +108,11 @@ class LoadClipBatch(opfapi.ClipLoader): reel = self._get_reel() # with maintained openclip as opc - matching_clip = next( - ( - cl for cl in reel.clips - if cl.name.get_value() == name - ) - ) + matching_clip = None + for cl in reel.clips: + if cl.name.get_value() != name: + continue + matching_clip = cl if not matching_clip: created_clips = flame.import_clips(str(clip_path), reel) 
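Note (illustrative only, not part of the patch): the refactored _get_clip above, like _get_reel, follows a simple "find by name, otherwise create/import" lookup. The sketch below shows that pattern with made-up stand-ins (Item, find_or_create); in the plugin itself the objects come from the Flame API (reel.clips, batch.create_reel, flame.import_clips), which is only available inside Flame.

    class Item(object):
        """Minimal stand-in for a Flame reel clip with a name."""
        def __init__(self, name):
            self.name = name

    def find_or_create(items, name, create):
        # return the first item whose name matches, otherwise build one
        for item in items:
            if item.name != name:
                continue
            return item
        new_item = create(name)
        items.append(new_item)
        return new_item

    # usage: an existing clip is reused, a missing one is created on demand
    reel_clips = [Item("shot010_plate_exr")]
    clip = find_or_create(reel_clips, "shot020_plate_exr", Item)
    print(clip.name)  # -> shot020_plate_exr
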
diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 81b304ff0b..536bf0d807 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -249,6 +249,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # Only available if create_clip = True. version_mode = "Follow Iteration" version_name = "v" + version_padding = 3 return { "name": name, @@ -266,7 +267,8 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): "frame_index_mode": frame_index_mode, "frame_padding": frame_padding, "version_mode": version_mode, - "version_name": version_name + "version_name": version_name, + "version_padding": version_padding } def _get_shot_task_dir_path(self, instance, task_data): From f8e99f38c97cf37b8001e4c5848d93e00a7f9107 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Mar 2022 16:53:47 +0100 Subject: [PATCH 044/207] flame: make dirs for batch renders add one more directory layer for renders --- .../hosts/flame/plugins/publish/integrate_batch_group.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 536bf0d807..eaab429111 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -199,6 +199,9 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): render_dir_path = os.path.join( task_workfile_path, "render", "flame") + if not os.path.exists(render_dir_path): + os.makedirs(render_dir_path, mode=0o777) + # TODO: add most of these to `imageio/flame/batch/write_node` name = "{project[code]}_{asset}_{task[name]}".format( **anatomy_data @@ -208,7 +211,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # /path/to/file.[0001-0010].exr media_path = render_dir_path # name of file represented by tokens - media_path_pattern = "_v." + media_path_pattern = "_v/_v." # The Create Open Clip attribute of the Write File node. \ # Determines if an Open Clip is created by the Write File node. 
create_clip = True From e631218ee44c36f9f7fabdcf666d77daccd771be Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:29:26 +0300 Subject: [PATCH 045/207] refactor function, fix comments --- .../plugins/publish/submit_publish_job.py | 9 +++--- openpype/pipeline/farm/patterning.py | 30 +++++++++++-------- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 7f65011864..0a374a75b6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -452,11 +452,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): render_file_name = os.path.basename(col[0]) else: render_file_name = os.path.basename(col) - - preview = match_aov_pattern(self, app, render_file_name) + aov_patterns = self.aov_filter.keys() + preview = match_aov_pattern(app, aov_patterns, render_file_name) - + # toggle preview on if multipart is on if instance_data.get("multipartExr"): preview = True @@ -530,9 +530,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): preview = False render_file_name = list(collection[0]) app = os.environ.get("AVALON_APP", "") + aov_patterns = self.aov_filter.keys() # if filtered aov name is found in filename, toggle it for # preview video rendering - preview = match_aov_pattern(self, app, render_file_name) + preview = match_aov_pattern(app, aov_patterns, render_file_name) # toggle preview on if multipart is on if instance.get("multipartExr", False): preview = True diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 308546a1c9..0ad7e682fa 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -1,16 +1,22 @@ # -*- coding: utf-8 -*- import re - -def match_aov_pattern(self, app, render_file_name): - """Matching against a AOV pattern in the render files - In order to match the AOV name - we must compare against the render filename string - that we are grabbing the render filename string - from the collection that we have grabbed from exp_files. +def match_aov_pattern(app, aov_patterns, render_file_name): + """Matching against a `AOV` pattern in the render files. + + In order to match the AOV name we must compare + against the render filename string that we are + grabbing the render filename string from the collection + that we have grabbed from `exp_files`. + + Args: + app (str): Host name. + aov_patterns (list): List of AOV patterns from AOV filters. + render_file_name (str): Incoming file name to match against. + + Returns: + bool: Review state for rendered file (render_file_name). 
""" - if app in self.aov_filter.keys(): - for aov_pattern in self.aov_filter[app]: - if re.match(aov_pattern, render_file_name): - preview = True - return preview \ No newline at end of file + aov_pattern = aov_patterns.get(app, []) + if aov_pattern: + return any(re.match(aov_pattern, render_file_name) for aov_pattern in aov_patterns) From cc86482f028b8fa34ab248ead2075db9af9983a4 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:30:50 +0300 Subject: [PATCH 046/207] style fixes --- openpype/pipeline/farm/patterning.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 0ad7e682fa..60467d47fa 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -1,12 +1,13 @@ # -*- coding: utf-8 -*- import re + def match_aov_pattern(app, aov_patterns, render_file_name): """Matching against a `AOV` pattern in the render files. - In order to match the AOV name we must compare - against the render filename string that we are - grabbing the render filename string from the collection + In order to match the AOV name we must compare + against the render filename string that we are + grabbing the render filename string from the collection that we have grabbed from `exp_files`. Args: From 6cd0423e5f8f0d3cacd9f24d86508c2280d7e90e Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:31:58 +0300 Subject: [PATCH 047/207] remove whitespace --- openpype/pipeline/farm/patterning.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 60467d47fa..d0a25f8e77 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -4,17 +4,17 @@ import re def match_aov_pattern(app, aov_patterns, render_file_name): """Matching against a `AOV` pattern in the render files. - + In order to match the AOV name we must compare against the render filename string that we are grabbing the render filename string from the collection that we have grabbed from `exp_files`. - + Args: app (str): Host name. aov_patterns (list): List of AOV patterns from AOV filters. render_file_name (str): Incoming file name to match against. - + Returns: bool: Review state for rendered file (render_file_name). 
""" From 72b6ae620a5a69a312976f6821c3b443d9f27478 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:33:37 +0300 Subject: [PATCH 048/207] remove extra line and whitespace --- .../modules/deadline/plugins/publish/submit_publish_job.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 0a374a75b6..a4e07a0684 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -454,8 +454,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): render_file_name = os.path.basename(col) aov_patterns = self.aov_filter.keys() preview = match_aov_pattern(app, aov_patterns, render_file_name) - - + # toggle preview on if multipart is on if instance_data.get("multipartExr"): preview = True From d45a7fdb3d849b11ff526d94c00fc9d6a4c60f01 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:36:22 +0300 Subject: [PATCH 049/207] Fix line length --- openpype/pipeline/farm/patterning.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index d0a25f8e77..ad59ecb509 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -20,4 +20,5 @@ def match_aov_pattern(app, aov_patterns, render_file_name): """ aov_pattern = aov_patterns.get(app, []) if aov_pattern: - return any(re.match(aov_pattern, render_file_name) for aov_pattern in aov_patterns) + return any(re.match(aov_pattern, render_file_name) + for aov_pattern in aov_patterns) From 071ae5876571252e1c025fdd9dcc01cf24fcbd00 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:37:49 +0300 Subject: [PATCH 050/207] fix over indentation --- openpype/pipeline/farm/patterning.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index ad59ecb509..e92078b27c 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -20,5 +20,5 @@ def match_aov_pattern(app, aov_patterns, render_file_name): """ aov_pattern = aov_patterns.get(app, []) if aov_pattern: - return any(re.match(aov_pattern, render_file_name) - for aov_pattern in aov_patterns) + return any(re.match(aov_pattern, render_file_name) + for aov_pattern in aov_patterns) From b54bba9b0f492908d33f3f6e77f63adadf224663 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:39:15 +0300 Subject: [PATCH 051/207] fix under indentaiton --- openpype/pipeline/farm/patterning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index e92078b27c..f853b77601 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -21,4 +21,4 @@ def match_aov_pattern(app, aov_patterns, render_file_name): aov_pattern = aov_patterns.get(app, []) if aov_pattern: return any(re.match(aov_pattern, render_file_name) - for aov_pattern in aov_patterns) + for aov_pattern in aov_patterns) From b11d73671f7adc94d9a439f76b2f00d2172ed422 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:48:19 +0300 Subject: [PATCH 052/207] Fix function error, assuming one aov_pattern --- openpype/pipeline/farm/patterning.py | 3 +-- 1 file changed, 1 insertion(+), 2 
deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index f853b77601..e534ed7506 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -20,5 +20,4 @@ def match_aov_pattern(app, aov_patterns, render_file_name): """ aov_pattern = aov_patterns.get(app, []) if aov_pattern: - return any(re.match(aov_pattern, render_file_name) - for aov_pattern in aov_patterns) + return any(re.match(aov_pattern, render_file_name)) From 340afab7d468cae8d4b30d7b90315b8ef3a9883a Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 21:55:13 +0300 Subject: [PATCH 053/207] remove unneeded any() --- openpype/pipeline/farm/patterning.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index e534ed7506..4703f4999d 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -20,4 +20,6 @@ def match_aov_pattern(app, aov_patterns, render_file_name): """ aov_pattern = aov_patterns.get(app, []) if aov_pattern: - return any(re.match(aov_pattern, render_file_name)) + if re.match(aov_pattern, render_file_name): + preview = True + return preview From 087f939aa6e4ea19a3addce828cb86753fb27cdb Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 22:46:57 +0300 Subject: [PATCH 054/207] add missing else statement --- openpype/pipeline/farm/patterning.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 4703f4999d..e1c05df77f 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -23,3 +23,5 @@ def match_aov_pattern(app, aov_patterns, render_file_name): if re.match(aov_pattern, render_file_name): preview = True return preview + else: + return False From 67f5f69f00b7ad14e5d7d37c515eb80f43520bbb Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Sat, 26 Mar 2022 22:57:51 +0300 Subject: [PATCH 055/207] fix passing keys only to matching function --- .../modules/deadline/plugins/publish/submit_publish_job.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index a4e07a0684..16078fc236 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -452,7 +452,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): render_file_name = os.path.basename(col[0]) else: render_file_name = os.path.basename(col) - aov_patterns = self.aov_filter.keys() + aov_patterns = self.aov_filter preview = match_aov_pattern(app, aov_patterns, render_file_name) # toggle preview on if multipart is on @@ -529,7 +529,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): preview = False render_file_name = list(collection[0]) app = os.environ.get("AVALON_APP", "") - aov_patterns = self.aov_filter.keys() + aov_patterns = self.aov_filter # if filtered aov name is found in filename, toggle it for # preview video rendering preview = match_aov_pattern(app, aov_patterns, render_file_name) From 08f80ecf15911f1e96808fd8c6032b55d4f596e7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 28 Mar 2022 13:29:16 +0200 Subject: [PATCH 056/207] flame: make sure only one clip in xml --- openpype/hosts/flame/api/plugin.py | 59 ++++++++++++++++++++++-------- 1 
file changed, 43 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 4c9d3c5383..3a322e5208 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -1,24 +1,22 @@ +import itertools import os import re import shutil import sys -from xml.etree import ElementTree as ET -import six -import qargparse -from Qt import QtWidgets, QtCore -import openpype.api as openpype -from openpype.pipeline import ( - LegacyCreator, - LoaderPlugin, -) -from openpype import style -from . import ( - lib as flib, - pipeline as fpipeline, - constants -) - +import xml.etree.cElementTree as cET from copy import deepcopy +from xml.etree import ElementTree as ET + +import openpype.api as openpype +import qargparse +import six +from openpype import style +from openpype.pipeline import LegacyCreator, LoaderPlugin +from Qt import QtCore, QtWidgets + +from . import constants +from . import lib as flib +from . import pipeline as fpipeline log = openpype.Logger.get_logger(__name__) @@ -749,10 +747,39 @@ class OpenClipSolver: # execute creation of clip xml template data try: openpype.run_subprocess(cmd_args) + self._make_single_clip_media_info() except TypeError: self.log.error("Error creating self.tmp_file") six.reraise(*sys.exc_info()) + def _make_single_clip_media_info(self): + with open(self.tmp_file) as f: + lines = f.readlines() + _added_root = itertools.chain( + "", deepcopy(lines)[1:], "") + new_root = ET.fromstringlist(_added_root) + + # find the clip which is matching to my input name + xml_clips = new_root.findall("clip") + matching_clip = None + for xml_clip in xml_clips: + if xml_clip.find("name").text == self.feed_basename: + matching_clip = xml_clip + + if not matching_clip: + # return warning there is missing clip + raise ET.ParseError( + "Missing clip in `{}`. 
Available clips {}".format( + self.feed_basename, [ + xml_clip.find("name").text + for xml_clip in xml_clips + ] + )) + # save it as new file + tree = cET.ElementTree(matching_clip) + tree.write(self.tmp_file, xml_declaration=True, + method='xml', encoding='UTF-8') + def _clear_tmp_file(self): if os.path.isfile(self.tmp_file): os.remove(self.tmp_file) From 34a65cb646e5d267899f3f3df5eed4de72ac2074 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 28 Mar 2022 13:29:40 +0200 Subject: [PATCH 057/207] flame: ignore clip file with zero lines --- openpype/hosts/flame/api/plugin.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 3a322e5208..949e8ad406 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -708,19 +708,32 @@ class OpenClipSolver: self.feed_dir = os.path.dirname(feed_path) self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() - if not os.path.isfile(openclip_file_path): + if not self._is_valid_tmp_file(openclip_file_path): # openclip does not exist yet and will be created self.tmp_file = self.out_file = openclip_file_path self.create_new_clip = True else: + # update already created clip # output a temp file self.out_file = openclip_file_path self.tmp_file = os.path.join(self.feed_dir, self.tmp_name) + + # remove previously generated temp files + # it will be regenerated self._clear_tmp_file() self.log.info("Temp File: {}".format(self.tmp_file)) + def _is_valid_tmp_file(self, file): + # check if file exists + if os.path.isfile(file): + with open(self.tmp_file) as f: + lines = f.readlines() + if len(lines) < 1: + self._clear_tmp_file() + return False + def make(self): self._generate_media_info_file() From 1c6ab37f351e87a0a4a01a93eb14de47668333f6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 28 Mar 2022 20:02:58 +0200 Subject: [PATCH 058/207] flame: improving tmp file validation --- openpype/hosts/flame/api/plugin.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 949e8ad406..ab60bbad11 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -730,9 +730,12 @@ class OpenClipSolver: if os.path.isfile(file): with open(self.tmp_file) as f: lines = f.readlines() - if len(lines) < 1: - self._clear_tmp_file() - return False + if len(lines) > 2: + return True + + # file is probably corrupted + self._clear_tmp_file() + return False def make(self): self._generate_media_info_file() From 0abc8ae61367a3ee03a896704557b324ffc1e1bd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 28 Mar 2022 20:18:59 +0200 Subject: [PATCH 059/207] flame: rework xml write file --- openpype/hosts/flame/api/plugin.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index ab60bbad11..0eba06a86d 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -791,10 +791,8 @@ class OpenClipSolver: for xml_clip in xml_clips ] )) - # save it as new file - tree = cET.ElementTree(matching_clip) - tree.write(self.tmp_file, xml_declaration=True, - method='xml', encoding='UTF-8') + + self._write_result_xml_to_file(self.tmp_file, matching_clip) def _clear_tmp_file(self): if os.path.isfile(self.tmp_file): @@ -901,7 +899,7 @@ class OpenClipSolver: self.log.info("Adding feed version: 
{}".format( self.feed_version_name)) - self._write_result_xml_to_file(xml_data) + self._write_result_xml_to_file(self.out_file, xml_data) self.log.info("openClip Updated: {}".format(self.out_file)) @@ -940,9 +938,11 @@ class OpenClipSolver: self._clear_handler(xml_root) return ET.tostring(xml_root).decode('utf-8') - def _write_result_xml_to_file(self, xml_data): - with open(self.out_file, "w") as f: - f.write(xml_data) + def _write_result_xml_to_file(self, file, xml_data): + # save it as new file + tree = cET.ElementTree(xml_data) + tree.write(file, xml_declaration=True, + method='xml', encoding='UTF-8') def _create_openclip_backup_file(self, file): bck_file = "{}.bak".format(file) From ae36d089690f9acb078cc185e5315667523669dc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 28 Mar 2022 20:54:12 +0200 Subject: [PATCH 060/207] flame: little fixes of loading --- openpype/hosts/flame/api/plugin.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 0eba06a86d..3673dc6671 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -728,13 +728,14 @@ class OpenClipSolver: def _is_valid_tmp_file(self, file): # check if file exists if os.path.isfile(file): - with open(self.tmp_file) as f: + # test also if file is not empty + with open(file) as f: lines = f.readlines() if len(lines) > 2: return True # file is probably corrupted - self._clear_tmp_file() + os.remove(file) return False def make(self): @@ -779,7 +780,7 @@ class OpenClipSolver: xml_clips = new_root.findall("clip") matching_clip = None for xml_clip in xml_clips: - if xml_clip.find("name").text == self.feed_basename: + if xml_clip.find("name").text in self.feed_basename: matching_clip = xml_clip if not matching_clip: From 2bf75d270a3fbfa0054d750159439a52e1f0369f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 28 Mar 2022 21:05:35 +0200 Subject: [PATCH 061/207] flame: fix loaded name to `output` make condition for fix if output is not in context data --- openpype/hosts/flame/plugins/load/load_clip.py | 2 +- openpype/hosts/flame/plugins/load/load_clip_batch.py | 7 ++++++- openpype/settings/defaults/project_settings/flame.json | 4 ++-- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 8980f72cb8..b27600db1f 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -22,7 +22,7 @@ class LoadClip(opfapi.ClipLoader): # settings reel_group_name = "OpenPype_Reels" reel_name = "Loaded" - clip_name_template = "{asset}_{subset}_{representation}" + clip_name_template = "{asset}_{subset}_{output}" def load(self, context, name, namespace, options): diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index bf0bbb5168..1f87f94cc6 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -21,7 +21,7 @@ class LoadClipBatch(opfapi.ClipLoader): # settings reel_name = "OP_LoadedReel" - clip_name_template = "{asset}_{subset}_{representation}" + clip_name_template = "{asset}_{subset}_{output}" def load(self, context, name, namespace, options): @@ -34,6 +34,11 @@ class LoadClipBatch(opfapi.ClipLoader): version_data = version.get("data", {}) version_name = version.get("name", None) colorspace = 
version_data.get("colorspace", None) + + # in case output is not in context replace key to representation + if not context["representation"]["context"].get("output"): + self.clip_name_template.replace("output", "representation") + clip_name = self.clip_name_template.format( **context["representation"]["context"]) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index afd0834c9d..ef7a2a4467 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -91,7 +91,7 @@ ], "reel_group_name": "OpenPype_Reels", "reel_name": "Loaded", - "clip_name_template": "{asset}_{subset}_{representation}" + "clip_name_template": "{asset}_{subset}_{output}" }, "LoadClipBatch": { "enabled": true, @@ -113,7 +113,7 @@ "mp4" ], "reel_name": "OP_LoadedReel", - "clip_name_template": "{asset}_{subset}_{representation}" + "clip_name_template": "{asset}_{subset}_{output}" } } } \ No newline at end of file From eda39b5de29e9bce283a3326427db8508d2cfb05 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 28 Mar 2022 21:12:56 +0200 Subject: [PATCH 062/207] flame: fix write to xml file input args --- openpype/hosts/flame/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 3673dc6671..750609f7d6 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -829,7 +829,7 @@ class OpenClipSolver: xml_data = self._fix_xml_data(tmp_xml) self.log.info("Adding feed version: {}".format(self.feed_basename)) - self._write_result_xml_to_file(xml_data) + self._write_result_xml_to_file(self.out_file, xml_data) self.log.info("openClip Updated: {}".format(self.tmp_file)) From 44257be4863cc0eb0522ef14aa431bb10344c14c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 29 Mar 2022 08:43:38 +0200 Subject: [PATCH 063/207] flame: fix utf8 error `'unicode' object has no attribute 'getiterator'` --- openpype/hosts/flame/api/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 750609f7d6..d5790d2f10 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -783,7 +783,7 @@ class OpenClipSolver: if xml_clip.find("name").text in self.feed_basename: matching_clip = xml_clip - if not matching_clip: + if matching_clip is not None: # return warning there is missing clip raise ET.ParseError( "Missing clip in `{}`. 
Available clips {}".format( @@ -937,7 +937,7 @@ class OpenClipSolver: def _fix_xml_data(self, xml_data): xml_root = xml_data.getroot() self._clear_handler(xml_root) - return ET.tostring(xml_root).decode('utf-8') + return xml_root def _write_result_xml_to_file(self, file, xml_data): # save it as new file From 69f5ace08485f0aea46a586602629a21416b779c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 29 Mar 2022 08:46:50 +0200 Subject: [PATCH 064/207] flame: fix condition direction --- openpype/hosts/flame/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index d5790d2f10..464f5ce89b 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -783,7 +783,7 @@ class OpenClipSolver: if xml_clip.find("name").text in self.feed_basename: matching_clip = xml_clip - if matching_clip is not None: + if matching_clip is None: # return warning there is missing clip raise ET.ParseError( "Missing clip in `{}`. Available clips {}".format( From 3459cec3a9adf5537d11c5963e7b33ec9b5d5c2b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 29 Mar 2022 10:29:17 +0200 Subject: [PATCH 065/207] flame: task workdir for .clip when integrating batch --- openpype/hosts/flame/plugins/load/load_clip_batch.py | 2 +- .../flame/plugins/publish/integrate_batch_group.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 1f87f94cc6..252c92516d 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -48,7 +48,7 @@ class LoadClipBatch(opfapi.ClipLoader): colorspace = colorspace # create workfile path - workfile_dir = os.environ["AVALON_WORKDIR"] + workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"] openclip_dir = os.path.join( workfile_dir, clip_name ) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index eaab429111..7c61ed62b5 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -105,7 +105,9 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # load to flame by representation context try: op_pipeline.load.load_with_repre_context( - Loader, repre_context) + Loader, repre_context, **{ + "data": {"workdir": self.task_workdir} + }) except op_pipeline.load.IncompatibleLoaderError as msg: self.log.error( "Check allowed representations for Loader `{}` " @@ -192,12 +194,14 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): anatomy_data = self._get_anamoty_data_with_current_task( instance, task_data) - task_workfile_path = self._get_shot_task_dir_path(instance, task_data) - self.log.debug("__ task_workfile_path: {}".format(task_workfile_path)) + self.task_workdir = self._get_shot_task_dir_path( + instance, task_data) + self.log.debug("__ task_workdir: {}".format( + self.task_workdir)) # TODO: this might be done with template in settings render_dir_path = os.path.join( - task_workfile_path, "render", "flame") + self.task_workdir, "render", "flame") if not os.path.exists(render_dir_path): os.makedirs(render_dir_path, mode=0o777) From fe11ad9868cca57c6bc9ff34e8011beaba7989f4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 10:06:50 +0200 Subject: [PATCH 066/207] 
Remove unused website docs pages --- website/docs/api.md | 7 ------- website/docs/artist_hosts.md | 17 ----------------- website/docs/hosts-maya.md | 33 --------------------------------- 3 files changed, 57 deletions(-) delete mode 100644 website/docs/api.md delete mode 100644 website/docs/artist_hosts.md delete mode 100644 website/docs/hosts-maya.md diff --git a/website/docs/api.md b/website/docs/api.md deleted file mode 100644 index 7cad92d603..0000000000 --- a/website/docs/api.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -id: api -title: Pype API -sidebar_label: API ---- - -Work in progress diff --git a/website/docs/artist_hosts.md b/website/docs/artist_hosts.md deleted file mode 100644 index 609f6d97c8..0000000000 --- a/website/docs/artist_hosts.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -id: artist_hosts -title: Hosts -sidebar_label: Hosts ---- - -## Maya - -## Houdini - -## Nuke - -## Fusion - -## Unreal - -## System diff --git a/website/docs/hosts-maya.md b/website/docs/hosts-maya.md deleted file mode 100644 index 0ee0c2d86b..0000000000 --- a/website/docs/hosts-maya.md +++ /dev/null @@ -1,33 +0,0 @@ -### Tools -Creator -Publisher -Loader -Scene Inventory -Look assigner -Workfiles - -### Plugins -Deadline -Muster -Yeti -Arnold -Vray -Redshift - -### Families -Model -Look -Rig -Animation -Cache -Camera -Assembly -MayaAscii (generic scene) -Setdress -RenderSetup -Review -arnoldStandin -vrayProxy -vrayScene -yetiCache -yetiRig From ad7578fc7339ad2aaa885d616dc3e07a7a2df937 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 10:10:49 +0200 Subject: [PATCH 067/207] Remove unused `manager_naming.md` --- website/docs/manager_naming.md | 56 ---------------------------------- 1 file changed, 56 deletions(-) delete mode 100644 website/docs/manager_naming.md diff --git a/website/docs/manager_naming.md b/website/docs/manager_naming.md deleted file mode 100644 index bf822fbeb4..0000000000 --- a/website/docs/manager_naming.md +++ /dev/null @@ -1,56 +0,0 @@ ---- -id: manager_naming -title: Naming Conventions -sidebar_label: Naming Conventions ---- - -:::note -This naming convention holds true for most of our pipeline. Please match it as close as possible even for projects and files that might be outside of pipeline scope at this point. Small errors count! The reason for given formatting is to allow people to understand the file at glance and that a script or a program can easily get meaningful information from your files without errors. -::: - -## General rules - -For more detailed rules and different file types, have a look at naming conventions for scenes and assets - -- Every file starts with file code based on a project it belongs to e.g. ‘tst_’, ‘drm_’ -- Optional subversion and comment always comes after the major version. v##.subversion_comment. -- File names can only be composed of letters, numbers, underscores `_` and dots “.” -- You can use snakeCase or CamelCase if you need more words in a section.  thisIsLongerSentenceInComment -- No spaces in filenames. Ever! -- Frame numbers are always separated by a period ”.” -- If you're not sure use this template: - -## Work files - -**`{code}_{shot}_{task}_v001.ext`** - -**`{code}_{asset}_{task}_v001.ext`** - -**Examples:** - - prj_sh010_enviro_v001.ma - prj_sh010_animation_v001.ma - prj_sh010_comp_v001.nk - - prj_bob_modelling_v001.ma - prj_bob_rigging_v001.ma - prj_bob_lookdev_v001.ma - -:::info -In all of the examples anything enclosed in curly brackets  { } is compulsory in the name. -Anything in square brackets [ ] is optional. 
-::: - -## Published Assets - -**`{code}_{asset}_{family}_{subset}_{version}_[comment].ext`** - -**Examples:** - - prj_bob_model_main_v01.ma - prj_bob_model_hires_v01.ma - prj_bob_model_main_v01_clothes.ma - prj_bob_model_main_v01_body.ma - prj_bob_rig_main_v01.ma - Prj_bob_look_main_v01.ma - Prj_bob_look_wet_v01.ma From df6499868bca5b0a3eea579591f309b3e78e1f59 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 30 Mar 2022 12:22:43 +0200 Subject: [PATCH 068/207] flame: cleaning code --- openpype/hosts/flame/api/__init__.py | 4 ---- openpype/hosts/flame/api/lib.py | 17 +---------------- openpype/hosts/flame/api/scripts/wiretap_com.py | 2 +- openpype/hosts/flame/plugins/load/load_clip.py | 2 +- .../hosts/flame/plugins/load/load_clip_batch.py | 2 +- .../plugins/publish/integrate_batch_group.py | 3 +++ 6 files changed, 7 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 561aaab3de..28511458c2 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -11,10 +11,8 @@ from .constants import ( from .lib import ( CTX, FlameAppFramework, - get_project_manager, get_current_project, get_current_sequence, - create_bin, create_segment_data_marker, get_segment_data_marker, set_segment_data_marker, @@ -87,10 +85,8 @@ __all__ = [ # lib "CTX", "FlameAppFramework", - "get_project_manager", "get_current_project", "get_current_sequence", - "create_bin", "create_segment_data_marker", "get_segment_data_marker", "set_segment_data_marker", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index dd91252a00..7316fa1c5b 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -227,16 +227,6 @@ class FlameAppFramework(object): return True -def get_project_manager(): - # TODO: get_project_manager - return - - -def get_media_storage(): - # TODO: get_media_storage - return - - def get_current_project(): import flame return flame.project.current_project @@ -266,11 +256,6 @@ def get_current_sequence(selection): return process_timeline -def create_bin(name, root=None): - # TODO: create_bin - return - - def rescan_hooks(): import flame try: @@ -724,5 +709,5 @@ def get_batch_group_from_desktop(name): project_desktop = project.current_workspace.desktop for bgroup in project_desktop.batch_groups: - if bgroup.name.get_value() == name: + if bgroup.name.get_value() in name: return bgroup diff --git a/openpype/hosts/flame/api/scripts/wiretap_com.py b/openpype/hosts/flame/api/scripts/wiretap_com.py index 54993d34eb..14fbcec954 100644 --- a/openpype/hosts/flame/api/scripts/wiretap_com.py +++ b/openpype/hosts/flame/api/scripts/wiretap_com.py @@ -254,7 +254,7 @@ class WireTapCom(object): filtered_users = [user for user in used_names if user_name in user] if filtered_users: - # todo: need to find lastly created following regex pattern for + # TODO: need to find lastly created following regex pattern for # date used in name return filtered_users.pop() diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index b27600db1f..e0a7297381 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -39,7 +39,7 @@ class LoadClip(opfapi.ClipLoader): clip_name = self.clip_name_template.format( **context["representation"]["context"]) - # todo: settings in imageio + # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section 
colorspace = colorspace diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 252c92516d..3c13d88d3a 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -42,7 +42,7 @@ class LoadClipBatch(opfapi.ClipLoader): clip_name = self.clip_name_template.format( **context["representation"]["context"]) - # todo: settings in imageio + # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section colorspace = colorspace diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 7c61ed62b5..253a1d6192 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -166,8 +166,11 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): self.log.info( "Updating batch group: {}".format(batchgroup_name)) # update already created batch group + bgroup.name = batchgroup_name bgroup.start_frame = frame_start bgroup.duration = frame_duration + # TODO: also update write node if there is any + # TODO: also update loaders to start from correct frameStart return bgroup From 5580ef083bd51bba96e11e1d68156d9dbedc4809 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 30 Mar 2022 12:24:12 +0200 Subject: [PATCH 069/207] hound catch --- openpype/hosts/flame/plugins/publish/integrate_batch_group.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 253a1d6192..4dd6081170 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -218,7 +218,8 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # /path/to/file.[0001-0010].exr media_path = render_dir_path # name of file represented by tokens - media_path_pattern = "_v/_v." + media_path_pattern = ( + "_v/_v.") # The Create Open Clip attribute of the Write File node. \ # Determines if an Open Clip is created by the Write File node. 
create_clip = True From 2520ceca630d5985a9646e0002aaea352445cfd0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 22:12:00 +0200 Subject: [PATCH 070/207] Fix #2946: Avoid ImportError on `hdefereval` when Houdini runs without UI --- openpype/hosts/houdini/api/pipeline.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index d079c9ea81..31c82b1cfd 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -4,7 +4,6 @@ import logging import contextlib import hou -import hdefereval import pyblish.api import avalon.api @@ -305,7 +304,13 @@ def on_new(): start = hou.playbar.playbackRange()[0] hou.setFrame(start) - hdefereval.executeDeferred(_enforce_start_frame) + if hou.isUIAvailable(): + import hdefereval + hdefereval.executeDeferred(_enforce_start_frame) + else: + # Run without execute deferred when no UI is available because + # without UI `hdefereval` is not available to import + _enforce_start_frame() def _set_context_settings(): From 2d038efde5122b9c709f1fedb9808eb75343f92e Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 31 Mar 2022 13:34:38 +0300 Subject: [PATCH 071/207] fixes parameter name for readability --- openpype/pipeline/farm/patterning.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index e1c05df77f..6d3eb3e5ab 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -2,7 +2,7 @@ import re -def match_aov_pattern(app, aov_patterns, render_file_name): +def match_aov_pattern(host_name, aov_patterns, render_file_name): """Matching against a `AOV` pattern in the render files. In order to match the AOV name we must compare @@ -18,7 +18,7 @@ def match_aov_pattern(app, aov_patterns, render_file_name): Returns: bool: Review state for rendered file (render_file_name). """ - aov_pattern = aov_patterns.get(app, []) + aov_pattern = aov_patterns.get(host_name, []) if aov_pattern: if re.match(aov_pattern, render_file_name): preview = True From 995ff7b94ac12ec7c25b81655ebab99095fac529 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Thu, 31 Mar 2022 18:59:44 +0300 Subject: [PATCH 072/207] Updates match_aov_pattern() logic to handle empty regex Using `is not None` to simplify code and handle empty regex cases. Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/farm/patterning.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 6d3eb3e5ab..5ba7a8df4b 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -19,9 +19,6 @@ def match_aov_pattern(host_name, aov_patterns, render_file_name): bool: Review state for rendered file (render_file_name). 
""" aov_pattern = aov_patterns.get(host_name, []) - if aov_pattern: - if re.match(aov_pattern, render_file_name): - preview = True - return preview - else: - return False + if not aov_pattern: + return False + return re.match(aov_pattern, render_file_name) is not None From 986c3287494edb194724c402ae50615c38c07a42 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Sat, 2 Apr 2022 12:04:25 +0200 Subject: [PATCH 073/207] Resolve environment variable in credential path with accre --- openpype/modules/sync_server/providers/gdrive.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index 0b586613b5..6a8d2b3422 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -3,7 +3,7 @@ import os.path import time import sys import six -import platform +import acre from openpype.api import Logger from openpype.api import get_system_settings @@ -73,8 +73,13 @@ class GDriveHandler(AbstractProvider): format(site_name)) return - cred_path = self.presets.get("credentials_url", {}).\ - get(platform.system().lower()) or '' + cred_data = { + 'cred_path': self.presets.get("credentials_url", {}) + } + cred_data = acre.parse(cred_data) + cred_data = acre.merge(cred_data, current_env=os.environ) + cred_path = cred_data['cred_path'] + if not os.path.exists(cred_path): msg = "Sync Server: No credentials for gdrive provider " + \ "for '{}' on path '{}'!".format(site_name, cred_path) From 4235a7674f6b37ff3b13fa9083a0114f558acb06 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Mon, 4 Apr 2022 19:39:24 +0200 Subject: [PATCH 074/207] Use format to fill cred_path --- .../modules/sync_server/providers/gdrive.py | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index 6a8d2b3422..f7bb2d36df 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -3,7 +3,7 @@ import os.path import time import sys import six -import acre +import platform from openpype.api import Logger from openpype.api import get_system_settings @@ -73,12 +73,22 @@ class GDriveHandler(AbstractProvider): format(site_name)) return - cred_data = { - 'cred_path': self.presets.get("credentials_url", {}) - } - cred_data = acre.parse(cred_data) - cred_data = acre.merge(cred_data, current_env=os.environ) - cred_path = cred_data['cred_path'] + current_platform = platform.system().lower() + cred_path = self.presets.get("credentials_url", {}). 
\ + get(current_platform) or '' + + if not cred_path: + msg = "Sync Server: Please, fill the credentials for gdrive "\ + "provider for platform '{}' !".format(current_platform) + log.info(msg) + return + + try: + cred_path = cred_path.format(**os.environ) + except KeyError as e: + log.info("the key(s) {} does not exist in the environment " + "variables".format(" ".join(e.args))) + return if not os.path.exists(cred_path): msg = "Sync Server: No credentials for gdrive provider " + \ From 8896b36ef0c1a0295c09ef69097589aad765245b Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Mon, 4 Apr 2022 19:41:27 +0200 Subject: [PATCH 075/207] Replace t by T in log message --- openpype/modules/sync_server/providers/gdrive.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index f7bb2d36df..d6369d39e6 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -86,7 +86,7 @@ class GDriveHandler(AbstractProvider): try: cred_path = cred_path.format(**os.environ) except KeyError as e: - log.info("the key(s) {} does not exist in the environment " + log.info("The key(s) {} does not exist in the environment " "variables".format(" ".join(e.args))) return From 57ecd9adfaadc7e81e686a0bb74d68efaaf85b61 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Mon, 4 Apr 2022 19:43:13 +0200 Subject: [PATCH 076/207] Better log message with Sync Server --- openpype/modules/sync_server/providers/gdrive.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index d6369d39e6..b783f7958b 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -86,8 +86,8 @@ class GDriveHandler(AbstractProvider): try: cred_path = cred_path.format(**os.environ) except KeyError as e: - log.info("The key(s) {} does not exist in the environment " - "variables".format(" ".join(e.args))) + log.info("Sync Server: The key(s) {} does not exist in the " + "environment variables".format(" ".join(e.args))) return if not os.path.exists(cred_path): From 1926e107659790d10a63770f70b72a6f7cf88ef1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 4 Apr 2022 20:50:58 +0200 Subject: [PATCH 077/207] flame: redundant code --- openpype/hosts/flame/plugins/publish/integrate_batch_group.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 4dd6081170..fc5f4cfcd0 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -7,7 +7,7 @@ import openpype.hosts.flame.api as opfapi import openpype.pipeline as op_pipeline -@pyblish.api.log + class IntegrateBatchGroup(pyblish.api.InstancePlugin): """Integrate published shot to batch group""" From 54897163caee3dbb783bcafd68b7648c99434c9d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 4 Apr 2022 20:52:09 +0200 Subject: [PATCH 078/207] haunch catch --- openpype/hosts/flame/plugins/publish/integrate_batch_group.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index fc5f4cfcd0..a9ccd6b4a1 100644 --- 
a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -7,7 +7,6 @@ import openpype.hosts.flame.api as opfapi import openpype.pipeline as op_pipeline - class IntegrateBatchGroup(pyblish.api.InstancePlugin): """Integrate published shot to batch group""" From 0dfca2ff4589e996708c9d27108e282d9107d847 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Apr 2022 11:38:00 +0200 Subject: [PATCH 079/207] Flame: refining the code for better understanding of flow --- .../plugins/publish/integrate_batch_group.py | 78 ++++++++----------- 1 file changed, 33 insertions(+), 45 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index a9ccd6b4a1..979134bbfe 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -38,44 +38,40 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): def _load_clip_to_context(self, instance, bgroup): # get all loaders for host - loaders = op_pipeline.discover_loader_plugins() + loaders_by_name = { + loader.__name__: loader + for loader in op_pipeline.discover_loader_plugins() + } # get all published representations published_representations = instance.data["published_representations"] + repres_db_id_by_name = { + repre_info["representation"]["name"]: repre_id + for repre_id, repre_info in published_representations.items() + } # get all loadable representations - representations = instance.data["representations"] + repres_by_name = { + repre["name"]: repre for repre in instance.data["representations"] + } # get repre_id for the loadable representations - loadable_representations = [ - { - "name": _repr["name"], - "loader": _repr.get("batch_group_loader_name"), - # match loader to the loadable representation - "_id": next( - ( - id - for id, repr in published_representations.items() - if repr["representation"]["name"] == _repr["name"] - ), - None - ) + loader_name_by_repre_id = { + repres_db_id_by_name[repr_name]: { + "loader": repr_data["batch_group_loader_name"], + # add repre data for exception logging + "_repre_data": repr_data } - for _repr in representations - if _repr.get("load_to_batch_group") is not None - ] + for repr_name, repr_data in repres_by_name.items() + if repr_data.get("load_to_batch_group") + } - self.log.debug("__ loadable_representations: {}".format(pformat( - loadable_representations))) + self.log.debug("__ loader_name_by_repre_id: {}".format(pformat( + loader_name_by_repre_id))) # get representation context from the repre_id - representation_ids = [ - repre["_id"] - for repre in loadable_representations - if repre["_id"] is not None - ] repre_contexts = op_pipeline.load.get_repres_contexts( - representation_ids) + loader_name_by_repre_id.keys()) self.log.debug("__ repre_contexts: {}".format(pformat( repre_contexts))) @@ -84,45 +80,37 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): for repre_id, repre_context in repre_contexts.items(): self.log.debug("__ repre_id: {}".format(repre_id)) # get loader name by representation id - loader_name = next( - ( - repr["loader"] - for repr in loadable_representations - if repr["_id"] == repre_id - )) or self.default_loader + loader_name = ( + loader_name_by_repre_id[repre_id]["loader"] + # if nothing was added to settings fallback to default + or self.default_loader + ) # get loader plugin - Loader = next( - ( - loader_plugin - for 
loader_plugin in loaders - if loader_plugin.__name__ == loader_name - ), - None - ) - if Loader: + loader_plugin = loaders_by_name.get(loader_name) + if loader_plugin: # load to flame by representation context try: op_pipeline.load.load_with_repre_context( - Loader, repre_context, **{ + loader_plugin, repre_context, **{ "data": {"workdir": self.task_workdir} }) except op_pipeline.load.IncompatibleLoaderError as msg: self.log.error( "Check allowed representations for Loader `{}` " "in settings > error: {}".format( - Loader.__name__, msg)) + loader_plugin.__name__, msg)) self.log.error( "Representaton context >>{}<< is not compatible " "with loader `{}`".format( - pformat(repre_context), Loader.__name__ + pformat(repre_context), loader_plugin.__name__ ) ) else: self.log.warning( "Something got wrong and there is not Loader found for " "following data: {}".format( - pformat(loadable_representations)) + pformat(loader_name_by_repre_id)) ) def _get_batch_group(self, instance, task_data): From 5b260afc6af67bbcda403e7ce35922e7a8350ff6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Apr 2022 15:47:57 +0200 Subject: [PATCH 080/207] flame: avoid hidden segment processing --- openpype/hosts/flame/otio/flame_export.py | 43 +++++++++++------------ 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 8c240fc9d5..f9dbe68421 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -401,8 +401,10 @@ def get_clips_in_reels(project): version = clip.versions[-1] track = version.tracks[-1] + # each reel clip is also having one segment for segment in track.segments: - segment_data = _get_segment_attributes(segment) + segment_data = _get_segment_attributes( + segment, from_clip=True) clip_data.update(segment_data) output_clips.append(clip_data) @@ -489,12 +491,14 @@ def add_otio_metadata(otio_item, item, **kwargs): otio_item.metadata.update({key: value}) -def _get_shot_tokens_values(clip, tokens): +def _get_shot_tokens_values(clip, tokens, from_clip=False): old_value = None output = {} - if not clip.shot_name: - return output + # in case it is segment from reel clip + # avoiding duplicity of segement data + if from_clip: + return {} old_value = clip.shot_name.get_value() @@ -512,16 +516,19 @@ def _get_shot_tokens_values(clip, tokens): return output -def _get_segment_attributes(segment): +def _get_segment_attributes(segment, from_clip=False): # log.debug(dir(segment)) - - if str(segment.name)[1:-1] == "": + if ( + segment.name.get_value() == "" + or segment.hidden + ): return None # Add timeline segment to tree clip_data = { "segment_name": segment.name.get_value(), "segment_comment": segment.comment.get_value(), + "shot_name": segment.shot_name.get_value(), "tape_name": segment.tape_name, "source_name": segment.source_name, "fpath": segment.file_path, @@ -531,7 +538,7 @@ def _get_segment_attributes(segment): # add all available shot tokens shot_tokens = _get_shot_tokens_values(segment, [ "", "", "", "", - ]) + ], from_clip) clip_data.update(shot_tokens) # populate shot source metadata @@ -597,11 +604,7 @@ def create_otio_timeline(sequence): continue all_segments.append(clip_data) - segments_ordered = { - itemindex: clip_data - for itemindex, clip_data in enumerate( - all_segments) - } + segments_ordered = dict(enumerate(all_segments)) log.debug("_ segments_ordered: {}".format( pformat(segments_ordered) )) @@ -612,15 +615,11 @@ def 
create_otio_timeline(sequence): log.debug("_ itemindex: {}".format(itemindex)) # Add Gap if needed - if itemindex == 0: - # if it is first track item at track then add - # it to previous item - prev_item = segment_data - - else: - # get previous item - prev_item = segments_ordered[itemindex - 1] - + prev_item = ( + segment_data + if itemindex == 0 + else segments_ordered[itemindex - 1] + ) log.debug("_ segment_data: {}".format(segment_data)) # calculate clip frame range difference from each other From 575898490f4aad68256207da0dea4f9960e2948f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Apr 2022 15:57:16 +0200 Subject: [PATCH 081/207] flame: fixing broken get_clips_in_reels --- openpype/hosts/flame/otio/flame_export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index f9dbe68421..78e5ceecb6 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -405,7 +405,8 @@ def get_clips_in_reels(project): for segment in track.segments: segment_data = _get_segment_attributes( segment, from_clip=True) - clip_data.update(segment_data) + if segment_data: + clip_data.update(segment_data) output_clips.append(clip_data) From 8d4541d68da458f5121029353119ab8ae7ff4791 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Apr 2022 16:29:04 +0200 Subject: [PATCH 082/207] flame: hidden attribute is PyAttribute so need to get value --- openpype/hosts/flame/otio/flame_export.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 78e5ceecb6..1b5980b40a 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -518,10 +518,13 @@ def _get_shot_tokens_values(clip, tokens, from_clip=False): def _get_segment_attributes(segment, from_clip=False): - # log.debug(dir(segment)) + + log.debug("Segment name|hidden: {}|{}".format( + segment.name.get_value(), segment.hidden + )) if ( segment.name.get_value() == "" - or segment.hidden + or segment.hidden.get_value() ): return None @@ -591,7 +594,12 @@ def create_otio_timeline(sequence): # create otio tracks and clips for ver in sequence.versions: for track in ver.tracks: - if len(track.segments) == 0 and track.hidden: + # avoid all empty tracks + # or hidden tracks + if ( + len(track.segments) == 0 + or track.hidden.get_value() + ): return None # convert track to otio From 60118298b6c3dcf2f41488624a3b6d3bf9166990 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Apr 2022 16:29:31 +0200 Subject: [PATCH 083/207] flame: make reel clip validation optional --- openpype/hosts/flame/plugins/publish/validate_source_clip.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/validate_source_clip.py b/openpype/hosts/flame/plugins/publish/validate_source_clip.py index 9ff015f628..345c00e05a 100644 --- a/openpype/hosts/flame/plugins/publish/validate_source_clip.py +++ b/openpype/hosts/flame/plugins/publish/validate_source_clip.py @@ -9,6 +9,8 @@ class ValidateSourceClip(pyblish.api.InstancePlugin): label = "Validate Source Clip" hosts = ["flame"] families = ["clip"] + optional = True + active = False def process(self, instance): flame_source_clip = instance.data["flameSourceClip"] From 818c3fe91f420de9b08ff4ebfc1bdae301a42927 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Apr 2022 16:58:25 +0200 Subject: 
[PATCH 084/207] flame: fallback if reel clip is not available --- .../publish/extract_subset_resources.py | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 31f7b6d574..341f12be16 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -63,7 +63,10 @@ class ExtractSubsetResources(openpype.api.Extractor): segment = instance.data["item"] sequence_clip = instance.context.data["flameSequence"] clip_data = instance.data["flameSourceClip"] - clip = clip_data["PyClip"] + + reel_clip = None + if clip_data: + reel_clip = clip_data["PyClip"] # segment's parent track name s_track_name = segment.parent.name.get_value() @@ -127,8 +130,20 @@ class ExtractSubsetResources(openpype.api.Extractor): in_mark = (source_start_handles - source_first_frame) + 1 out_mark = in_mark + source_duration_handles + # make test for type of preset and available reel_clip + if ( + not reel_clip + and export_type != "Sequence Publish" + ): + self.log.warning(( + "Skipping preset {}. Not available " + "reel clip for {}").format( + preset_file, segment.name.get_value() + )) + continue + # by default export source clips - exporting_clip = clip + exporting_clip = reel_clip if export_type == "Sequence Publish": # change export clip to sequence @@ -344,7 +359,7 @@ class ExtractSubsetResources(openpype.api.Extractor): # create otio tracks and clips for ver in sequence_clip.versions: for track in ver.tracks: - if len(track.segments) == 0 and track.hidden: + if len(track.segments) == 0 and track.hidden.get_value(): continue if track.name.get_value() != track_name: From 246127c73b7d06bf5e7a2b68a7fb0bd31e949b22 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Apr 2022 17:20:41 +0200 Subject: [PATCH 085/207] flame: hidding all unrelated segments and tracks --- .../plugins/publish/extract_subset_resources.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 341f12be16..a780f8c9e5 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -61,6 +61,7 @@ class ExtractSubsetResources(openpype.api.Extractor): # flame objects segment = instance.data["item"] + segment_name = segment.name.get_value() sequence_clip = instance.context.data["flameSequence"] clip_data = instance.data["flameSourceClip"] @@ -138,7 +139,7 @@ class ExtractSubsetResources(openpype.api.Extractor): self.log.warning(( "Skipping preset {}. 
Not available " "reel clip for {}").format( - preset_file, segment.name.get_value() + preset_file, segment_name )) continue @@ -175,7 +176,7 @@ class ExtractSubsetResources(openpype.api.Extractor): if export_type == "Sequence Publish": # only keep visible layer where instance segment is child - self.hide_other_tracks(duplclip, s_track_name) + self.hide_others(duplclip, segment_name, s_track_name) # validate xml preset file is filled if preset_file == "": @@ -349,11 +350,12 @@ class ExtractSubsetResources(openpype.api.Extractor): return new_stage_dir, new_files_list - def hide_other_tracks(self, sequence_clip, track_name): + def hide_others(self, sequence_clip, segment_name, track_name): """Helper method used only if sequence clip is used Args: sequence_clip (flame.Clip): sequence clip + segment_name (str): segment name track_name (str): track name """ # create otio tracks and clips @@ -362,5 +364,12 @@ class ExtractSubsetResources(openpype.api.Extractor): if len(track.segments) == 0 and track.hidden.get_value(): continue + # hide tracks which are not parent track if track.name.get_value() != track_name: track.hidden = True + continue + + # hidde all other segments + for segment in track.segments: + if segment.name.get_value() != segment_name: + segment.hidden = True From 7bd1f630e732ca0ea92ebbc1ecf8646b90de0c7f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 12:05:41 +0200 Subject: [PATCH 086/207] moved check of representations earlier --- .../plugins/publish/integrate_ftrack_instances.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index b54db918a6..b9a486d9da 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -40,6 +40,13 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): def process(self, instance): self.log.debug("instance {}".format(instance)) + instance_repres = instance.data.get("representations") + if not instance_repres: + self.log.info(( + "Skipping instance. Does not have any representations {}" + ).format(str(instance))) + return + instance_version = instance.data.get("version") if instance_version is None: raise ValueError("Instance version not set") @@ -64,13 +71,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ).format(family)) return - instance_repres = instance.data.get("representations") - if not instance_repres: - self.log.info(( - "Skipping instance. 
Does not have any representations {}" - ).format(str(instance))) - return - # Prepare FPS instance_fps = instance.data.get("fps") if instance_fps is None: From 66209b27cd0ad2efd90655ad352b66a2a043100a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 12:05:56 +0200 Subject: [PATCH 087/207] added default asset type into integrate ftrack instances --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index b9a486d9da..5ea0469bce 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -60,8 +60,12 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): if not asset_type and family_low in self.family_mapping: asset_type = self.family_mapping[family_low] - self.log.debug(self.family_mapping) - self.log.debug(family_low) + if not asset_type: + asset_type = "upload" + + self.log.debug( + "Family: {}\nMapping: {}".format(family_low, self.family_mapping) + ) # Ignore this instance if neither "ftrackFamily" or a family mapping is # found. From b9dc19a046f061c9b495fe39fb812154e8cdaf5b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 12:14:37 +0200 Subject: [PATCH 088/207] prepared functions to separate process function --- .../plugins/publish/integrate_ftrack_api.py | 346 ++++++++++++++++++ 1 file changed, 346 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 6c25b9191e..8ea2d8411b 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -464,3 +464,349 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): for asset_version in used_asset_versions: if asset_version not in instance.data[asset_versions_key]: instance.data[asset_versions_key].append(asset_version) + + def _ensure_asset_types_exists(self, session, component_list): + """Make sure that all AssetType entities exists for integration. + + Returns: + dict: All asset types by short name. 
+ """ + # Query existing asset types + asset_types = session.query("select id, short from AssetType").all() + # Stpore all existing short names + asset_type_shorts = {asset_type["short"] for asset_type in asset_types} + # Check which asset types are missing and store them + asset_type_names_by_missing_shorts = {} + default_short_name = "upload" + for data in component_list: + asset_type_data = data.get("assettype_data") or {} + asset_type_short = asset_type_data.get("short") + if not asset_type_short: + # Use default asset type name if not set and change the + # input data + asset_type_short = default_short_name + asset_type_data["short"] = asset_type_short + data["assettype_data"] = asset_type_data + + if ( + # Skip if short name exists + asset_type_short in asset_type_shorts + # Skip if short name was already added to missing types + # and asset type name is filled + # - if asset type name is missing then try use name from other + # data + or asset_type_names_by_missing_shorts.get(asset_type_short) + ): + continue + + asset_type_names_by_missing_shorts[asset_type_short] = ( + asset_type_data.get("name") + ) + + # Create missing asset types if there are any + if asset_type_names_by_missing_shorts: + self.log.info("Creating asset types with short names: {}".format( + ", ".join(asset_type_names_by_missing_shorts.keys()) + )) + for missing_short, type_name in asset_type_names_by_missing_shorts: + # Use short for name if name is not defined + if not type_name: + type_name = missing_short + # Use short name also for name + # - there is not other source for 'name' + session.create( + "AssetType", + { + "short": missing_short, + "name": type_name + } + ) + + # Commit creation + session.commit() + # Requery asset types + asset_types = session.query( + "select id, short from AssetType" + ).all() + + return {asset_type["short"]: asset_type for asset_type in asset_types} + + def _ensure_asset_exists( + self, session, asset_data, asset_type_id, parent_id + ): + asset_name = asset_data["name"] + asset_entity = self._query_asset( + session, asset_name, asset_type_id, parent_id + ) + if asset_entity is not None: + return asset_entity + + asset_data = { + "name": asset_name, + "type_id": asset_type_id, + "context_id": parent_id + } + self.log.info("Created new Asset with data: {}.".format(asset_data)) + session.create("Asset", asset_data) + session.commit() + return self._query_asset(session, asset_name, asset_type_id, parent_id) + + def _query_asset(self, session, asset_name, asset_type_id, parent_id): + return session.query( + ( + "select id from Asset" + " where name is \"{}\"" + " and type_id is \"{}\"" + " and context_id is \"{}\"" + ).format(asset_name, asset_type_id, parent_id) + ).first() + + def _ensure_asset_version_exists( + self, session, asset_version_data, asset_id, task_entity + ): + task_id = None + if task_entity: + task_id = task_entity["id"] + + # Try query asset version by criteria (asset id and version) + version = asset_version_data.get("version") or 0 + asset_version_entity = self._query_asset_version( + session, version, asset_id + ) + + # Prepare comment value + comment = asset_version_data.get("comment") or "" + if asset_version_entity is not None: + changed = False + if comment != asset_version_entity["comment"]: + asset_version_entity["comment"] = comment + changed = True + + if task_id != asset_version_entity["task_id"]: + asset_version_entity["task_id"] = task_id + changed = True + + if changed: + session.commit() + + else: + new_asset_version_data = { + "version": version, 
+ "asset_id": asset_id + } + if task_id: + new_asset_version_data["task_id"] = task_id + + if comment: + new_asset_version_data["comment"] = comment + + self.log.info("Created new AssetVersion with data {}".format( + new_asset_version_data + )) + session.create("AssetVersion", new_asset_version_data) + session.commit() + asset_version_entity = self._query_asset_version( + session, version, asset_id + ) + + # Set custom attributes if there were any set + custom_attrs = asset_version_data.get("custom_attributes") or {} + for attr_key, attr_value in custom_attrs.items(): + if attr_key in asset_version_entity["custom_attributes"]: + try: + asset_version_entity["custom_attributes"][attr_key] = ( + attr_value + ) + session.commit() + continue + except Exception: + session.rollback() + session._configure_locations() + + self.log.warning( + ( + "Custom Attrubute \"{0}\" is not available for" + " AssetVersion <{1}>. Can't set it's value to: \"{2}\"" + ).format( + attr_key, asset_version_entity["id"], str(attr_value) + ) + ) + + return asset_version_entity + + def _query_asset_version(self, session, version, asset_id): + return session.query( + ( + "select id, task_id, comment from AssetVersion" + " where version is \"{}\" and asset_id is \"{}\"" + ).format(version, asset_id) + ).first() + + def create_component(self, session, asset_version_entity, data): + component_data = data.get("component_data") or {} + + if not component_data.get("name"): + component_data["name"] = "main" + + version_id = asset_version_entity["id"] + component_data["version_id"] = version_id + component_entity = session.query( + ( + "select id, name from Component where name is \"{}\"" + " and version_id is \"{}\"" + ).format(component_data["name"], version_id) + ).first() + + component_overwrite = data.get("component_overwrite", False) + location = data.get("component_location", session.pick_location()) + + # Overwrite existing component data if requested. + if component_entity and component_overwrite: + origin_location = session.query( + "Location where name is \"ftrack.origin\"" + ).one() + + # Removing existing members from location + components = list(component_entity.get("members", [])) + components += [component_entity] + for component in components: + for loc in component["component_locations"]: + if location["id"] == loc["location_id"]: + location.remove_component( + component, recursive=False + ) + + # Deleting existing members on component entity + for member in component_entity.get("members", []): + session.delete(member) + del(member) + + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + session._configure_locations() + six.reraise(tp, value, tb) + + # Reset members in memory + if "members" in component_entity.keys(): + component_entity["members"] = [] + + # Add components to origin location + try: + collection = clique.parse(data["component_path"]) + except ValueError: + # Assume its a single file + # Changing file type + name, ext = os.path.splitext(data["component_path"]) + component_entity["file_type"] = ext + + origin_location.add_component( + component_entity, data["component_path"] + ) + else: + # Changing file type + component_entity["file_type"] = collection.format("{tail}") + + # Create member components for sequence. 
+ for member_path in collection: + + size = 0 + try: + size = os.path.getsize(member_path) + except OSError: + pass + + name = collection.match(member_path).group("index") + + member_data = { + "name": name, + "container": component_entity, + "size": size, + "file_type": os.path.splitext(member_path)[-1] + } + + component = session.create( + "FileComponent", member_data + ) + origin_location.add_component( + component, member_path, recursive=False + ) + component_entity["members"].append(component) + + # Add components to location. + location.add_component( + component_entity, origin_location, recursive=True + ) + + data["component"] = component_entity + self.log.info( + ( + "Overwriting Component with path: {0}, data: {1}," + " location: {2}" + ).format( + data["component_path"], + component_data, + location + ) + ) + + # Extracting metadata, and adding after entity creation. This is + # due to a ftrack_api bug where you can't add metadata on creation. + component_metadata = component_data.pop("metadata", {}) + + # Create new component if none exists. + new_component = False + if not component_entity: + component_entity = asset_version_entity.create_component( + data["component_path"], + data=component_data, + location=location + ) + data["component"] = component_entity + self.log.info( + ( + "Created new Component with path: {0}, data: {1}," + " metadata: {2}, location: {3}" + ).format( + data["component_path"], + component_data, + component_metadata, + location + ) + ) + new_component = True + + # Adding metadata + existing_component_metadata = component_entity["metadata"] + existing_component_metadata.update(component_metadata) + component_entity["metadata"] = existing_component_metadata + + # if component_data['name'] = 'ftrackreview-mp4-mp4': + # assetversion_entity["thumbnail_id"] + + # Setting assetversion thumbnail + if data.get("thumbnail"): + asset_version_entity["thumbnail_id"] = component_entity["id"] + + # Inform user about no changes to the database. + if ( + component_entity + and not component_overwrite + and not new_component + ): + data["component"] = component_entity + self.log.info( + "Found existing component, and no request to overwrite. " + "Nothing has been changed." + ) + else: + # Commit changes. 
+ try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + session._configure_locations() + six.reraise(tp, value, tb) From ad4fe059e8b90d4e2bda589c319b3ed2e5d94812 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 12:15:40 +0200 Subject: [PATCH 089/207] simplified process function of integrate ftrack api --- .../plugins/publish/integrate_ftrack_api.py | 468 ++++-------------- 1 file changed, 103 insertions(+), 365 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 8ea2d8411b..7bba93c7cd 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -1,3 +1,15 @@ +"""Integrate components into ftrack + +Requires: + context -> ftrackSession - connected ftrack.Session + instance -> ftrackComponentsList - list of components to integrate + +Provides: + instance -> ftrackIntegratedAssetVersionsData + # legacy + instance -> ftrackIntegratedAssetVersions +""" + import os import sys import six @@ -54,6 +66,97 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): self.log.debug(query) return query + def process(self, instance): + session = instance.context.data["ftrackSession"] + context = instance.context + component_list = instance.data.get("ftrackComponentsList") + if not component_list: + self.log.info( + "Instance don't have components to integrate to Ftrack." + " Skipping." + ) + return + + session = instance.context.data["ftrackSession"] + context = instance.context + + parent_entity = None + default_asset_name = None + # If instance has set "ftrackEntity" or "ftrackTask" then use them from + # instance. Even if they are set to None. If they are set to None it + # has a reason. (like has different context) + if "ftrackEntity" in instance.data or "ftrackTask" in instance.data: + task_entity = instance.data.get("ftrackTask") + parent_entity = instance.data.get("ftrackEntity") + + elif "ftrackEntity" in context.data or "ftrackTask" in context.data: + task_entity = context.data.get("ftrackTask") + parent_entity = context.data.get("ftrackEntity") + + if task_entity: + default_asset_name = task_entity["name"] + parent_entity = task_entity["parent"] + + if parent_entity is None: + self.log.info(( + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." 
+ ).format(str(instance))) + return + + if not default_asset_name: + default_asset_name = parent_entity["name"] + + # Change status on task + self._set_task_status(instance, task_entity, session) + + # Prepare AssetTypes + asset_types_by_short = self._ensure_asset_types_exists( + session, component_list + ) + + used_asset_versions = [] + # Iterate over components and publish + for data in component_list: + self.log.debug("data: {}".format(data)) + + # AssetType + asset_type_short = data["assettype_data"]["short"] + asset_type_entity = asset_types_by_short[asset_type_short] + + # Asset + asset_data = data.get("asset_data") or {} + if "name" not in asset_data: + asset_data["name"] = default_asset_name + asset_entity = self._ensure_asset_exists( + session, + asset_data, + asset_type_entity["id"], + parent_entity["id"] + ) + + # Asset Version + asset_version_data = data.get("assetversion_data") or {} + asset_version_entity = self._ensure_asset_version_exists( + session, asset_version_data, asset_entity["id"], task_entity + ) + + # Component + self.create_component(session, asset_version_entity, data) + + + # Backwards compatibility + if asset_version_entity not in used_asset_versions: + used_asset_versions.append(asset_version_entity) + + asset_versions_key = "ftrackIntegratedAssetVersions" + if asset_versions_key not in instance.data: + instance.data[asset_versions_key] = [] + + for asset_version in used_asset_versions: + if asset_version not in instance.data[asset_versions_key]: + instance.data[asset_versions_key].append(asset_version) + def _set_task_status(self, instance, task_entity, session): project_entity = instance.context.data.get("ftrackProject") if not project_entity: @@ -100,371 +203,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session._configure_locations() six.reraise(tp, value, tb) - def process(self, instance): - session = instance.context.data["ftrackSession"] - context = instance.context - - name = None - # If instance has set "ftrackEntity" or "ftrackTask" then use them from - # instance. Even if they are set to None. If they are set to None it - # has a reason. (like has different context) - if "ftrackEntity" in instance.data or "ftrackTask" in instance.data: - task = instance.data.get("ftrackTask") - parent = instance.data.get("ftrackEntity") - - elif "ftrackEntity" in context.data or "ftrackTask" in context.data: - task = context.data.get("ftrackTask") - parent = context.data.get("ftrackEntity") - - if task: - parent = task["parent"] - name = task - elif parent: - name = parent["name"] - - if not name: - self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." - ).format(str(instance))) - return - - info_msg = ( - "Created new {entity_type} with data: {data}" - ", metadata: {metadata}." - ) - - used_asset_versions = [] - - self._set_task_status(instance, task, session) - - # Iterate over components and publish - for data in instance.data.get("ftrackComponentsList", []): - # AssetType - # Get existing entity. - assettype_data = {"short": "upload"} - assettype_data.update(data.get("assettype_data", {})) - self.log.debug("data: {}".format(data)) - - assettype_entity = session.query( - self.query("AssetType", assettype_data) - ).first() - - # Create a new entity if none exits. - if not assettype_entity: - assettype_entity = session.create("AssetType", assettype_data) - self.log.debug("Created new AssetType with data: {}".format( - assettype_data - )) - - # Asset - # Get existing entity. 
- asset_data = { - "name": name, - "type": assettype_entity, - "parent": parent, - } - asset_data.update(data.get("asset_data", {})) - - asset_entity = session.query( - self.query("Asset", asset_data) - ).first() - - self.log.info("asset entity: {}".format(asset_entity)) - - # Extracting metadata, and adding after entity creation. This is - # due to a ftrack_api bug where you can't add metadata on creation. - asset_metadata = asset_data.pop("metadata", {}) - - # Create a new entity if none exits. - if not asset_entity: - asset_entity = session.create("Asset", asset_data) - self.log.debug( - info_msg.format( - entity_type="Asset", - data=asset_data, - metadata=asset_metadata - ) - ) - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) - - # Adding metadata - existing_asset_metadata = asset_entity["metadata"] - existing_asset_metadata.update(asset_metadata) - asset_entity["metadata"] = existing_asset_metadata - - # AssetVersion - # Get existing entity. - assetversion_data = { - "version": 0, - "asset": asset_entity, - } - _assetversion_data = data.get("assetversion_data", {}) - assetversion_cust_attrs = _assetversion_data.pop( - "custom_attributes", {} - ) - asset_version_comment = _assetversion_data.pop( - "comment", None - ) - assetversion_data.update(_assetversion_data) - - assetversion_entity = session.query( - self.query("AssetVersion", assetversion_data) - ).first() - - # Extracting metadata, and adding after entity creation. This is - # due to a ftrack_api bug where you can't add metadata on creation. - assetversion_metadata = assetversion_data.pop("metadata", {}) - - if task: - assetversion_data['task'] = task - - # Create a new entity if none exits. - if not assetversion_entity: - assetversion_entity = session.create( - "AssetVersion", assetversion_data - ) - self.log.debug( - info_msg.format( - entity_type="AssetVersion", - data=assetversion_data, - metadata=assetversion_metadata - ) - ) - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) - - # Adding metadata - existing_assetversion_metadata = assetversion_entity["metadata"] - existing_assetversion_metadata.update(assetversion_metadata) - assetversion_entity["metadata"] = existing_assetversion_metadata - - # Add comment - if asset_version_comment: - assetversion_entity["comment"] = asset_version_comment - try: - session.commit() - except Exception: - session.rollback() - session._configure_locations() - self.log.warning(( - "Comment was not possible to set for AssetVersion" - "\"{0}\". Can't set it's value to: \"{1}\"" - ).format( - assetversion_entity["id"], str(asset_version_comment) - )) - - # Adding Custom Attributes - for attr, val in assetversion_cust_attrs.items(): - if attr in assetversion_entity["custom_attributes"]: - try: - assetversion_entity["custom_attributes"][attr] = val - session.commit() - continue - except Exception: - session.rollback() - session._configure_locations() - - self.log.warning(( - "Custom Attrubute \"{0}\"" - " is not available for AssetVersion <{1}>." - " Can't set it's value to: \"{2}\"" - ).format(attr, assetversion_entity["id"], str(val))) - - # Have to commit the version and asset, because location can't - # determine the final location without. 
- try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) - - # Component - # Get existing entity. - component_data = { - "name": "main", - "version": assetversion_entity - } - component_data.update(data.get("component_data", {})) - - component_entity = session.query( - self.query("Component", component_data) - ).first() - - component_overwrite = data.get("component_overwrite", False) - location = data.get("component_location", session.pick_location()) - - # Overwrite existing component data if requested. - if component_entity and component_overwrite: - - origin_location = session.query( - "Location where name is \"ftrack.origin\"" - ).one() - - # Removing existing members from location - components = list(component_entity.get("members", [])) - components += [component_entity] - for component in components: - for loc in component["component_locations"]: - if location["id"] == loc["location_id"]: - location.remove_component( - component, recursive=False - ) - - # Deleting existing members on component entity - for member in component_entity.get("members", []): - session.delete(member) - del(member) - - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) - - # Reset members in memory - if "members" in component_entity.keys(): - component_entity["members"] = [] - - # Add components to origin location - try: - collection = clique.parse(data["component_path"]) - except ValueError: - # Assume its a single file - # Changing file type - name, ext = os.path.splitext(data["component_path"]) - component_entity["file_type"] = ext - - origin_location.add_component( - component_entity, data["component_path"] - ) - else: - # Changing file type - component_entity["file_type"] = collection.format("{tail}") - - # Create member components for sequence. - for member_path in collection: - - size = 0 - try: - size = os.path.getsize(member_path) - except OSError: - pass - - name = collection.match(member_path).group("index") - - member_data = { - "name": name, - "container": component_entity, - "size": size, - "file_type": os.path.splitext(member_path)[-1] - } - - component = session.create( - "FileComponent", member_data - ) - origin_location.add_component( - component, member_path, recursive=False - ) - component_entity["members"].append(component) - - # Add components to location. - location.add_component( - component_entity, origin_location, recursive=True - ) - - data["component"] = component_entity - msg = "Overwriting Component with path: {0}, data: {1}, " - msg += "location: {2}" - self.log.info( - msg.format( - data["component_path"], - component_data, - location - ) - ) - - # Extracting metadata, and adding after entity creation. This is - # due to a ftrack_api bug where you can't add metadata on creation. - component_metadata = component_data.pop("metadata", {}) - - # Create new component if none exists. 
- new_component = False - if not component_entity: - component_entity = assetversion_entity.create_component( - data["component_path"], - data=component_data, - location=location - ) - data["component"] = component_entity - msg = "Created new Component with path: {0}, data: {1}" - msg += ", metadata: {2}, location: {3}" - self.log.info( - msg.format( - data["component_path"], - component_data, - component_metadata, - location - ) - ) - new_component = True - - # Adding metadata - existing_component_metadata = component_entity["metadata"] - existing_component_metadata.update(component_metadata) - component_entity["metadata"] = existing_component_metadata - - # if component_data['name'] = 'ftrackreview-mp4-mp4': - # assetversion_entity["thumbnail_id"] - - # Setting assetversion thumbnail - if data.get("thumbnail", False): - assetversion_entity["thumbnail_id"] = component_entity["id"] - - # Inform user about no changes to the database. - if (component_entity and not component_overwrite and - not new_component): - data["component"] = component_entity - self.log.info( - "Found existing component, and no request to overwrite. " - "Nothing has been changed." - ) - else: - # Commit changes. - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) - - if assetversion_entity not in used_asset_versions: - used_asset_versions.append(assetversion_entity) - - asset_versions_key = "ftrackIntegratedAssetVersions" - if asset_versions_key not in instance.data: - instance.data[asset_versions_key] = [] - - for asset_version in used_asset_versions: - if asset_version not in instance.data[asset_versions_key]: - instance.data[asset_versions_key].append(asset_version) - def _ensure_asset_types_exists(self, session, component_list): """Make sure that all AssetType entities exists for integration. 
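Note on the refactor in PATCH 088/089 above: each of the new helpers follows the same query-first, create-if-missing shape. A minimal sketch of that shape, assuming only a connected ftrack_api session is passed in; the entity and attribute names are taken from the diff itself, while the helper name and demo values here are illustrative only:

def ensure_asset_type(session, short_name, type_name=None):
    # Reuse an existing AssetType when one with this short name exists.
    asset_type = session.query(
        (
            'select id, short from AssetType'
            ' where short is "{}"'
        ).format(short_name)
    ).first()
    if asset_type is not None:
        return asset_type

    # Otherwise create it and commit, so the id is usable by the Asset
    # and AssetVersion created in the following integration steps.
    asset_type = session.create(
        "AssetType",
        {"short": short_name, "name": type_name or short_name}
    )
    session.commit()
    return asset_type

The same shape is reused for Asset (keyed on name, type_id and context_id) and for AssetVersion (keyed on version and asset_id), which is what lets the rewritten process() stay a linear sequence of ensure calls.
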
From 89d29a1d87ffe21e5d6d6d187ebb6e268bf289a1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 12:16:03 +0200 Subject: [PATCH 090/207] store more data about asset versions in integrate ftrack api --- .../plugins/publish/integrate_ftrack_api.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 7bba93c7cd..7ebf807f55 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -115,6 +115,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session, component_list ) + asset_versions_data_by_id = {} used_asset_versions = [] # Iterate over components and publish for data in component_list: @@ -144,11 +145,27 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): # Component self.create_component(session, asset_version_entity, data) + # Store asset version and components items that were + version_id = asset_version_entity["id"] + if version_id not in asset_versions_data_by_id: + asset_versions_data_by_id[version_id] = { + "asset_version": asset_version_entity, + "component_items": [] + } + + asset_versions_data_by_id[version_id]["component_items"].append( + data + ) # Backwards compatibility if asset_version_entity not in used_asset_versions: used_asset_versions.append(asset_version_entity) + instance.data["ftrackIntegratedAssetVersionsData"] = ( + asset_versions_data_by_id + ) + + # Backwards compatibility asset_versions_key = "ftrackIntegratedAssetVersions" if asset_versions_key not in instance.data: instance.data[asset_versions_key] = [] From 312d0309ab92de834629c58587f1a758d1d1e90c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Apr 2022 12:36:09 +0200 Subject: [PATCH 091/207] Fix - reworked alternative sites Implements recursive relationship between alternative sites --- openpype/plugins/publish/integrate_new.py | 80 +++++++++++++++++------ 1 file changed, 61 insertions(+), 19 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 959fd3bbee..ed1c02b825 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -1116,18 +1116,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): rec["sites"].append(meta) already_attached_sites[meta["name"]] = None + # add alternative sites + rec, already_attached_sites = self._add_alternative_sites( + system_sync_server_presets, already_attached_sites, rec) + # add skeleton for site where it should be always synced to - for always_on_site in always_accesible: + for always_on_site in set(always_accesible): if always_on_site not in already_attached_sites.keys(): meta = {"name": always_on_site.strip()} rec["sites"].append(meta) already_attached_sites[meta["name"]] = None - # add alternative sites - rec = self._add_alternative_sites(system_sync_server_presets, - already_attached_sites, - rec) - log.debug("final sites:: {}".format(rec["sites"])) return rec @@ -1158,22 +1157,65 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): """ conf_sites = system_sync_server_presets.get("sites", {}) + alt_site_pairs = self._get_alt_site_pairs(conf_sites) + + already_attached_keys = list(already_attached_sites.keys()) + for added_site in already_attached_keys: + real_created = already_attached_sites[added_site] + for alt_site in alt_site_pairs.get(added_site, []): + if alt_site in 
already_attached_sites.keys(): + continue + meta = {"name": alt_site} + # alt site inherits state of 'created_dt' + if real_created: + meta["created_dt"] = real_created + rec["sites"].append(meta) + already_attached_sites[meta["name"]] = real_created + + return rec, already_attached_sites + + def _get_alt_site_pairs(self, conf_sites): + """Returns dict of site and its alternative sites. + + If `site` has alternative site, it means that alt_site has 'site' as + alternative site + Args: + conf_sites (dict) + Returns: + (dict): {'site': [alternative sites]...} + """ + alt_site_pairs = {} for site_name, site_info in conf_sites.items(): alt_sites = set(site_info.get("alternative_sites", [])) - already_attached_keys = list(already_attached_sites.keys()) - for added_site in already_attached_keys: - if added_site in alt_sites: - if site_name in already_attached_keys: - continue - meta = {"name": site_name} - real_created = already_attached_sites[added_site] - # alt site inherits state of 'created_dt' - if real_created: - meta["created_dt"] = real_created - rec["sites"].append(meta) - already_attached_sites[meta["name"]] = real_created + if not alt_site_pairs.get(site_name): + alt_site_pairs[site_name] = [] - return rec + alt_site_pairs[site_name].extend(alt_sites) + + for alt_site in alt_sites: + if not alt_site_pairs.get(alt_site): + alt_site_pairs[alt_site] = [] + alt_site_pairs[alt_site].extend([site_name]) + + # transitive relationship, eg site is alternative to another which is + # alternative to nex site + loop = True + while loop: + loop = False + for site_name, alt_sites in alt_site_pairs.items(): + for alt_site in alt_sites: + # safety against wrong config + # {"SFTP": {"alternative_site": "SFTP"} + if alt_site == site_name: + continue + + for alt_alt_site in alt_site_pairs.get(alt_site, []): + if ( alt_alt_site != site_name + and alt_alt_site not in alt_sites): + alt_site_pairs[site_name].append(alt_alt_site) + loop = True + + return alt_site_pairs def handle_destination_files(self, integrated_file_sizes, mode): """ Clean destination files From 9f9f47145b0a1e88c5e28c1b2ade7c842191e14c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Apr 2022 12:45:49 +0200 Subject: [PATCH 092/207] Fix - added active site from settings if same as local id Without this Tray configuring background process will not show proper site in LS dropdown --- openpype/modules/sync_server/sync_server_module.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index caf58503f1..ddcf16a410 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -848,6 +848,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if self.enabled and sync_settings.get('enabled'): sites.append(self.LOCAL_SITE) + active_site = sync_settings["config"]["active_site"] + # for Tray running background process + if active_site == get_local_site_id() and active_site not in sites: + sites.append(active_site) + return sites def tray_init(self): From 4da7f7c1cce65434d385bf8cad5dc678226c62e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 13:48:21 +0200 Subject: [PATCH 093/207] changed integrate ftrack note to b able add published paths into asset version comment --- .../plugins/publish/integrate_ftrack_note.py | 196 +++++++++--------- .../defaults/project_settings/ftrack.json | 2 +- .../defaults/project_settings/global.json | 2 +- 
.../schema_project_ftrack.json | 8 +- 4 files changed, 111 insertions(+), 97 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index acd295854d..c165e99918 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -15,10 +15,112 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): # Can be set in presets: # - Allows only `intent` and `comment` keys + note_template = None + # Backwards compatibility note_with_intent_template = "{intent}: {comment}" # - note label must exist in Ftrack note_labels = [] + def process(self, instance): + # Check if there are any integrated AssetVersion entities + asset_versions_key = "ftrackIntegratedAssetVersionsData" + asset_versions_data_by_id = instance.data.get(asset_versions_key) + if not asset_versions_data_by_id: + self.log.info("There are any integrated AssetVersions") + return + + comment = (instance.context.data.get("comment") or "").strip() + if not comment: + self.log.info("Comment is not set.") + else: + self.log.debug("Comment is set to `{}`".format(comment)) + + session = instance.context.data["ftrackSession"] + + intent = instance.context.data.get("intent") + if intent and isinstance(intent, dict): + intent_val = intent.get("value") + intent_label = intent.get("label") + else: + intent_val = intent_label = intent + + final_intent_label = None + if intent_val: + final_intent_label = self.get_intent_label(session, intent_val) + if final_intent_label is None: + final_intent_label = intent_label + + # if intent label is set then format comment + # - it is possible that intent_label is equal to "" (empty string) + if final_intent_label: + self.log.debug( + "Intent label is set to `{}`.".format(final_intent_label) + ) + + elif intent_val: + self.log.debug(( + "Intent is set to `{}` and was not added" + " to comment because label is set to `{}`." 
+ ).format(intent_val, final_intent_label)) + + else: + self.log.debug("Intent is not set.") + + user = session.query( + "User where username is \"{}\"".format(session.api_user) + ).first() + if not user: + self.log.warning( + "Was not able to query current User {}".format( + session.api_user + ) + ) + + labels = [] + if self.note_labels: + all_labels = session.query("select id, name from NoteLabel").all() + labels_by_low_name = {lab["name"].lower(): lab for lab in all_labels} + for _label in self.note_labels: + label = labels_by_low_name.get(_label.lower()) + if not label: + self.log.warning( + "Note Label `{}` was not found.".format(_label) + ) + continue + + labels.append(label) + + for asset_version_data in asset_versions_data_by_id.values(): + asset_version = asset_version_data["asset_version"] + component_items = asset_version_data["component_items"] + + published_paths = set() + for component_item in component_items: + published_paths.add(component_item["component_path"]) + + # Backwards compatibility for older settings using + # attribute 'note_with_intent_template' + template = self.note_template + if template is None: + template = self.note_with_intent_template + comment = template.format(**{ + "intent": final_intent_label, + "comment": comment, + "published_paths": "\n".join(sorted(published_paths)) + }) + asset_version.create_note(comment, author=user, labels=labels) + + try: + session.commit() + self.log.debug("Note added to AssetVersion \"{}\"".format( + str(asset_version) + )) + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + session._configure_locations() + six.reraise(tp, value, tb) + def get_intent_label(self, session, intent_value): if not intent_value: return @@ -45,12 +147,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): if not items: return - if sys.version_info[0] < 3: - string_type = basestring - else: - string_type = str - - if isinstance(items, string_type): + if isinstance(items, six.string_types): items = json.loads(items) intent_label = None @@ -60,90 +157,3 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): break return intent_label - - def process(self, instance): - comment = (instance.context.data.get("comment") or "").strip() - if not comment: - self.log.info("Comment is not set.") - return - - self.log.debug("Comment is set to `{}`".format(comment)) - - session = instance.context.data["ftrackSession"] - - intent = instance.context.data.get("intent") - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent_label = intent - - final_label = None - if intent_val: - final_label = self.get_intent_label(session, intent_val) - if final_label is None: - final_label = intent_label - - # if intent label is set then format comment - # - it is possible that intent_label is equal to "" (empty string) - if final_label: - msg = "Intent label is set to `{}`.".format(final_label) - comment = self.note_with_intent_template.format(**{ - "intent": final_label, - "comment": comment - }) - - elif intent_val: - msg = ( - "Intent is set to `{}` and was not added" - " to comment because label is set to `{}`." - ).format(intent_val, final_label) - - else: - msg = "Intent is not set." 
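# ---------------------------------------------------------------------------
# [Editor's note: illustrative aside, not part of the patch above.]
# The reworked IntegrateFtrackNote fills `note_template` (falling back to the
# legacy `note_with_intent_template`) with a plain str.format() call. A quick
# standalone check of how a template from settings would expand; the intent,
# comment and paths below are made-up example values:
note_template = "{intent}: {comment}\n{published_paths}"

format_data = {
    "intent": "WIP",
    "comment": "tweaked key light and re-rendered",
    "published_paths": "\n".join(sorted([
        "/proj/shots/sh010/publish/render/v003/sh010_render_v003.mov",
        "/proj/shots/sh010/publish/render/v003/sh010_render_v003.exr",
    ])),
}

# Mirrors `comment = template.format(**format_data)` in the plugin before the
# note is created on the AssetVersion.
print(note_template.format(**format_data))
# ---------------------------------------------------------------------------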
- - self.log.debug(msg) - - asset_versions_key = "ftrackIntegratedAssetVersions" - asset_versions = instance.data.get(asset_versions_key) - if not asset_versions: - self.log.info("There are any integrated AssetVersions") - return - - user = session.query( - "User where username is \"{}\"".format(session.api_user) - ).first() - if not user: - self.log.warning( - "Was not able to query current User {}".format( - session.api_user - ) - ) - - labels = [] - if self.note_labels: - all_labels = session.query("NoteLabel").all() - labels_by_low_name = {lab["name"].lower(): lab for lab in all_labels} - for _label in self.note_labels: - label = labels_by_low_name.get(_label.lower()) - if not label: - self.log.warning( - "Note Label `{}` was not found.".format(_label) - ) - continue - - labels.append(label) - - for asset_version in asset_versions: - asset_version.create_note(comment, author=user, labels=labels) - - try: - session.commit() - self.log.debug("Note added to AssetVersion \"{}\"".format( - str(asset_version) - )) - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index ca1cfe1e12..9b350ec88d 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -354,7 +354,7 @@ }, "IntegrateFtrackNote": { "enabled": true, - "note_with_intent_template": "{intent}: {comment}", + "note_template": "{intent}: {comment}", "note_labels": [] }, "ValidateFtrackAttributes": { diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index ed28d357f2..4c94eee254 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -190,7 +190,7 @@ "tasks": [], "template_name": "simpleUnrealTexture" }, - { + { "families": [ "staticMesh", "skeletalMesh" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index fb384882c6..2b62d67c98 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -738,10 +738,14 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "label", + "label": "Template may contain formatting keys {intent}, {comment} and {published_paths}." 
+ }, { "type": "text", - "key": "note_with_intent_template", - "label": "Note with intent template" + "key": "note_template", + "label": "Note template" }, { "type": "list", From 87f40d53e098ce3cef8bc31fc733e73df36f9cc3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 6 Apr 2022 14:44:03 +0200 Subject: [PATCH 094/207] flame: adding media info getter obj --- openpype/hosts/flame/api/lib.py | 116 +++++++++++++++++++++++++++++++- 1 file changed, 114 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 7316fa1c5b..4e989abb9e 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -1,9 +1,14 @@ import sys import os import re +import six import json import pickle +import itertools import contextlib +import xml.etree.cElementTree as cET +from copy import deepcopy +from xml.etree import ElementTree as ET from pprint import pformat from .constants import ( MARKER_COLOR, @@ -12,9 +17,10 @@ from .constants import ( COLOR_MAP, MARKER_PUBLISH_DEFAULT ) -from openpype.api import Logger -log = Logger.get_logger(__name__) +import openpype.api as openpype + +log = openpype.Logger.get_logger(__name__) FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]") @@ -711,3 +717,109 @@ def get_batch_group_from_desktop(name): for bgroup in project_desktop.batch_groups: if bgroup.name.get_value() in name: return bgroup + + +class MediaInfoFile: + media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info" + tmp_name = "_tmp.clip" + tmp_file = None + + out_feed_nb_ticks = None + out_feed_fps = None + out_feed_drop_mode = None + + log = log + + def __init__(self, path): + # test if media script paht exists + self._validate_media_script_path() + + # derivate other feed variables + self.feed_basename = os.path.basename(path) + self.feed_dir = os.path.dirname(path) + self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() + + self.tmp_file = os.path.join(self.feed_dir, self.tmp_name) + + # remove previously generated temp files + # it will be regenerated + self._clear_tmp_file() + + self.log.info("Temp File: {}".format(self.tmp_file)) + + def _validate_media_script_path(self): + if not os.path.isfile(self.media_script_path): + raise IOError("Media Scirpt does not exist: `{}`".format( + self.media_script_path)) + + def _generate_media_info_file(self): + # Create cmd arguments for gettig xml file info file + cmd_args = [ + self.media_script_path, + "-e", self.feed_ext, + "-o", self.tmp_file, + self.feed_dir + ] + + # execute creation of clip xml template data + try: + openpype.run_subprocess(cmd_args) + self._make_single_clip_media_info() + except TypeError: + self.log.error("Error creating self.tmp_file") + six.reraise(*sys.exc_info()) + + def _make_single_clip_media_info(self): + with open(self.tmp_file) as f: + lines = f.readlines() + _added_root = itertools.chain( + "", deepcopy(lines)[1:], "") + new_root = ET.fromstringlist(_added_root) + + # find the clip which is matching to my input name + xml_clips = new_root.findall("clip") + matching_clip = None + for xml_clip in xml_clips: + if xml_clip.find("name").text in self.feed_basename: + matching_clip = xml_clip + + if matching_clip is None: + # return warning there is missing clip + raise ET.ParseError( + "Missing clip in `{}`. 
Available clips {}".format( + self.feed_basename, [ + xml_clip.find("name").text + for xml_clip in xml_clips + ] + )) + + self._write_result_xml_to_file(self.tmp_file, matching_clip) + + def _clear_tmp_file(self): + if os.path.isfile(self.tmp_file): + os.remove(self.tmp_file) + + def _get_time_info_from_origin(self, xml_data): + try: + for out_track in xml_data.iter('track'): + for out_feed in out_track.iter('feeds'): + out_feed_nb_ticks_obj = out_feed.find( + 'startTimecode/nbTicks') + self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text + out_feed_fps_obj = out_feed.find( + 'startTimecode/rate') + self.out_feed_fps = out_feed_fps_obj.text + out_feed_drop_mode_obj = out_feed.find( + 'startTimecode/dropMode') + self.out_feed_drop_mode = out_feed_drop_mode_obj.text + break + else: + continue + except Exception as msg: + self.log.warning(msg) + + def _write_result_xml_to_file(self, file, xml_data): + # save it as new file + tree = cET.ElementTree(xml_data) + tree.write(file, xml_declaration=True, + method='xml', encoding='UTF-8') \ No newline at end of file From 7dc2c618a0b6ffd80529620d576f929afaa01cf0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 6 Apr 2022 14:53:13 +0200 Subject: [PATCH 095/207] flame: update media info file generator --- openpype/hosts/flame/api/lib.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 4e989abb9e..633854168a 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -724,6 +724,7 @@ class MediaInfoFile: tmp_name = "_tmp.clip" tmp_file = None + clip_data = None out_feed_nb_ticks = None out_feed_fps = None out_feed_drop_mode = None @@ -752,7 +753,7 @@ class MediaInfoFile: raise IOError("Media Scirpt does not exist: `{}`".format( self.media_script_path)) - def _generate_media_info_file(self): + def generate_media_info_file(self): # Create cmd arguments for gettig xml file info file cmd_args = [ self.media_script_path, @@ -793,6 +794,8 @@ class MediaInfoFile: ] )) + self._get_time_info_from_origin(matching_clip) + self.clip_data = matching_clip self._write_result_xml_to_file(self.tmp_file, matching_clip) def _clear_tmp_file(self): From 8167a65f597b6526e69a66a5591f86c2b5c8aa54 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 15:26:53 +0200 Subject: [PATCH 096/207] added more checks --- .../plugins/publish/integrate_ftrack_note.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index c165e99918..a77b6d6674 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -47,8 +47,9 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): final_intent_label = None if intent_val: final_intent_label = self.get_intent_label(session, intent_val) - if final_intent_label is None: - final_intent_label = intent_label + + if final_intent_label is None: + final_intent_label = intent_label # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) @@ -103,11 +104,18 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): template = self.note_template if template is None: template = self.note_with_intent_template - comment = template.format(**{ + format_data = { "intent": final_intent_label, "comment": comment, "published_paths": 
"\n".join(sorted(published_paths)) - }) + } + comment = template.format(**format_data) + if not comment: + self.log.info(( + "Note for AssetVersion {} would be empty. Skipping." + "\nTemplate: {}\nData: {}" + ).format(asset_version["id"], template, format_data)) + continue asset_version.create_note(comment, author=user, labels=labels) try: From fe39d0a300fd1ad76390f152cd8502a6f798c244 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 15:39:50 +0200 Subject: [PATCH 097/207] add host name to possible keys in template --- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index a77b6d6674..8609e8bca6 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -29,6 +29,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): self.log.info("There are any integrated AssetVersions") return + host_name = instance.context.data["hostName"] comment = (instance.context.data.get("comment") or "").strip() if not comment: self.log.info("Comment is not set.") @@ -107,7 +108,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): format_data = { "intent": final_intent_label, "comment": comment, - "published_paths": "\n".join(sorted(published_paths)) + "host_name": host_name, + "published_paths": "\n".join(sorted(published_paths)), } comment = template.format(**format_data) if not comment: From d79773e801b428ee629521a65374406cda8b8bfc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 6 Apr 2022 15:42:58 +0200 Subject: [PATCH 098/207] flame: fixing feeds to feed tag --- openpype/hosts/flame/api/lib.py | 2 +- openpype/hosts/flame/api/plugin.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 633854168a..508dc0155f 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -805,7 +805,7 @@ class MediaInfoFile: def _get_time_info_from_origin(self, xml_data): try: for out_track in xml_data.iter('track'): - for out_feed in out_track.iter('feeds'): + for out_feed in out_track.iter('feed'): out_feed_nb_ticks_obj = out_feed.find( 'startTimecode/nbTicks') self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 464f5ce89b..bd0f9f1a81 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -679,6 +679,7 @@ class ClipLoader(LoaderPlugin): ] +# TODO: inheritance from flame.api.lib.MediaInfoFile class OpenClipSolver: media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info" tmp_name = "_tmp.clip" From 04b8eaf2998eb3269800df0d8fa4985a4a6a8df0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 15:44:30 +0200 Subject: [PATCH 099/207] added new key into settings label --- .../entities/schemas/projects_schema/schema_project_ftrack.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 2b62d67c98..b3c094e398 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -740,7 +740,7 @@ }, { "type": "label", - "label": "Template may contain formatting keys {intent}, {comment} and {published_paths}." + "label": "Template may contain formatting keys {intent}, {comment}, {host_name} and {published_paths}." }, { "type": "text", From 67c759ae2d72558c2caa5cfcc30266a1d16cddb9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 6 Apr 2022 15:45:42 +0200 Subject: [PATCH 100/207] flame: add media info file class to api --- openpype/hosts/flame/api/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 28511458c2..1308b04a7d 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -28,7 +28,8 @@ from .lib import ( get_padding_from_filename, maintained_object_duplication, get_clip_segment, - get_batch_group_from_desktop + get_batch_group_from_desktop, + MediaInfoFile ) from .utils import ( setup, @@ -103,6 +104,7 @@ __all__ = [ "maintained_object_duplication", "get_clip_segment", "get_batch_group_from_desktop", + "MediaInfoFile", # pipeline "install", From 6c9f9c18119cc286078cd109d3253073a3d273aa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 6 Apr 2022 16:08:31 +0200 Subject: [PATCH 101/207] flame: add generator into init of class --- openpype/hosts/flame/api/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 508dc0155f..998d7dfa7e 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -748,12 +748,14 @@ class MediaInfoFile: self.log.info("Temp File: {}".format(self.tmp_file)) + self._generate_media_info_file() + def _validate_media_script_path(self): if not os.path.isfile(self.media_script_path): raise IOError("Media Scirpt does not exist: `{}`".format( self.media_script_path)) - def generate_media_info_file(self): + def _generate_media_info_file(self): # Create cmd arguments for gettig xml file info file cmd_args = [ self.media_script_path, From f43e6f0fcdfdb82876c06f6631654a3318edf053 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 6 Apr 2022 16:08:48 +0200 Subject: [PATCH 102/207] flame: get real source_in --- openpype/hosts/flame/otio/flame_export.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 1b5980b40a..3e76968963 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -7,6 +7,7 @@ import json import logging import opentimelineio as otio from . 
import utils +from openpype.hosts.flame.api import MediaInfoFile import flame from pprint import pformat @@ -345,7 +346,13 @@ def create_otio_clip(clip_data): media_reference = create_otio_reference(clip_data) # calculate source in - first_frame = utils.get_frame_from_filename(clip_data["fpath"]) or 0 + media_info = MediaInfoFile(clip_data["fpath"]) + xml_timecode_ticks = media_info.out_feed_nb_ticks + if xml_timecode_ticks: + first_frame = int(xml_timecode_ticks) + else: + first_frame = utils.get_frame_from_filename(clip_data["fpath"]) or 0 + source_in = int(clip_data["source_in"]) - int(first_frame) # creatae source range From 00406a737610dc4623ff78edda1e7369ddbfc3f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 16:13:50 +0200 Subject: [PATCH 103/207] added more information abou app - name and label --- .../plugins/publish/collect_app_name.py | 13 +++++++++ .../plugins/publish/collect_app_name.py | 13 +++++++++ openpype/plugins/publish/collect_host_name.py | 28 +++++++++++++------ 3 files changed, 45 insertions(+), 9 deletions(-) create mode 100644 openpype/hosts/standalonepublisher/plugins/publish/collect_app_name.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_app_name.py diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_app_name.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_app_name.py new file mode 100644 index 0000000000..857f3dca20 --- /dev/null +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_app_name.py @@ -0,0 +1,13 @@ +import pyblish.api + + +class CollectSAAppName(pyblish.api.ContextPlugin): + """Collect app name and label.""" + + label = "Collect App Name/Label" + order = pyblish.api.CollectorOrder - 0.5 + hosts = ["standalonepublisher"] + + def process(self, context): + context.data["appName"] = "standalone publisher" + context.data["appLabel"] = "Standalone publisher" diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_app_name.py b/openpype/hosts/traypublisher/plugins/publish/collect_app_name.py new file mode 100644 index 0000000000..e38d10e70f --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_app_name.py @@ -0,0 +1,13 @@ +import pyblish.api + + +class CollectTrayPublisherAppName(pyblish.api.ContextPlugin): + """Collect app name and label.""" + + label = "Collect App Name/Label" + order = pyblish.api.CollectorOrder - 0.5 + hosts = ["traypublisher"] + + def process(self, context): + context.data["appName"] = "tray publisher" + context.data["appLabel"] = "Tray publisher" diff --git a/openpype/plugins/publish/collect_host_name.py b/openpype/plugins/publish/collect_host_name.py index b731e3ed26..d64af4d049 100644 --- a/openpype/plugins/publish/collect_host_name.py +++ b/openpype/plugins/publish/collect_host_name.py @@ -18,20 +18,30 @@ class CollectHostName(pyblish.api.ContextPlugin): def process(self, context): host_name = context.data.get("hostName") + app_name = context.data.get("appName") + app_label = context.data.get("appLabel") # Don't override value if is already set - if host_name: + if host_name and app_name and app_label: return - # Use AVALON_APP as first if available it is the same as host name - # - only if is not defined use AVALON_APP_NAME (e.g. 
on Farm) and - # set it back to AVALON_APP env variable - host_name = os.environ.get("AVALON_APP") + # Use AVALON_APP to get host name if available if not host_name: + host_name = os.environ.get("AVALON_APP") + + # Use AVALON_APP_NAME to get full app name + if not app_name: app_name = os.environ.get("AVALON_APP_NAME") - if app_name: - app_manager = ApplicationManager() - app = app_manager.applications.get(app_name) - if app: + + # Fill missing values based on app full name + if (not host_name or not app_label) and app_name: + app_manager = ApplicationManager() + app = app_manager.applications.get(app_name) + if app: + if not host_name: host_name = app.host_name + if not app_label: + app_label = app.full_label context.data["hostName"] = host_name + context.data["appName"] = app_name + context.data["appLabel"] = app_label From e68a7cbf7bb9c4362bd5c64ad617ec38db9c633b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 16:15:40 +0200 Subject: [PATCH 104/207] added app name and lable keys into note template --- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 4 ++++ .../schemas/projects_schema/schema_project_ftrack.json | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 8609e8bca6..8220b8e6ca 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -30,6 +30,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): return host_name = instance.context.data["hostName"] + app_name = instance.context.data["appName"] + app_label = instance.context.data["appLabel"] comment = (instance.context.data.get("comment") or "").strip() if not comment: self.log.info("Comment is not set.") @@ -109,6 +111,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): "intent": final_intent_label, "comment": comment, "host_name": host_name, + "app_name": app_name, + "app_label": app_label, "published_paths": "\n".join(sorted(published_paths)), } comment = template.format(**format_data) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index b3c094e398..0d7faac2ba 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -740,7 +740,7 @@ }, { "type": "label", - "label": "Template may contain formatting keys {intent}, {comment}, {host_name} and {published_paths}." + "label": "Template may contain formatting keys intent, comment, host_name, app_name, app_label and published_paths." 
}, { "type": "text", From 2a3460e0fa57408356a2af0ebfcba33ad83f4132 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 16:15:55 +0200 Subject: [PATCH 105/207] changed note template input into multiline input --- .../schemas/projects_schema/schema_project_ftrack.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 0d7faac2ba..0ed2fb3536 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -745,7 +745,8 @@ { "type": "text", "key": "note_template", - "label": "Note template" + "label": "Note template", + "multiline": true }, { "type": "list", From 2e9b7325e47c30a75850c048669c2e04d8bbf3de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 16:25:27 +0200 Subject: [PATCH 106/207] change new line char with br/ html tag --- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 8220b8e6ca..3a9f904d00 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -113,7 +113,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): "host_name": host_name, "app_name": app_name, "app_label": app_label, - "published_paths": "\n".join(sorted(published_paths)), + "published_paths": "
".join(sorted(published_paths)), } comment = template.format(**format_data) if not comment: From dff1a51f96a1237071e2fe2ed3cac400c7a3bf6e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 16:33:05 +0200 Subject: [PATCH 107/207] removed outdated log --- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 3a9f904d00..56a7a89e16 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -61,12 +61,6 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): "Intent label is set to `{}`.".format(final_intent_label) ) - elif intent_val: - self.log.debug(( - "Intent is set to `{}` and was not added" - " to comment because label is set to `{}`." - ).format(intent_val, final_intent_label)) - else: self.log.debug("Intent is not set.") From 86647e02310d0913afd7919d364f6c07bf2274e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 17:26:53 +0200 Subject: [PATCH 108/207] added collector for intent label --- .../plugins/publish/collect_intent_label.py | 78 +++++++++++++++++ .../plugins/publish/integrate_ftrack_note.py | 83 +++++-------------- 2 files changed, 101 insertions(+), 60 deletions(-) create mode 100644 openpype/modules/ftrack/plugins/publish/collect_intent_label.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_intent_label.py b/openpype/modules/ftrack/plugins/publish/collect_intent_label.py new file mode 100644 index 0000000000..c23722933c --- /dev/null +++ b/openpype/modules/ftrack/plugins/publish/collect_intent_label.py @@ -0,0 +1,78 @@ +""" +Requires: + context -> ftrackSession - connected ftrack.Session + +Provides: + context -> ftrackIntentLabel +""" +import json + +import six +import pyblish.api + + +class CollectFtrackApi(pyblish.api.ContextPlugin): + """ Collects an ftrack session and the current task id. """ + + order = pyblish.api.CollectorOrder + 0.49991 + label = "Collect Ftrack Intent Label" + + def process(self, context): + intent = context.data.get("intent") + if intent and isinstance(intent, dict): + intent_val = intent.get("value") + intent_label = intent.get("label") + else: + intent_val = intent_label = intent + + session = context.data.get("ftrackSession") + if session is None: + context.data["ftrackIntentLabel"] = intent_label + self.log.info("Ftrack session is not available. Skipping.") + return + + final_intent_label = None + if intent_val: + final_intent_label = self.get_intent_label(session, intent_val) + + if final_intent_label is None: + final_intent_label = intent_label + + context.data["ftrackIntentLabel"] = final_intent_label + + def get_intent_label(self, session, intent_value): + if not intent_value: + return + + intent_configurations = session.query( + "CustomAttributeConfiguration where key is intent" + ).all() + if not intent_configurations: + return + + intent_configuration = intent_configurations[0] + if len(intent_configuration) > 1: + self.log.warning(( + "Found more than one `intent` custom attribute." + " Using first found." 
+ )) + + config = intent_configuration.get("config") + if not config: + return + + configuration = json.loads(config) + items = configuration.get("data") + if not items: + return + + if isinstance(items, six.string_types): + items = json.loads(items) + + intent_label = None + for item in items: + if item["value"] == intent_value: + intent_label = item["menu"] + break + + return intent_label diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 56a7a89e16..2fe97dc7ac 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -1,7 +1,18 @@ +""" +Requires: + context > hostName + context > appName + context > appLabel + context > comment + context > ftrackSession + context > ftrackIntentLabel + instance > ftrackIntegratedAssetVersionsData +""" + import sys -import json -import pyblish.api + import six +import pyblish.api class IntegrateFtrackNote(pyblish.api.InstancePlugin): @@ -29,36 +40,25 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): self.log.info("There are any integrated AssetVersions") return - host_name = instance.context.data["hostName"] - app_name = instance.context.data["appName"] - app_label = instance.context.data["appLabel"] - comment = (instance.context.data.get("comment") or "").strip() + context = instance.context + host_name = context.data["hostName"] + app_name = context.data["appName"] + app_label = context.data["appLabel"] + comment = (context.data.get("comment") or "").strip() if not comment: self.log.info("Comment is not set.") else: self.log.debug("Comment is set to `{}`".format(comment)) - session = instance.context.data["ftrackSession"] + session = context.data["ftrackSession"] - intent = instance.context.data.get("intent") - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent_label = intent - - final_intent_label = None - if intent_val: - final_intent_label = self.get_intent_label(session, intent_val) - - if final_intent_label is None: - final_intent_label = intent_label + intent_label = context.data["ftrackIntentLabel"] # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) - if final_intent_label: + if intent_label: self.log.debug( - "Intent label is set to `{}`.".format(final_intent_label) + "Intent label is set to `{}`.".format(intent_label) ) else: @@ -102,7 +102,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): if template is None: template = self.note_with_intent_template format_data = { - "intent": final_intent_label, + "intent": intent_label, "comment": comment, "host_name": host_name, "app_name": app_name, @@ -128,40 +128,3 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): session.rollback() session._configure_locations() six.reraise(tp, value, tb) - - def get_intent_label(self, session, intent_value): - if not intent_value: - return - - intent_configurations = session.query( - "CustomAttributeConfiguration where key is intent" - ).all() - if not intent_configurations: - return - - intent_configuration = intent_configurations[0] - if len(intent_configuration) > 1: - self.log.warning(( - "Found more than one `intent` custom attribute." - " Using first found." 
- )) - - config = intent_configuration.get("config") - if not config: - return - - configuration = json.loads(config) - items = configuration.get("data") - if not items: - return - - if isinstance(items, six.string_types): - items = json.loads(items) - - intent_label = None - for item in items: - if item["value"] == intent_value: - intent_label = item["menu"] - break - - return intent_label From e277cb8ed87be7e7591ae76e048f183c5bf6ce27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 17:28:39 +0200 Subject: [PATCH 109/207] added ftrack integrator adding comment to description --- .../publish/integrate_ftrack_description.py | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py new file mode 100644 index 0000000000..7e8371cd9d --- /dev/null +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -0,0 +1,76 @@ +""" +Requires: + context > comment + context > ftrackSession + context > ftrackIntentLabel + instance > ftrackIntegratedAssetVersionsData +""" + +import sys + +import six +import pyblish.api + + +class IntegrateFtrackDescription(pyblish.api.InstancePlugin): + """Add description to AssetVersions in Ftrack.""" + + # Must be after integrate asset new + order = pyblish.api.IntegratorOrder + 0.4999 + label = "Integrate Ftrack description" + families = ["ftrack"] + optional = True + + # Can be set in settings: + # - Allows `intent` and `comment` keys + description_template = "{comment}" + + def process(self, instance): + # Check if there are any integrated AssetVersion entities + asset_versions_key = "ftrackIntegratedAssetVersionsData" + asset_versions_data_by_id = instance.data.get(asset_versions_key) + if not asset_versions_data_by_id: + self.log.info("There are any integrated AssetVersions") + return + + comment = (instance.context.data.get("comment") or "").strip() + if not comment: + self.log.info("Comment is not set.") + else: + self.log.debug("Comment is set to `{}`".format(comment)) + + session = instance.context.data["ftrackSession"] + + intent_label = instance.context.data["ftrackIntentLabel"] + + # if intent label is set then format comment + # - it is possible that intent_label is equal to "" (empty string) + if intent_label: + self.log.debug( + "Intent label is set to `{}`.".format(intent_label) + ) + + else: + self.log.debug("Intent is not set.") + + for asset_version_data in asset_versions_data_by_id.values(): + asset_version = asset_version_data["asset_version"] + + # Backwards compatibility for older settings using + # attribute 'note_with_intent_template' + comment = self.description_template.format(**{ + "intent": intent_label, + "comment": comment + }) + asset_version["comment"] = comment + + try: + session.commit() + self.log.debug("Comment added to AssetVersion \"{}\"".format( + str(asset_version) + )) + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + session._configure_locations() + six.reraise(tp, value, tb) From 49808788f03f07037cb4c21270a1510c235d9ca1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 17:41:54 +0200 Subject: [PATCH 110/207] added settings for integrate ftrack description --- .../defaults/project_settings/ftrack.json | 6 +++ .../schema_project_ftrack.json | 38 +++++++++++++++++++ 2 files changed, 44 insertions(+) diff 
--git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 9b350ec88d..31d6a70ac7 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -357,6 +357,12 @@ "note_template": "{intent}: {comment}", "note_labels": [] }, + "IntegrateFtrackDescription": { + "enabled": false, + "optional": true, + "active": true, + "description_template": "{comment}" + }, "ValidateFtrackAttributes": { "enabled": false, "ftrack_custom_attributes": {} diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 0ed2fb3536..5ce9b24b4b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -756,6 +756,44 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "IntegrateFtrackDescription", + "label": "Integrate Ftrack Description", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "label", + "label": "Add description to integrated AssetVersion." + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "type": "boolean", + "key": "active", + "label": "Active" + }, + { + "type": "label", + "label": "Template may contain formatting keys intent and comment." + }, + { + "type": "text", + "key": "description_template", + "label": "Description template" + } + ] + }, { "type": "dict", "collapsible": true, From 68957cc0d9e545be7328dc484aaa22b4039b10b8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 18:14:34 +0200 Subject: [PATCH 111/207] changed name of publish plugin --- openpype/modules/ftrack/plugins/publish/collect_intent_label.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_intent_label.py b/openpype/modules/ftrack/plugins/publish/collect_intent_label.py index c23722933c..8375fba15e 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_intent_label.py +++ b/openpype/modules/ftrack/plugins/publish/collect_intent_label.py @@ -11,7 +11,7 @@ import six import pyblish.api -class CollectFtrackApi(pyblish.api.ContextPlugin): +class CollectFtrackIntentLabel(pyblish.api.ContextPlugin): """ Collects an ftrack session and the current task id. 
""" order = pyblish.api.CollectorOrder + 0.49991 From acba6e8bb08913906209946970f86359a37ca1e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 6 Apr 2022 18:52:44 +0200 Subject: [PATCH 112/207] fix import of get_representation_context --- openpype/hosts/tvpaint/plugins/load/load_reference_image.py | 2 +- openpype/pipeline/__init__.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py index 5e4e3965d2..af1a4a9b6b 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py +++ b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py @@ -1,6 +1,6 @@ import collections import qargparse -from avalon.pipeline import get_representation_context +from openpype.pipeline import get_representation_context from openpype.hosts.tvpaint.api import lib, pipeline, plugin diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 8460d20ef1..883713b078 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -41,6 +41,7 @@ from .load import ( loaders_from_representation, get_representation_path, + get_representation_context, get_repres_contexts, ) @@ -113,6 +114,7 @@ __all__ = ( "loaders_from_representation", "get_representation_path", + "get_representation_context", "get_repres_contexts", # --- Publish --- From f9df89dc0d57fed7f1fccc3a8f43fa194ef8b2d6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 7 Apr 2022 04:49:26 +0200 Subject: [PATCH 113/207] Fix Validate Asset Docs filename and class name --- .../publish/{validate_aseset_docs.py => validate_asset_docs.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename openpype/plugins/publish/{validate_aseset_docs.py => validate_asset_docs.py} (94%) diff --git a/openpype/plugins/publish/validate_aseset_docs.py b/openpype/plugins/publish/validate_asset_docs.py similarity index 94% rename from openpype/plugins/publish/validate_aseset_docs.py rename to openpype/plugins/publish/validate_asset_docs.py index eed75cdf8a..ddd579621c 100644 --- a/openpype/plugins/publish/validate_aseset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -2,7 +2,7 @@ import pyblish.api from openpype.pipeline import PublishValidationError -class ValidateContainers(pyblish.api.InstancePlugin): +class ValidateAssetDocs(pyblish.api.InstancePlugin): """Validate existence of asset asset documents on instances. Without asset document it is not possible to publish the instance. From ae57c8619d863589ae1fc365b86adbab0af890bf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 7 Apr 2022 04:50:32 +0200 Subject: [PATCH 114/207] Fix grammar + typos --- openpype/plugins/publish/validate_asset_docs.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/validate_asset_docs.py b/openpype/plugins/publish/validate_asset_docs.py index ddd579621c..bc1f9b9e6c 100644 --- a/openpype/plugins/publish/validate_asset_docs.py +++ b/openpype/plugins/publish/validate_asset_docs.py @@ -3,7 +3,7 @@ from openpype.pipeline import PublishValidationError class ValidateAssetDocs(pyblish.api.InstancePlugin): - """Validate existence of asset asset documents on instances. + """Validate existence of asset documents on instances. Without asset document it is not possible to publish the instance. 
@@ -22,10 +22,10 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin): return if instance.data.get("assetEntity"): - self.log.info("Instance have set asset document in it's data.") + self.log.info("Instance has set asset document in its data.") else: raise PublishValidationError(( - "Instance \"{}\" don't have set asset" - " document which is needed for publishing." + "Instance \"{}\" doesn't have asset document " + "set which is needed for publishing." ).format(instance.data["name"])) From 31683fb432b885add58e90883f23a4b82a989428 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 7 Apr 2022 05:17:59 +0200 Subject: [PATCH 115/207] Allow to select invalid camera contents if no cameras found + improve error logging --- .../maya/plugins/publish/validate_camera_contents.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py index d9e88edaac..20af8d2315 100644 --- a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py @@ -40,7 +40,14 @@ class ValidateCameraContents(pyblish.api.InstancePlugin): # list when there are no actual cameras results in # still an empty 'invalid' list if len(cameras) < 1: - raise RuntimeError("No cameras in instance.") + if members: + # If there are members in the instance return all of + # them as 'invalid' so the user can still select invalid + cls.log.error("No cameras found in instance " + "members: {}".format(members)) + return members + + raise RuntimeError("No cameras found in empty instance.") # non-camera shapes valid_shapes = cmds.ls(shapes, type=('camera', 'locator'), long=True) From c298e06ba621162e204299ff849786614d0c02be Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 11:11:46 +0200 Subject: [PATCH 116/207] use 'filepath_from_context' instead of 'get_representation_path_from_context' --- openpype/hosts/maya/plugins/inventory/import_modelrender.py | 5 ++--- .../hosts/photoshop/plugins/load/load_image_from_sequence.py | 3 +-- openpype/pipeline/load/plugins.py | 3 ++- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/inventory/import_modelrender.py b/openpype/hosts/maya/plugins/inventory/import_modelrender.py index d9bb256fac..c2e43f196f 100644 --- a/openpype/hosts/maya/plugins/inventory/import_modelrender.py +++ b/openpype/hosts/maya/plugins/inventory/import_modelrender.py @@ -4,7 +4,6 @@ from bson.objectid import ObjectId from openpype.pipeline import ( InventoryAction, get_representation_context, - get_representation_path_from_context, ) from openpype.hosts.maya.api.lib import ( maintained_selection, @@ -80,10 +79,10 @@ class ImportModelRender(InventoryAction): }) context = get_representation_context(look_repr["_id"]) - maya_file = get_representation_path_from_context(context) + maya_file = self.filepath_from_context(context) context = get_representation_context(json_repr["_id"]) - json_file = get_representation_path_from_context(context) + json_file = self.filepath_from_context(context) # Import the look file with maintained_selection(): diff --git a/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py b/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py index 5f39121ae1..c25c5a8f2c 100644 --- a/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py +++ b/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py @@ -2,7 
+2,6 @@ import os import qargparse -from openpype.pipeline import get_representation_path_from_context from openpype.hosts.photoshop import api as photoshop from openpype.hosts.photoshop.api import get_unique_layer_name @@ -63,7 +62,7 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader): """ files = [] for context in repre_contexts: - fname = get_representation_path_from_context(context) + fname = cls.filepath_from_context(context) _, file_extension = os.path.splitext(fname) for file_name in os.listdir(os.path.dirname(fname)): diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index d60aed0083..a30a2188a4 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -41,7 +41,8 @@ class LoaderPlugin(list): def get_representations(cls): return cls.representations - def filepath_from_context(self, context): + @classmethod + def filepath_from_context(cls, context): return get_representation_path_from_context(context) def load(self, context, name=None, namespace=None, options=None): From cb3722552c4e2875365f237c36aee702af3bf39f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 11:57:33 +0200 Subject: [PATCH 117/207] removed ftrackIntentLabel --- .../plugins/publish/collect_intent_label.py | 78 ------------------- .../publish/integrate_ftrack_description.py | 12 ++- .../plugins/publish/integrate_ftrack_note.py | 12 ++- 3 files changed, 20 insertions(+), 82 deletions(-) delete mode 100644 openpype/modules/ftrack/plugins/publish/collect_intent_label.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_intent_label.py b/openpype/modules/ftrack/plugins/publish/collect_intent_label.py deleted file mode 100644 index 8375fba15e..0000000000 --- a/openpype/modules/ftrack/plugins/publish/collect_intent_label.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Requires: - context -> ftrackSession - connected ftrack.Session - -Provides: - context -> ftrackIntentLabel -""" -import json - -import six -import pyblish.api - - -class CollectFtrackIntentLabel(pyblish.api.ContextPlugin): - """ Collects an ftrack session and the current task id. """ - - order = pyblish.api.CollectorOrder + 0.49991 - label = "Collect Ftrack Intent Label" - - def process(self, context): - intent = context.data.get("intent") - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent_label = intent - - session = context.data.get("ftrackSession") - if session is None: - context.data["ftrackIntentLabel"] = intent_label - self.log.info("Ftrack session is not available. Skipping.") - return - - final_intent_label = None - if intent_val: - final_intent_label = self.get_intent_label(session, intent_val) - - if final_intent_label is None: - final_intent_label = intent_label - - context.data["ftrackIntentLabel"] = final_intent_label - - def get_intent_label(self, session, intent_value): - if not intent_value: - return - - intent_configurations = session.query( - "CustomAttributeConfiguration where key is intent" - ).all() - if not intent_configurations: - return - - intent_configuration = intent_configurations[0] - if len(intent_configuration) > 1: - self.log.warning(( - "Found more than one `intent` custom attribute." - " Using first found." 
- )) - - config = intent_configuration.get("config") - if not config: - return - - configuration = json.loads(config) - items = configuration.get("data") - if not items: - return - - if isinstance(items, six.string_types): - items = json.loads(items) - - intent_label = None - for item in items: - if item["value"] == intent_value: - intent_label = item["menu"] - break - - return intent_label diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index 7e8371cd9d..c6a3d47f66 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -2,7 +2,6 @@ Requires: context > comment context > ftrackSession - context > ftrackIntentLabel instance > ftrackIntegratedAssetVersionsData """ @@ -41,7 +40,16 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): session = instance.context.data["ftrackSession"] - intent_label = instance.context.data["ftrackIntentLabel"] + intent = instance.context.data.get("intent") + intent_label = None + if intent and isinstance(intent, dict): + intent_val = intent.get("value") + intent_label = intent.get("label") + else: + intent_val = intent + + if not intent_label: + intent_label = intent_val or "" # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 2fe97dc7ac..952b21546d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -5,7 +5,6 @@ Requires: context > appLabel context > comment context > ftrackSession - context > ftrackIntentLabel instance > ftrackIntegratedAssetVersionsData """ @@ -52,7 +51,16 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): session = context.data["ftrackSession"] - intent_label = context.data["ftrackIntentLabel"] + intent = instance.context.data.get("intent") + intent_label = None + if intent and isinstance(intent, dict): + intent_val = intent.get("value") + intent_label = intent.get("label") + else: + intent_val = intent + + if not intent_label: + intent_label = intent_val or "" # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) From c2788070a370a39536e134acc13a5f5243b2530f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 12:22:34 +0200 Subject: [PATCH 118/207] flame: refactoring batch group creation --- openpype/hosts/flame/api/__init__.py | 6 +- openpype/hosts/flame/api/batch_utils.py | 146 +++++++++++++----- .../plugins/publish/integrate_batch_group.py | 98 ++++++++---- 3 files changed, 182 insertions(+), 68 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 1308b04a7d..a0c40904ed 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -72,7 +72,8 @@ from .render_utils import ( modify_preset_file ) from .batch_utils import ( - create_batch + create_batch_group, + create_batch_group_conent ) __all__ = [ @@ -148,5 +149,6 @@ __all__ = [ "modify_preset_file", # batch utils - "create_batch" + "create_batch_group", + "create_batch_group_conent" ] diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index 
43742c6e4f..26f324090b 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -1,66 +1,134 @@ import flame -def create_batch(name, frame_start, frame_duration, **kwargs): +def create_batch_group( + name, + frame_start, + frame_duration, + update_batch_group=None, + **kwargs +): """Create Batch Group in active project's Desktop Args: name (str): name of batch group to be created frame_start (int): start frame of batch frame_end (int): end frame of batch + update_batch_group (PyBatch)[optional]: batch group to update + + Return: + PyBatch: active flame batch group """ + # make sure some batch obj is present + batch_group = update_batch_group or flame.batch + schematic_reels = kwargs.get("shematic_reels") or ['LoadedReel1'] shelf_reels = kwargs.get("shelf_reels") or ['ShelfReel1'] - write_pref = kwargs["write_pref"] handle_start = kwargs.get("handleStart") or 0 handle_end = kwargs.get("handleEnd") or 0 frame_start -= handle_start frame_duration += handle_start + handle_end - # Create batch group with name, start_frame value, duration value, - # set of schematic reel names, set of shelf reel names - bgroup = flame.batch.create_batch_group( - name, - start_frame=frame_start, - duration=frame_duration, - reels=schematic_reels, - shelf_reels=shelf_reels - ) + if not update_batch_group: + # Create batch group with name, start_frame value, duration value, + # set of schematic reel names, set of shelf reel names + batch_group = batch_group.create_batch_group( + name, + start_frame=frame_start, + duration=frame_duration, + reels=schematic_reels, + shelf_reels=shelf_reels + ) + else: + batch_group.name = name + batch_group.start_frame = frame_start + batch_group.duration = frame_duration + + # add reels to batch group + _add_reels_to_batch_group( + batch_group, schematic_reels, shelf_reels) + + # TODO: also update write node if there is any + # TODO: also update loaders to start from correct frameStart if kwargs.get("switch_batch_tab"): # use this command to switch to the batch tab - flame.batch.go_to() + batch_group.go_to() - comp_node = flame.batch.create_node("Comp") + return batch_group - # TODO: convert this to iterational processing, - # so it could be driven from `imageio` settigns - # create write node - write_node = flame.batch.create_node('Write File') - # assign attrs - write_node.name = write_pref["name"] - write_node.media_path = write_pref["media_path"] - write_node.media_path_pattern = write_pref["media_path_pattern"] - write_node.create_clip = write_pref["create_clip"] - write_node.include_setup = write_pref["include_setup"] - write_node.create_clip_path = write_pref["create_clip_path"] - write_node.include_setup_path = write_pref["include_setup_path"] - write_node.file_type = write_pref["file_type"] - write_node.format_extension = write_pref["format_extension"] - write_node.bit_depth = write_pref["bit_depth"] - write_node.compress = write_pref["compress"] - write_node.compress_mode = write_pref["compress_mode"] - write_node.frame_index_mode = write_pref["frame_index_mode"] - write_node.frame_padding = write_pref["frame_padding"] - write_node.version_mode = write_pref["version_mode"] - write_node.version_name = write_pref["version_name"] - write_node.version_padding = write_pref["version_padding"] - flame.batch.connect_nodes(comp_node, "Result", write_node, "Front") +def _add_reels_to_batch_group(batch_group, reels, shelf_reels): + # update or create defined reels + # helper variables + reel_names = [ + r.name.get_value() + for r in 
batch_group.reels + ] + shelf_reel_names = [ + r.name.get_value() + for r in batch_group.shelf_reels + ] + # add schematic reels + for _r in reels: + if _r in reel_names: + continue + batch_group.create_reel(_r) + + # add shelf reels + for _sr in shelf_reels: + if _sr in shelf_reel_names: + continue + batch_group.create_shelf_reel(_sr) + + +def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): + """Creating batch group with links + + Args: + batch_nodes (list of dict): each dict is node definition + batch_links (list of dict): each dict is link definition + batch_group (PyBatch, optional): batch group. Defaults to None. + """ + # make sure some batch obj is present + batch_group = batch_group or flame.batch + + created_nodes = {} + for node in batch_nodes: + # NOTE: node_props needs to be ideally OrederDict type + node_id, node_type, node_props = ( + node["id"], node["type"], node["properties"]) + + # create batch node + batch_node = batch_group.create_node(node_type) + + # set attributes found in node props + for key, value in node_props.items(): + if not hasattr(batch_node, key): + continue + setattr(batch_node, key, value) + + # add created node for possible linking + created_nodes[node_id] = batch_node + + # link nodes to each other + for link in batch_links: + _from_n, _to_n = link["from_node"], link["to_node"] + + # check if all linking nodes are available + if not all([ + created_nodes.get(_from_n["id"]), + created_nodes.get(_to_n["id"]) + ]): + continue + + # link nodes in defined link + batch_group.connect_nodes( + created_nodes[_from_n["id"]], _from_n["connector"], + created_nodes[_to_n["id"]], _to_n["connector"] + ) # sort batch nodes - flame.batch.organize() - - return bgroup + batch_group.organize() diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 979134bbfe..524d9b1ac2 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -1,5 +1,6 @@ import os import copy +from collections import OrderedDict from pprint import pformat import pyblish from openpype.lib import get_workdir @@ -30,12 +31,48 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # create or get already created batch group bgroup = self._get_batch_group(instance, task_data) + # add batch group content + self._add_nodes_to_batch_with_links(instance, task_data, bgroup) + # load plate to batch group self.log.info("Loading subset `{}` into batch `{}`".format( instance.data["subset"], bgroup.name.get_value() )) self._load_clip_to_context(instance, bgroup) + def _add_nodes_to_batch_with_links(self, instance, task_data, batch_group): + # get write file node properties > OrederDict because order does mater + write_pref_data = self._get_write_prefs(instance, task_data) + + batch_nodes = [ + { + "type": "comp", + "properties": {}, + "id": "comp_node01" + }, + { + "type": "Write File", + "properties": write_pref_data, + "id": "write_file_node01" + } + ] + batch_links = [ + { + "from_node": { + "id": "comp_node01", + "connector": "Result" + }, + "to_node": { + "id": "write_file_node01", + "connector": "Front" + } + } + ] + + # add nodes into batch group + opfapi.create_batch_group_conent( + batch_nodes, batch_links, batch_group) + def _load_clip_to_context(self, instance, bgroup): # get all loaders for host loaders_by_name = { @@ -123,13 +160,11 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): task_name = 
task_data["name"] batchgroup_name = "{}_{}".format(asset_name, task_name) - write_pref_data = self._get_write_prefs(instance, task_data) batch_data = { "shematic_reels": [ "OP_LoadedReel" ], - "write_pref": write_pref_data, "handleStart": handle_start, "handleEnd": handle_end } @@ -143,21 +178,24 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): self.log.info( "Creating new batch group: {}".format(batchgroup_name)) # create batch with utils - bgroup = opfapi.create_batch( + bgroup = opfapi.create_batch_group( batchgroup_name, frame_start, frame_duration, **batch_data ) + else: self.log.info( "Updating batch group: {}".format(batchgroup_name)) # update already created batch group - bgroup.name = batchgroup_name - bgroup.start_frame = frame_start - bgroup.duration = frame_duration - # TODO: also update write node if there is any - # TODO: also update loaders to start from correct frameStart + bgroup = opfapi.create_batch_group( + batchgroup_name, + frame_start, + frame_duration, + update_batch_group=bgroup, + **batch_data + ) return bgroup @@ -249,25 +287,31 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): version_name = "v" version_padding = 3 - return { - "name": name, - "media_path": media_path, - "media_path_pattern": media_path_pattern, - "create_clip": create_clip, - "include_setup": include_setup, - "create_clip_path": create_clip_path, - "include_setup_path": include_setup_path, - "file_type": file_type, - "format_extension": format_extension, - "bit_depth": bit_depth, - "compress": compress, - "compress_mode": compress_mode, - "frame_index_mode": frame_index_mode, - "frame_padding": frame_padding, - "version_mode": version_mode, - "version_name": version_name, - "version_padding": version_padding - } + # return it as ordered dict + reutrn_dict = OrderedDict() + # need to make sure the order of keys is correct + for item in ( + ("name", name), + ("media_path", media_path), + ("media_path_pattern", media_path_pattern), + ("create_clip", create_clip), + ("include_setup", include_setup), + ("create_clip_path", create_clip_path), + ("include_setup_path", include_setup_path), + ("file_type", file_type), + ("format_extension", format_extension), + ("bit_depth", bit_depth), + ("compress", compress), + ("compress_mode", compress_mode), + ("frame_index_mode", frame_index_mode), + ("frame_padding", frame_padding), + ("version_mode", version_mode), + ("version_name", version_name), + ("version_padding", version_padding) + ): + reutrn_dict.update({item[0]: item[1]}) + + return reutrn_dict def _get_shot_task_dir_path(self, instance, task_data): project_doc = instance.data["projectEntity"] From 1c6164005c89e8cf26a15e642c991a40a1261e9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 13:24:43 +0200 Subject: [PATCH 119/207] wrapper around settings content is splitter so projects view can be resized --- openpype/tools/settings/settings/categories.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index a5b5cd40f0..c8ade5fcdb 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -216,7 +216,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): def create_ui(self): self.modify_defaults_checkbox = None - conf_wrapper_widget = QtWidgets.QWidget(self) + conf_wrapper_widget = QtWidgets.QSplitter(self) configurations_widget = QtWidgets.QWidget(conf_wrapper_widget) # Breadcrumbs/Path 
widget @@ -294,10 +294,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): configurations_layout.addWidget(scroll_widget, 1) - conf_wrapper_layout = QtWidgets.QHBoxLayout(conf_wrapper_widget) - conf_wrapper_layout.setContentsMargins(0, 0, 0, 0) - conf_wrapper_layout.setSpacing(0) - conf_wrapper_layout.addWidget(configurations_widget, 1) + conf_wrapper_widget.addWidget(configurations_widget) main_layout = QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) @@ -327,7 +324,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): self.breadcrumbs_model = None self.refresh_btn = refresh_btn - self.conf_wrapper_layout = conf_wrapper_layout + self.conf_wrapper_widget = conf_wrapper_widget self.main_layout = main_layout self.ui_tweaks() @@ -818,7 +815,9 @@ class ProjectWidget(SettingsCategoryWidget): project_list_widget = ProjectListWidget(self) - self.conf_wrapper_layout.insertWidget(0, project_list_widget, 0) + self.conf_wrapper_widget.insertWidget(0, project_list_widget) + self.conf_wrapper_widget.setStretchFactor(0, 0) + self.conf_wrapper_widget.setStretchFactor(1, 1) project_list_widget.project_changed.connect(self._on_project_change) project_list_widget.version_change_requested.connect( From c966b96e059e4344027542f5db2d42cc80a39a3c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 13:29:12 +0200 Subject: [PATCH 120/207] fixed default value of use sequence for review --- .../modules/deadline/plugins/publish/submit_publish_job.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 921b172f2b..3c4e0d2913 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -531,10 +531,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # expected files contains more explicitly and from what # should be review made. 
# - "review" tag is never added when is set to 'False' - use_sequence_for_review = instance.get( - "useSequenceForReview", True - ) - if use_sequence_for_review: + if instance["useSequenceForReview"]: # if filtered aov name is found in filename, toggle it for # preview video rendering for app in self.aov_filter.keys(): @@ -737,7 +734,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "resolutionHeight": data.get("resolutionHeight", 1080), "multipartExr": data.get("multipartExr", False), "jobBatchName": data.get("jobBatchName", ""), - "useSequenceForReview": data.get("useSequenceForReview") + "useSequenceForReview": data.get("useSequenceForReview", True) } if "prerender" in instance.data["families"]: From c2a41760676afceb065351520db9cfeec62b9d33 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 13:30:43 +0200 Subject: [PATCH 121/207] flame: fix integrating batch group to loader with bgroup --- openpype/hosts/flame/plugins/load/load_clip_batch.py | 2 +- .../hosts/flame/plugins/publish/integrate_batch_group.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 3c13d88d3a..5de3226035 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -26,7 +26,7 @@ class LoadClipBatch(opfapi.ClipLoader): def load(self, context, name, namespace, options): # get flame objects - self.batch = flame.batch + self.batch = options.get("batch") or flame.batch # load clip to timeline and get main variables namespace = namespace diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 524d9b1ac2..c70c2baa4a 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -130,7 +130,10 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): try: op_pipeline.load.load_with_repre_context( loader_plugin, repre_context, **{ - "data": {"workdir": self.task_workdir} + "data": { + "workdir": self.task_workdir, + "batch": bgroup + } }) except op_pipeline.load.IncompatibleLoaderError as msg: self.log.error( From 53fba93840cc8af47dad7d274880de83146b558f Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 7 Apr 2022 12:14:49 +0000 Subject: [PATCH 122/207] [Automated] Bump version --- CHANGELOG.md | 51 +++++++++++++++++---------------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 21 insertions(+), 34 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a53311d70..e17ab74293 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.9.3-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.3-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...HEAD) @@ -10,16 +10,28 @@ **🆕 New features** +- Ftrack: Add description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027) - Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988) -- Maya to Unreal \> Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) +- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) **🚀 Enhancements** +- Ftrack: Add more options for note text of integrate ftrack note 
[\#3025](https://github.com/pypeclub/OpenPype/pull/3025) +- Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) +- Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) - Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) +- Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) - Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) **🐛 Bug fixes** +- Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033) +- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) +- General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028) +- Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) +- AfterEffects: Added creating subset name for workfile from template [\#3023](https://github.com/pypeclub/OpenPype/pull/3023) +- General: Add example addons to ignored [\#3022](https://github.com/pypeclub/OpenPype/pull/3022) +- Maya: Remove missing import [\#3017](https://github.com/pypeclub/OpenPype/pull/3017) - Ftrack: multiple reviewable componets [\#3012](https://github.com/pypeclub/OpenPype/pull/3012) - Tray publisher: Fixes after code movement [\#3010](https://github.com/pypeclub/OpenPype/pull/3010) - Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) @@ -27,6 +39,7 @@ **Merged pull requests:** +- Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030) - General: adding limitations for pyright [\#2994](https://github.com/pypeclub/OpenPype/pull/2994) ## [3.9.2](https://github.com/pypeclub/OpenPype/tree/3.9.2) (2022-04-04) @@ -58,7 +71,8 @@ - Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) - General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923) - Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) -- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) +- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) **🐛 Bug fixes** @@ -92,7 +106,6 @@ - General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931) - General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927) - General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918) -- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) - General: Move remaining plugins from avalon [\#2912](https://github.com/pypeclub/OpenPype/pull/2912) **Merged pull requests:** @@ -108,11 +121,9 @@ **🚀 Enhancements** -- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) -- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) +- Settings UI: Add simple tooltips for 
settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) - nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) -- Nuke: ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) -- Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) +- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) **🐛 Bug fixes** @@ -121,39 +132,15 @@ - Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) - SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) - Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) -- General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) -- General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) -- Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) **🔀 Refactored code** - General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) -- General: Move loader logic from avalon to openpype [\#2886](https://github.com/pypeclub/OpenPype/pull/2886) ## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.0-nightly.9...3.9.0) -### 📖 Documentation - -- Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878) - -**🚀 Enhancements** - -- General: Subset name filtering in ExtractReview outpus [\#2872](https://github.com/pypeclub/OpenPype/pull/2872) -- NewPublisher: Descriptions and Icons in creator dialog [\#2867](https://github.com/pypeclub/OpenPype/pull/2867) - -**🐛 Bug fixes** - -- General: Missing time function [\#2877](https://github.com/pypeclub/OpenPype/pull/2877) -- Deadline: Fix plugin name for tile assemble [\#2868](https://github.com/pypeclub/OpenPype/pull/2868) -- Nuke: gizmo precollect fix [\#2866](https://github.com/pypeclub/OpenPype/pull/2866) -- General: Fix hardlink for windows [\#2864](https://github.com/pypeclub/OpenPype/pull/2864) - -**🔀 Refactored code** - -- Refactor: move webserver tool to openpype [\#2876](https://github.com/pypeclub/OpenPype/pull/2876) - ## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.2-nightly.3...3.8.2) diff --git a/openpype/version.py b/openpype/version.py index c314151e9b..1dbbab64de 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.3-nightly.1" +__version__ = "3.9.3-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index dd1a666dea..aa00f4022f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.3-nightly.1" # OpenPype +version = "3.9.3-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 616d0cf6208ce1be4134b63a9a6722b30bca8252 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 7 Apr 2022 12:23:14 +0000 Subject: [PATCH 123/207] [Automated] Release --- CHANGELOG.md | 9 ++++----- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e17ab74293..f1e7d5d9e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.9.3-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...3.9.3) ### 📖 Documentation @@ -20,6 +20,7 @@ - Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) - Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) - Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) +- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) - Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) - Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) @@ -59,7 +60,6 @@ **🚀 Enhancements** -- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) - TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) - Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) - General: `METADATA\_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980) @@ -71,7 +71,6 @@ - Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) - General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923) - Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) -- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) - Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) **🐛 Bug fixes** @@ -121,7 +120,7 @@ **🚀 Enhancements** -- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) +- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) - nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) - Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) diff --git a/openpype/version.py b/openpype/version.py index 1dbbab64de..97aa585ca7 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.3-nightly.2" +__version__ = "3.9.3" diff --git a/pyproject.toml b/pyproject.toml index aa00f4022f..006f6eb4e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.3-nightly.2" # OpenPype +version = "3.9.3" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 614c49b57f4acd32d9d6db9cf97f6e0c628ce33d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 14:27:00 +0200 Subject: [PATCH 124/207] flame: fix wiretap with umask --- openpype/hosts/flame/api/scripts/wiretap_com.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/scripts/wiretap_com.py b/openpype/hosts/flame/api/scripts/wiretap_com.py index 14fbcec954..d904e7850a 100644 --- a/openpype/hosts/flame/api/scripts/wiretap_com.py +++ b/openpype/hosts/flame/api/scripts/wiretap_com.py @@ -185,7 +185,9 @@ class WireTapCom(object): exit_code = subprocess.call( project_create_cmd, - cwd=os.path.expanduser('~')) + cwd=os.path.expanduser('~'), + preexec_fn=_subprocess_preexec_fn + ) if exit_code != 0: RuntimeError("Cannot create project in flame db") @@ -448,7 +450,9 @@ class WireTapCom(object): exit_code = subprocess.call( project_colorspace_cmd, - cwd=os.path.expanduser('~')) + cwd=os.path.expanduser('~'), + preexec_fn=_subprocess_preexec_fn + ) if exit_code != 0: RuntimeError("Cannot set colorspace {} on project {}".format( @@ -456,6 +460,11 @@ class WireTapCom(object): )) +def _subprocess_preexec_fn(): + os.setpgrp() + os.umask(0o022) + + if __name__ == "__main__": # get json exchange data json_path = sys.argv[-1] From ce4aa40f217857ae527800667d243e048ff40159 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 14:52:13 +0200 Subject: [PATCH 125/207] flame: fixing umask to 0o000 to reflect permissions to 0777 --- openpype/hosts/flame/api/scripts/wiretap_com.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/scripts/wiretap_com.py b/openpype/hosts/flame/api/scripts/wiretap_com.py index d904e7850a..f78102c0a1 100644 --- a/openpype/hosts/flame/api/scripts/wiretap_com.py +++ b/openpype/hosts/flame/api/scripts/wiretap_com.py @@ -462,7 +462,7 @@ class WireTapCom(object): def _subprocess_preexec_fn(): os.setpgrp() - os.umask(0o022) + os.umask(0o000) if __name__ == "__main__": From cd59b3af66e6bd39e20cce1ec2d1195a26225e9e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 14:53:29 +0200 Subject: [PATCH 126/207] flame: make sure nodes are not duplicated and update --- openpype/hosts/flame/api/batch_utils.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index 26f324090b..23d16f8d1a 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -94,15 +94,25 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): """ # make sure some batch obj is present batch_group = batch_group or flame.batch - + all_batch_nodes = { + b.name.get_value(): b + for b in batch_group.nodes + } created_nodes = {} for node in batch_nodes: # NOTE: node_props needs to be ideally OrederDict type node_id, node_type, node_props = ( node["id"], node["type"], node["properties"]) - # create batch node - batch_node = batch_group.create_node(node_type) + # get node name for checking if exists + node_name = node_props.get("name") or node_id + + if all_batch_nodes.get(node_name): + # update existing batch node + batch_node = all_batch_nodes[node_name] + else: + # create new batch node + batch_node = batch_group.create_node(node_type) # set attributes found in node props for key, value in node_props.items(): From 4ed6c0257ece24cc254afb4edc9a4cc698c4dd53 Mon Sep 17 00:00:00 
2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 15:05:46 +0200 Subject: [PATCH 127/207] flame: returning all batch nodes --- openpype/hosts/flame/api/batch_utils.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index 23d16f8d1a..20118c249c 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -91,6 +91,9 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): batch_nodes (list of dict): each dict is node definition batch_links (list of dict): each dict is link definition batch_group (PyBatch, optional): batch group. Defaults to None. + + Return: + dict: all batch nodes {name or id: PyNode} """ # make sure some batch obj is present batch_group = batch_group or flame.batch @@ -98,7 +101,6 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): b.name.get_value(): b for b in batch_group.nodes } - created_nodes = {} for node in batch_nodes: # NOTE: node_props needs to be ideally OrederDict type node_id, node_type, node_props = ( @@ -121,7 +123,7 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): setattr(batch_node, key, value) # add created node for possible linking - created_nodes[node_id] = batch_node + all_batch_nodes[node_id] = batch_node # link nodes to each other for link in batch_links: @@ -129,16 +131,18 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): # check if all linking nodes are available if not all([ - created_nodes.get(_from_n["id"]), - created_nodes.get(_to_n["id"]) + all_batch_nodes.get(_from_n["id"]), + all_batch_nodes.get(_to_n["id"]) ]): continue # link nodes in defined link batch_group.connect_nodes( - created_nodes[_from_n["id"]], _from_n["connector"], - created_nodes[_to_n["id"]], _to_n["connector"] + all_batch_nodes[_from_n["id"]], _from_n["connector"], + all_batch_nodes[_to_n["id"]], _to_n["connector"] ) # sort batch nodes batch_group.organize() + + return all_batch_nodes From 4da106cf3def8ee5d2dfb91076f0fecb7f05bc0e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 15:06:14 +0200 Subject: [PATCH 128/207] flame: debug log nodes attrs --- .../flame/plugins/publish/integrate_batch_group.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index c70c2baa4a..350acdfa90 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -32,7 +32,16 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): bgroup = self._get_batch_group(instance, task_data) # add batch group content - self._add_nodes_to_batch_with_links(instance, task_data, bgroup) + all_batch_nodes = self._add_nodes_to_batch_with_links( + instance, task_data, bgroup) + + for name, node in all_batch_nodes: + self.log.debug("name: {}, dir: {}".format( + name, dir(node) + )) + self.log.debug("__ node.attributes: {}".format( + node.attributes + )) # load plate to batch group self.log.info("Loading subset `{}` into batch `{}`".format( @@ -70,7 +79,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): ] # add nodes into batch group - opfapi.create_batch_group_conent( + return opfapi.create_batch_group_conent( batch_nodes, batch_links, batch_group) def _load_clip_to_context(self, instance, bgroup): 
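
For orientation between the patches above and the ones that follow: `create_batch_group_conent` consumes plain node and link definitions and, after PATCH 127, returns the created or updated batch nodes keyed by name or id. The sketch below is illustrative only — the ids, node types and property values are made up (they are not taken from these patches), and it assumes it runs inside Flame's Python environment where the `flame` module is importable:

```python
import flame

import openpype.hosts.flame.api as opfapi

# Hypothetical node definitions. "properties" should keep a stable key order
# (the integrator plugin builds it as an OrderedDict) because the helper
# applies the attributes in the order the dict yields them.
batch_nodes = [
    {"id": "comp_node01", "type": "comp", "properties": {}},
    {
        "id": "write_file_node01",
        "type": "Write File",
        "properties": {"name": "renderMain", "frame_padding": 4},
    },
]

# Connect the Comp "Result" output to the Write File "Front" input.
batch_links = [
    {
        "from_node": {"id": "comp_node01", "connector": "Result"},
        "to_node": {"id": "write_file_node01", "connector": "Front"},
    }
]

# Returns {node name or id: PyNode} for the active batch group.
all_nodes = opfapi.create_batch_group_conent(
    batch_nodes, batch_links, flame.batch
)
```

This mirrors the structures that the `IntegrateBatchGroup` plugin assembles from the Write File preferences earlier in the series.
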
From d0a2a781aea6009a2b928c7752ef64ae87c47fab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 15:08:45 +0200 Subject: [PATCH 129/207] flame: fixing head and tail --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index fe9bce5232..0af769a380 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -72,9 +72,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # solve handles length marker_data["handleStart"] = min( - marker_data["handleStart"], head) + marker_data["handleStart"], abs(head)) marker_data["handleEnd"] = min( - marker_data["handleEnd"], tail) + marker_data["handleEnd"], abs(tail)) with_audio = bool(marker_data.pop("audio")) From 1535b4712409a57bbbbddb73a65261106423ec7b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 15:10:23 +0200 Subject: [PATCH 130/207] flame: add hack comment --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 0af769a380..95c2002bd9 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -247,6 +247,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): head = clip_data.get("segment_head") tail = clip_data.get("segment_tail") + # HACK: it is here to serve for versions bellow 2021.1 if not head: head = int(clip_data["source_in"]) - int(first_frame) if not tail: From 304584573b7c67aac46f6b255f1ceb09fe1b4d7f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 15:14:32 +0200 Subject: [PATCH 131/207] flame: missing dict items function call --- openpype/hosts/flame/plugins/publish/integrate_batch_group.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 350acdfa90..cac99a25ac 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -35,7 +35,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): all_batch_nodes = self._add_nodes_to_batch_with_links( instance, task_data, bgroup) - for name, node in all_batch_nodes: + for name, node in all_batch_nodes.items(): self.log.debug("name: {}, dir: {}".format( name, dir(node) )) From 16bd11083da52c5836e183e8895524279378f08e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 15:22:09 +0200 Subject: [PATCH 132/207] flame: set node name if it doesn't exists in node props then set it from node_id --- openpype/hosts/flame/api/batch_utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index 20118c249c..d4c8294466 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -107,7 +107,7 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): node["id"], node["type"], node["properties"]) # get node name for checking if exists - 
node_name = node_props.get("name") or node_id + node_name = node_props.pop("name", None) or node_id if all_batch_nodes.get(node_name): # update existing batch node @@ -116,6 +116,9 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): # create new batch node batch_node = batch_group.create_node(node_type) + # set name + setattr(batch_node, "name", node_name) + # set attributes found in node props for key, value in node_props.items(): if not hasattr(batch_node, key): From aa0176aeb71af90fd8808d30ffed0098ac55a8b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 15:58:01 +0200 Subject: [PATCH 133/207] modified extract slate frame to add values based on templates --- .../plugins/publish/extract_slate_frame.py | 58 +++++++++++++++---- 1 file changed, 47 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index e917a28046..9737d4d5f8 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -1,6 +1,8 @@ import os import nuke +import six import pyblish.api + import openpype from openpype.hosts.nuke.api.lib import maintained_selection @@ -18,6 +20,10 @@ class ExtractSlateFrame(openpype.api.Extractor): families = ["slate"] hosts = ["nuke"] + key_value_mapping = { + "f_submission_note": [True, "{comment}"], + "f_submitting_for": [True, "{intent[value]}"] + } def process(self, instance): if hasattr(self, "viewer_lut_raw"): @@ -129,9 +135,7 @@ class ExtractSlateFrame(openpype.api.Extractor): for node in temporary_nodes: nuke.delete(node) - def get_view_process_node(self): - # Select only the target node if nuke.selectedNodes(): [n.setSelected(False) for n in nuke.selectedNodes()] @@ -162,13 +166,45 @@ class ExtractSlateFrame(openpype.api.Extractor): return comment = instance.context.data.get("comment") - intent_value = instance.context.data.get("intent") - if intent_value and isinstance(intent_value, dict): - intent_value = intent_value.get("value") + intent = instance.context.data.get("intent") + if not isinstance(intent, dict): + intent = { + "label": intent, + "value": intent + } - try: - node["f_submission_note"].setValue(comment) - node["f_submitting_for"].setValue(intent_value or "") - except NameError: - return - instance.data.pop("slateNode") + fill_data = { + "comment": comment, + "intent": intent + } + + for key, value in self.key_value_mapping.items(): + enabled, template = value + if not enabled: + continue + + try: + value = template.format(**fill_data) + + except ValueError: + self.log.warning( + "Couldn't fill template \"{}\" with data: {}".format( + template, fill_data + ), + exc_info=True + ) + continue + + except KeyError: + self.log.warning( + "Template contains unknown key", + exc_info=True + ) + continue + + try: + node[key].setValue(value) + except NameError: + self.log.warning( + "Failed to set value \"{}\" on node attribute \"{}\"" + ).format(value)) From 18c53a527a22c1a52d12c181d4d6302715ddf148 Mon Sep 17 00:00:00 2001 From: jrsndlr Date: Thu, 7 Apr 2022 16:06:20 +0200 Subject: [PATCH 134/207] Nuke Tut doc fix remove artist_host_nuke.md, fix in-page link to artist_hosts_nuke.md --- website/docs/artist_hosts_nuke.md | 145 -------------------------- website/docs/artist_hosts_nuke_tut.md | 2 +- 2 files changed, 1 insertion(+), 146 deletions(-) delete mode 100644 website/docs/artist_hosts_nuke.md diff --git a/website/docs/artist_hosts_nuke.md 
b/website/docs/artist_hosts_nuke.md deleted file mode 100644 index 1e02599570..0000000000 --- a/website/docs/artist_hosts_nuke.md +++ /dev/null @@ -1,145 +0,0 @@ ---- -id: artist_hosts_nuke -title: Nuke -sidebar_label: Nuke ---- - -:::important -After Nuke starts it will automatically **Apply All Settings** for you. If you are sure the settings are wrong just contact your supervisor and he will set them correctly for you in project database. -::: - -:::note -The workflows are identical for both. We are supporting versions **`11.0`** and above. -::: - -## OpenPype global tools - -- [Set Context](artist_tools.md#set-context) -- [Work Files](artist_tools.md#workfiles) -- [Create](artist_tools.md#creator) -- [Load](artist_tools.md#loader) -- [Manage (Inventory)](artist_tools.md#inventory) -- [Publish](artist_tools.md#publisher) -- [Library Loader](artist_tools.md#library-loader) - -## Nuke specific tools - -
-
- -### Set Frame Ranges - -Use this feature in case you are not sure the frame range is correct. - -##### Result - -- setting Frame Range in script settings -- setting Frame Range in viewers (timeline) - -
-
- -![Set Frame Ranges](assets/nuke_setFrameRanges.png) - -
-
- - -
- -![Set Frame Ranges Timeline](assets/nuke_setFrameRanges_timeline.png) - -
- -1. limiting to Frame Range without handles -2. **Input** handle on start -3. **Output** handle on end - -
-
- -### Set Resolution - -
-
- - -This menu item will set correct resolution format for you defined by your production. - -##### Result - -- creates new item in formats with project name -- sets the new format as used - -
-
- -![Set Resolution](assets/nuke_setResolution.png) - -
-
- - -### Set Colorspace - -
-
- -This menu item will set correct Colorspace definitions for you. All has to be configured by your production (Project coordinator). - -##### Result - -- set Colorspace in your script settings -- set preview LUT to your viewers -- set correct colorspace to all discovered Read nodes (following expression set in settings) - -
-
- -![Set Colorspace](assets/nuke_setColorspace.png) - -
-
- - -### Apply All Settings - -
-
- -It is usually enough if you once per while use this option just to make yourself sure the workfile is having set correct properties. - -##### Result - -- set Frame Ranges -- set Colorspace -- set Resolution - -
-
- -![Apply All Settings](assets/nuke_applyAllSettings.png) - -
-
- -### Build Workfile - -
-
- -This tool will append all available subsets into an actual node graph. It will look into database and get all last [versions](artist_concepts.md#version) of available [subsets](artist_concepts.md#subset). - - -##### Result - -- adds all last versions of subsets (rendered image sequences) as read nodes -- adds publishable write node as `renderMain` subset - -
-
- -![Build First Work File](assets/nuke_buildFirstWorkfile.png) - -
-
\ No newline at end of file diff --git a/website/docs/artist_hosts_nuke_tut.md b/website/docs/artist_hosts_nuke_tut.md index 4b0ef7a78a..eefb213dd2 100644 --- a/website/docs/artist_hosts_nuke_tut.md +++ b/website/docs/artist_hosts_nuke_tut.md @@ -161,7 +161,7 @@ Nuke OpenPype menu shows the current context Launching Nuke with context stops your timer, and starts the clock on the shot and task you picked. -Openpype makes initial setup for your Nuke script. It is the same as running [Apply All Settings](artist_hosts_nuke.md#apply-all-settings) from the OpenPype menu. +Openpype makes initial setup for your Nuke script. It is the same as running [Apply All Settings](artist_hosts_nuke_tut.md#apply-all-settings) from the OpenPype menu. - Reads frame range and resolution from Avalon database, sets it in Nuke Project Settings, Creates Viewer node, sets it’s range and indicates handles by In and Out points. From 5bc3516baf676d115a992a6f63828a5c110c2556 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 16:43:46 +0200 Subject: [PATCH 135/207] added settings for slate templates --- .../plugins/publish/extract_slate_frame.py | 15 ++++-- .../defaults/project_settings/nuke.json | 14 +++++- .../schemas/schema_nuke_publish.json | 47 +++++++++++++++++++ 3 files changed, 70 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index 9737d4d5f8..f71d3ffff5 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -1,6 +1,7 @@ import os import nuke -import six +import copy + import pyblish.api import openpype @@ -20,9 +21,12 @@ class ExtractSlateFrame(openpype.api.Extractor): families = ["slate"] hosts = ["nuke"] + # Settings values + # - can be extended by other attributes from node in the future key_value_mapping = { "f_submission_note": [True, "{comment}"], - "f_submitting_for": [True, "{intent[value]}"] + "f_submitting_for": [True, "{intent[value]}"], + "f_vfx_scope_of_work": [False, ""] } def process(self, instance): @@ -173,10 +177,11 @@ class ExtractSlateFrame(openpype.api.Extractor): "value": intent } - fill_data = { + fill_data = copy.deepcopy(instance.data["anatomyData"]) + fill_data.update({ "comment": comment, "intent": intent - } + }) for key, value in self.key_value_mapping.items(): enabled, template = value @@ -205,6 +210,6 @@ class ExtractSlateFrame(openpype.api.Extractor): try: node[key].setValue(value) except NameError: - self.log.warning( + self.log.warning(( "Failed to set value \"{}\" on node attribute \"{}\"" ).format(value)) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 44d7f2d9d0..bdccb9b38e 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -160,7 +160,19 @@ } }, "ExtractSlateFrame": { - "viewer_lut_raw": false + "viewer_lut_raw": false, + "f_submission_note": [ + true, + "{comment}" + ], + "f_submitting_for": [ + true, + "{intent[value]}" + ], + "f_vfx_scope_of_work": [ + false, + "" + ] }, "IncrementScriptVersion": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 27e8957786..8dbf224ce5 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -389,6 +389,53 @@ "type": "boolean", "key": "viewer_lut_raw", "label": "Viewer LUT raw" + }, + { + "type": "separator" + }, + { + "type": "label", + "label": "Fill specific slate node values with templates. Uncheck the checkbox to not change the value.", + "word_wrap": true + }, + { + "type": "list-strict", + "key": "f_submission_note", + "label": "Submission Note", + "object_types": [ + { + "type": "boolean" + }, + { + "type": "text" + } + ] + }, + { + "type": "list-strict", + "key": "f_submitting_for", + "label": "Submission For", + "object_types": [ + { + "type": "boolean" + }, + { + "type": "text" + } + ] + }, + { + "type": "list-strict", + "key": "f_vfx_scope_of_work", + "label": "VFX Scope Of Work", + "object_types": [ + { + "type": "boolean" + }, + { + "type": "text" + } + ] } ] }, From 208a6b0ecae86bca96dad9fc0984df16f3b86d7a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Apr 2022 17:08:16 +0200 Subject: [PATCH 136/207] Fix Python requirements to 3.7.9 According to some cases on Discord, 3.7.8 is not enough with PySide2 combination --- website/docs/dev_requirements.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index 6c87054ba0..a10aea7865 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -14,7 +14,7 @@ The main things you will need to run and build pype are: - **Terminal** in your OS - PowerShell 5.0+ (Windows) - Bash (Linux) -- [**Python 3.7.8**](#python) or higher +- [**Python 3.7.9**](#python) or higher - [**MongoDB**](#database) From afbacb1944b690f03e31fb129d39476880c3934a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 17:14:33 +0200 Subject: [PATCH 137/207] flame: fixing setter for node name --- openpype/hosts/flame/api/batch_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/batch_utils.py b/openpype/hosts/flame/api/batch_utils.py index d4c8294466..9d419a4a90 100644 --- a/openpype/hosts/flame/api/batch_utils.py +++ b/openpype/hosts/flame/api/batch_utils.py @@ -117,7 +117,7 @@ def create_batch_group_conent(batch_nodes, batch_links, batch_group=None): batch_node = batch_group.create_node(node_type) # set name - setattr(batch_node, "name", node_name) + batch_node.name.set_value(node_name) # set attributes found in node props for key, value in node_props.items(): From fc6d01d0043df4e019ab89cf44d8c592e71be8e3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 7 Apr 2022 17:15:44 +0200 Subject: [PATCH 138/207] hound catch --- openpype/hosts/flame/api/lib.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 998d7dfa7e..a4d8a7f9f0 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -826,5 +826,7 @@ class MediaInfoFile: def _write_result_xml_to_file(self, file, xml_data): # save it as new file tree = cET.ElementTree(xml_data) - tree.write(file, xml_declaration=True, - method='xml', encoding='UTF-8') \ No newline at end of file + tree.write( + file, xml_declaration=True, + method='xml', encoding='UTF-8' + ) From 81b2be514f970e19ccc292bee0afba80f893cb83 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 18:47:37 +0200 Subject: [PATCH 139/207] changed order of ftrack 
collectors --- openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py | 2 +- .../modules/ftrack/plugins/publish/collect_ftrack_family.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py index 07af217fb6..436a61cc18 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py @@ -6,7 +6,7 @@ import avalon.api class CollectFtrackApi(pyblish.api.ContextPlugin): """ Collects an ftrack session and the current task id. """ - order = pyblish.api.CollectorOrder + 0.4999 + order = pyblish.api.CollectorOrder + 0.4991 label = "Collect Ftrack Api" def process(self, context): diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py index 70030acad9..95987fe42e 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -25,7 +25,7 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin): based on 'families' (editorial drives it by presence of 'review') """ label = "Collect Ftrack Family" - order = pyblish.api.CollectorOrder + 0.4998 + order = pyblish.api.CollectorOrder + 0.4990 profiles = None From a34f279685d087dda82c99a6b0c32f9ceb9cc907 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 18:47:53 +0200 Subject: [PATCH 140/207] modified labels to contain colons --- .../projects_schema/schemas/schema_nuke_publish.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 8dbf224ce5..3bf0eb3214 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -401,7 +401,7 @@ { "type": "list-strict", "key": "f_submission_note", - "label": "Submission Note", + "label": "Submission Note:", "object_types": [ { "type": "boolean" @@ -414,7 +414,7 @@ { "type": "list-strict", "key": "f_submitting_for", - "label": "Submission For", + "label": "Submission For:", "object_types": [ { "type": "boolean" @@ -427,7 +427,7 @@ { "type": "list-strict", "key": "f_vfx_scope_of_work", - "label": "VFX Scope Of Work", + "label": "VFX Scope Of Work:", "object_types": [ { "type": "boolean" From 2ce4704a981f621d4bdfbdc7af27840edf0cb409 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 18:55:43 +0200 Subject: [PATCH 141/207] added collector looking for custom attribute values and it's settings --- .../modules/ftrack/lib/custom_attributes.py | 2 +- .../publish/collect_custom_attributes_data.py | 138 ++++++++++++++++++ .../defaults/project_settings/ftrack.json | 4 + .../schema_project_ftrack.json | 25 ++++ 4 files changed, 168 insertions(+), 1 deletion(-) create mode 100644 openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py diff --git a/openpype/modules/ftrack/lib/custom_attributes.py b/openpype/modules/ftrack/lib/custom_attributes.py index 29c6b5e7f8..2f53815368 100644 --- a/openpype/modules/ftrack/lib/custom_attributes.py +++ b/openpype/modules/ftrack/lib/custom_attributes.py @@ -135,7 +135,7 @@ def query_custom_attributes( output.extend( session.query( ( - 
"select value, entity_id from {}" + "select value, entity_id, configuration_id from {}" " where entity_id in ({}) and configuration_id in ({})" ).format( table_name, diff --git a/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py b/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py new file mode 100644 index 0000000000..f04c7c7954 --- /dev/null +++ b/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py @@ -0,0 +1,138 @@ +""" +Requires: + context > ftrackSession + context > ftrackEntity + instance > ftrackEntity + +Provides: + instance > customData > ftrack +""" +import copy + +import pyblish.api + + +class CollectFtrackCustomAttributeData(pyblish.api.ContextPlugin): + """Collect custom attribute values and store them to customData. + + Data are stored into each instance in context under + instance.data["customData"]["ftrack"]. + """ + + order = pyblish.api.CollectorOrder + 0.4992 + label = "Collect Ftrack Custom Attribute Data" + + # Name of custom attributes for which will be look for + custom_attribute_keys = [] + + def process(self, context): + if not self.custom_attribute_keys: + self.log.info("Custom attribute keys are not set. Skipping") + return + + ftrack_entities_by_id = {} + default_entity_id = None + + context_entity = context.data.get("ftrackEntity") + if context_entity: + entity_id = context_entity["id"] + default_entity_id = entity_id + ftrack_entities_by_id[entity_id] = context_entity + + instances_by_entity_id = { + default_entity_id: [] + } + for instance in context: + entity = instance.data.get("ftrackEntity") + if not entity: + instances_by_entity_id[default_entity_id].append(instance) + continue + + entity_id = entity["id"] + ftrack_entities_by_id[entity_id] = entity + if entity_id not in instances_by_entity_id: + instances_by_entity_id[entity_id] = [] + instances_by_entity_id[entity_id].append(instance) + + if not ftrack_entities_by_id: + self.log.info("Ftrack entities are not set. 
Skipping") + return + + session = context.data["ftrackSession"] + custom_attr_key_by_id = self.query_attr_confs(session) + if not custom_attr_key_by_id: + self.log.info(( + "Didn't find any of defined custom attributes {}" + ).format(", ".join(self.custom_attribute_keys))) + return + + entity_ids = list(instances_by_entity_id.keys()) + values_by_entity_id = self.query_attr_values( + session, entity_ids, custom_attr_key_by_id + ) + + for entity_id, instances in instances_by_entity_id.items(): + if entity_id not in values_by_entity_id: + # Use defaut empty values + entity_id = None + + value = values_by_entity_id[entity_id] + if "customData" not in instance.data: + instance.data["customData"] = {} + instance.data["customData"]["ftrack"] = copy.deepcopy(value) + + def query_attr_values(self, session, entity_ids, custom_attr_key_by_id): + # Prepare values for query + entity_ids_joined = ",".join([ + '"{}"'.format(entity_id) + for entity_id in entity_ids + ]) + conf_ids_joined = ",".join([ + '"{}"'.format(conf_id) + for conf_id in custom_attr_key_by_id.keys() + ]) + # Query custom attribute values + value_items = session.query( + ( + "select value, entity_id, configuration_id" + " from CustomAttributeValue" + " where entity_id in ({}) and configuration_id in ({})" + ).format( + entity_ids_joined, + conf_ids_joined + ) + ).all() + + # Prepare default value output per entity id + values_by_key = { + key: None for key in self.custom_attribute_keys + } + # Prepare all entity ids that were queried + values_by_entity_id = { + entity_id: copy.deepcopy(values_by_key) + for entity_id in entity_ids + } + # Add none entity id which is used as default value + values_by_entity_id[None] = copy.deepcopy(values_by_key) + # Go through queried data and store them + for item in value_items: + conf_id = item["configuration_id"] + conf_key = custom_attr_key_by_id[conf_id] + entity_id = item["entity_id"] + values_by_entity_id[entity_id][conf_key] = item["value"] + return values_by_entity_id + + def query_attr_confs(self, session): + custom_attributes = set(self.custom_attribute_keys) + cust_attrs_query = ( + "select id, key from CustomAttributeConfiguration" + " where key in ({})" + ).format(", ".join( + ["\"{}\"".format(attr_name) for attr_name in custom_attributes] + )) + + custom_attr_confs = session.query(cust_attrs_query).all() + return { + conf["id"]: conf["key"] + for conf in custom_attr_confs + } diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 31d6a70ac7..deade08c0b 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -352,6 +352,10 @@ } ] }, + "CollectFtrackCustomAttributeData": { + "enabled": false, + "custom_attribute_keys": [] + }, "IntegrateFtrackNote": { "enabled": true, "note_template": "{intent}: {comment}", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 5ce9b24b4b..47effb3dbd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -725,6 +725,31 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CollectFtrackCustomAttributeData", + "label": "Collect Custom Attribute Data", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": 
"enabled", + "label": "Enabled" + }, + { + "type": "label", + "label": "Collect custom attributes from ftrack for ftrack entities that can be used in some templates during publishing." + }, + { + "type": "list", + "key": "custom_attribute_keys", + "label": "Custom attribute keys", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, From ab252ec7c9b7af96835ff248bce0cc75d6df6bd9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 18:56:00 +0200 Subject: [PATCH 142/207] added ability to define word wrap of labels --- openpype/settings/entities/schemas/README.md | 1 + openpype/tools/settings/settings/base.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index fbfd699937..b4bfef2972 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -745,6 +745,7 @@ How output of the schema could look like on save: ### label - add label with note or explanations - it is possible to use html tags inside the label +- set `work_wrap` to `true`/`false` if you want to enable word wrapping in UI (default: `false`) ``` { diff --git a/openpype/tools/settings/settings/base.py b/openpype/tools/settings/settings/base.py index bd48b3a966..44ec09b2ca 100644 --- a/openpype/tools/settings/settings/base.py +++ b/openpype/tools/settings/settings/base.py @@ -567,7 +567,9 @@ class GUIWidget(BaseWidget): def _create_label_ui(self): label = self.entity["label"] + word_wrap = self.entity.schema_data.get("word_wrap", False) label_widget = QtWidgets.QLabel(label, self) + label_widget.setWordWrap(word_wrap) label_widget.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction) label_widget.setObjectName("SettingsLabel") label_widget.linkActivated.connect(self._on_link_activate) From 486317cb96826185cd1257c880ee04681dd6a264 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 19:06:25 +0200 Subject: [PATCH 143/207] added custom data to slate text formatting data --- openpype/hosts/nuke/plugins/publish/extract_slate_frame.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index f71d3ffff5..6935afe144 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -179,6 +179,9 @@ class ExtractSlateFrame(openpype.api.Extractor): fill_data = copy.deepcopy(instance.data["anatomyData"]) fill_data.update({ + "custom": copy.deepcopy( + instance.data.get("customData") or {} + ), "comment": comment, "intent": intent }) From bedccd6f69540117379f7db680260ebd3bfdcea2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 19:06:35 +0200 Subject: [PATCH 144/207] added custom data to burnin custm data --- openpype/plugins/publish/extract_burnin.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index b2ca8850b6..a543083a87 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -221,11 +221,17 @@ class ExtractBurnin(openpype.api.Extractor): filled_anatomy = anatomy.format_all(burnin_data) burnin_data["anatomy"] = filled_anatomy.get_solved() - # Add context data burnin_data. 
- burnin_data["custom"] = ( + custom_data = copy.deepcopy( + instance.data.get("customData") or {} + ) + # Backwards compatibility + custom_data.update( instance.data.get("custom_burnin_data") or {} ) + # Add context data burnin_data. + burnin_data["custom"] = custom_data + # Add source camera name to burnin data camera_name = repre.get("camera_name") if camera_name: From f9c2f87f903d4f411b8b950997e258756a6aa0f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 19:09:29 +0200 Subject: [PATCH 145/207] fixed adding data to instance --- .../publish/collect_custom_attributes_data.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py b/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py index f04c7c7954..ef0e4a9ccb 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py +++ b/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py @@ -76,10 +76,17 @@ class CollectFtrackCustomAttributeData(pyblish.api.ContextPlugin): # Use defaut empty values entity_id = None - value = values_by_entity_id[entity_id] - if "customData" not in instance.data: - instance.data["customData"] = {} - instance.data["customData"]["ftrack"] = copy.deepcopy(value) + for instance in instances: + value = copy.deepcopy(values_by_entity_id[entity_id]) + if "customData" not in instance.data: + instance.data["customData"] = {} + instance.data["customData"]["ftrack"] = value + instance_label = ( + instance.data.get("label") or instance.data["name"] + ) + self.log.debug(( + "Added ftrack custom data to instance \"{}\": {}" + ).format(instance_label, value)) def query_attr_values(self, session, entity_ids, custom_attr_key_by_id): # Prepare values for query From a2eb78a3a7d3c275fb650c70cd3d67a275d80b9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 7 Apr 2022 19:17:05 +0200 Subject: [PATCH 146/207] added few more comments --- .../ftrack/plugins/publish/collect_custom_attributes_data.py | 3 +++ openpype/plugins/publish/extract_burnin.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py b/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py index ef0e4a9ccb..43fa3bc3f8 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py +++ b/openpype/modules/ftrack/plugins/publish/collect_custom_attributes_data.py @@ -17,6 +17,9 @@ class CollectFtrackCustomAttributeData(pyblish.api.ContextPlugin): Data are stored into each instance in context under instance.data["customData"]["ftrack"]. + + Hierarchical attributes are not looked up properly for that functionality + custom attribute values lookup must be extended. 
""" order = pyblish.api.CollectorOrder + 0.4992 diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index a543083a87..41c84103a6 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -224,7 +224,7 @@ class ExtractBurnin(openpype.api.Extractor): custom_data = copy.deepcopy( instance.data.get("customData") or {} ) - # Backwards compatibility + # Backwards compatibility (since 2022/04/07) custom_data.update( instance.data.get("custom_burnin_data") or {} ) From 5f1940a9982ffee2266776d0f70812853241c941 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 11:17:22 +0200 Subject: [PATCH 147/207] flame: adding maintainable temp file path --- openpype/hosts/flame/api/__init__.py | 2 ++ openpype/hosts/flame/api/lib.py | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index a0c40904ed..6744a7ff11 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -27,6 +27,7 @@ from .lib import ( get_frame_from_filename, get_padding_from_filename, maintained_object_duplication, + maintained_temp_file_path, get_clip_segment, get_batch_group_from_desktop, MediaInfoFile @@ -103,6 +104,7 @@ __all__ = [ "get_frame_from_filename", "get_padding_from_filename", "maintained_object_duplication", + "maintained_temp_file_path", "get_clip_segment", "get_batch_group_from_desktop", "MediaInfoFile", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index a4d8a7f9f0..51d48becf1 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -4,6 +4,7 @@ import re import six import json import pickle +import tempfile import itertools import contextlib import xml.etree.cElementTree as cET @@ -695,6 +696,25 @@ def maintained_object_duplication(item): flame.delete(duplicate) +@contextlib.contextmanager +def maintained_temp_file_path(suffix=None): + _suffix = suffix or "" + + try: + # Store dumped json to temporary file + temporary_file = tempfile.mktemp( + suffix=_suffix, prefix="flame_maintained_") + yield temporary_file.name.replace("\\", "/") + + except IOError as _error: + raise IOError( + "Not able to create temp json file: {}".format(_error)) from _error + + finally: + # Remove the temporary json + os.remove(temporary_file) + + def get_clip_segment(flame_clip): name = flame_clip.name.get_value() version = flame_clip.versions[0] From 03bf240816d98bd47e0944eac0a57706c7cc0869 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 11:18:10 +0200 Subject: [PATCH 148/207] flame: refactoring MediaInfoFile class so it is parentable --- openpype/hosts/flame/api/lib.py | 161 +++++++++++++++++++++++--------- 1 file changed, 116 insertions(+), 45 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 51d48becf1..de3467aa76 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -1,7 +1,6 @@ import sys import os import re -import six import json import pickle import tempfile @@ -740,19 +739,34 @@ def get_batch_group_from_desktop(name): class MediaInfoFile: - media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info" - tmp_name = "_tmp.clip" - tmp_file = None + """Class to get media info file clip data - clip_data = None - out_feed_nb_ticks = None - out_feed_fps = None - out_feed_drop_mode = None + Raises: + IOError: MEDIA_SCRIPT_PATH path doesn't 
exists + TypeError: Not able to generate clip xml data file + ET.ParseError: Missing clip in xml clip data + IOError: Not able to save xml clip data to file + + Attributes: + str: `MEDIA_SCRIPT_PATH` path to flame binary + logging.Logger: `log` logger + """ + MEDIA_SCRIPT_PATH = "/opt/Autodesk/mio/current/dl_get_media_info" log = log - def __init__(self, path): - # test if media script paht exists + _clip_data = None + _start_frame = None + _fps = None + _drop_mode = None + + def __init__(self, path, **kwargs): + + # replace log if any + if kwargs.get("log"): + self.log = kwargs["log"] + + # test if `dl_get_media_info` paht exists self._validate_media_script_path() # derivate other feed variables @@ -760,40 +774,93 @@ class MediaInfoFile: self.feed_dir = os.path.dirname(path) self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() - self.tmp_file = os.path.join(self.feed_dir, self.tmp_name) + with maintained_temp_file_path(".clip") as tmp_path: + self.log.info("Temp File: {}".format(tmp_path)) + self._generate_media_info_file(tmp_path) - # remove previously generated temp files - # it will be regenerated - self._clear_tmp_file() + # get clip data and make them single if there is multiple + # clips data + xml_data = self._make_single_clip_media_info(tmp_path) - self.log.info("Temp File: {}".format(self.tmp_file)) + # get all time related data and assign them + self._get_time_info_from_origin(xml_data) + self.set_clip_data(xml_data) - self._generate_media_info_file() + @property + def clip_data(self): + """Clip's xml clip data + + Returns: + xml.etree.ElementTree: xml data + """ + return self._clip_data + + @clip_data.setter + def clip_data(self, data): + self._clip_data = data + + @property + def start_frame(self): + """ Clip's starting frame found in timecode + + Returns: + int: number of frames + """ + return self._start_frame + + @start_frame.setter + def start_frame(self, number): + self._start_frame = int(number) + + @property + def fps(self): + """ Clip's frame rate + + Returns: + float: frame rate + """ + return self._fps + + @fps.setter + def fps(self, fl_number): + self._fps = float(fl_number) + + @property + def drop_mode(self): + """ Clip's drop frame mode + + Returns: + str: drop frame flag + """ + return self._drop_mode + + @drop_mode.setter + def drop_mode(self, text): + self._drop_mode = str(text) def _validate_media_script_path(self): - if not os.path.isfile(self.media_script_path): + if not os.path.isfile(self.MEDIA_SCRIPT_PATH): raise IOError("Media Scirpt does not exist: `{}`".format( - self.media_script_path)) + self.MEDIA_SCRIPT_PATH)) - def _generate_media_info_file(self): + def _generate_media_info_file(self, fpath): # Create cmd arguments for gettig xml file info file cmd_args = [ - self.media_script_path, + self.MEDIA_SCRIPT_PATH, "-e", self.feed_ext, - "-o", self.tmp_file, + "-o", fpath, self.feed_dir ] - # execute creation of clip xml template data try: + # execute creation of clip xml template data openpype.run_subprocess(cmd_args) - self._make_single_clip_media_info() - except TypeError: - self.log.error("Error creating self.tmp_file") - six.reraise(*sys.exc_info()) + except TypeError as error: + raise TypeError( + "Error creating `{}` due: {}".format(fpath, error)) from error - def _make_single_clip_media_info(self): - with open(self.tmp_file) as f: + def _make_single_clip_media_info(self, fpath): + with open(fpath) as f: lines = f.readlines() _added_root = itertools.chain( "", deepcopy(lines)[1:], "") @@ -816,37 +883,41 @@ class MediaInfoFile: ] )) 
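# A minimal usage sketch of the refactored MediaInfoFile class added in this
# patch, assuming the import exported from openpype.hosts.flame.api and a
# hypothetical media path on disk; the temporary .clip generation and parsing
# happen inside the constructor.
from openpype.hosts.flame.api import MediaInfoFile

media_info = MediaInfoFile("/mnt/projects/ep01/sh010/plate.0001.exr")
print(media_info.start_frame)  # int parsed from startTimecode/nbTicks
print(media_info.fps)          # float parsed from startTimecode/rate
print(media_info.drop_mode)    # str parsed from startTimecode/dropMode
print(media_info.clip_data)    # xml element of the matching <clip> entry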
- self._get_time_info_from_origin(matching_clip) - self.clip_data = matching_clip - self._write_result_xml_to_file(self.tmp_file, matching_clip) - - def _clear_tmp_file(self): - if os.path.isfile(self.tmp_file): - os.remove(self.tmp_file) + return matching_clip def _get_time_info_from_origin(self, xml_data): try: for out_track in xml_data.iter('track'): for out_feed in out_track.iter('feed'): + # start frame out_feed_nb_ticks_obj = out_feed.find( 'startTimecode/nbTicks') - self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text + self.start_frame(out_feed_nb_ticks_obj.text) + + # fps out_feed_fps_obj = out_feed.find( 'startTimecode/rate') - self.out_feed_fps = out_feed_fps_obj.text + self.fps(out_feed_fps_obj.text) + + # drop frame mode out_feed_drop_mode_obj = out_feed.find( 'startTimecode/dropMode') - self.out_feed_drop_mode = out_feed_drop_mode_obj.text + self.drop_mode(out_feed_drop_mode_obj.text) break else: continue except Exception as msg: self.log.warning(msg) - def _write_result_xml_to_file(self, file, xml_data): - # save it as new file - tree = cET.ElementTree(xml_data) - tree.write( - file, xml_declaration=True, - method='xml', encoding='UTF-8' - ) + @staticmethod + def write_clip_data_to_file(fpath, xml_data): + try: + # save it as new file + tree = cET.ElementTree(xml_data) + tree.write( + fpath, xml_declaration=True, + method='xml', encoding='UTF-8' + ) + except IOError as error: + raise IOError( + "Not able to write data to file: {}".format(error)) from error From 1848513fb90943266c882a1fd55840e2c069f24d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 11:49:35 +0200 Subject: [PATCH 149/207] OP-3072 - check enablement as a first step for GDrive --- openpype/modules/sync_server/providers/gdrive.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index b783f7958b..aa7329b104 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -73,6 +73,11 @@ class GDriveHandler(AbstractProvider): format(site_name)) return + if not self.presets["enabled"]: + log.debug("Sync Server: Site {} not enabled for {}.". + format(site_name, project_name)) + return + current_platform = platform.system().lower() cred_path = self.presets.get("credentials_url", {}). 
\ get(current_platform) or '' @@ -97,11 +102,10 @@ class GDriveHandler(AbstractProvider): return self.service = None - if self.presets["enabled"]: - self.service = self._get_gd_service(cred_path) + self.service = self._get_gd_service(cred_path) - self._tree = tree - self.active = True + self._tree = tree + self.active = True def is_active(self): """ From 753ee312a1f36985603e57b3488bd62cff15ad41 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 11:52:10 +0200 Subject: [PATCH 150/207] fixed imports in unreal creators --- openpype/hosts/unreal/plugins/create/create_camera.py | 8 +++----- openpype/hosts/unreal/plugins/create/create_layout.py | 8 +++----- openpype/hosts/unreal/plugins/create/create_look.py | 5 ++--- .../hosts/unreal/plugins/create/create_staticmeshfbx.py | 4 ++-- 4 files changed, 10 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_camera.py b/openpype/hosts/unreal/plugins/create/create_camera.py index c2905fb6dd..2842900834 100644 --- a/openpype/hosts/unreal/plugins/create/create_camera.py +++ b/openpype/hosts/unreal/plugins/create/create_camera.py @@ -2,13 +2,11 @@ import unreal from unreal import EditorAssetLibrary as eal from unreal import EditorLevelLibrary as ell -from openpype.hosts.unreal.api.plugin import Creator -from avalon.unreal import ( - instantiate, -) +from openpype.hosts.unreal.api import plugin +from openpype.hosts.unreal.api.pipeline import instantiate -class CreateCamera(Creator): +class CreateCamera(plugin.Creator): """Layout output for character rigs""" name = "layoutMain" diff --git a/openpype/hosts/unreal/plugins/create/create_layout.py b/openpype/hosts/unreal/plugins/create/create_layout.py index 00e83cf433..751bece167 100644 --- a/openpype/hosts/unreal/plugins/create/create_layout.py +++ b/openpype/hosts/unreal/plugins/create/create_layout.py @@ -1,12 +1,10 @@ # -*- coding: utf-8 -*- from unreal import EditorLevelLibrary as ell -from openpype.hosts.unreal.api.plugin import Creator -from avalon.unreal import ( - instantiate, -) +from openpype.hosts.unreal.api import plugin +from openpype.hosts.unreal.api.pipeline import instantiate -class CreateLayout(Creator): +class CreateLayout(plugin.Creator): """Layout output for character rigs.""" name = "layoutMain" diff --git a/openpype/hosts/unreal/plugins/create/create_look.py b/openpype/hosts/unreal/plugins/create/create_look.py index 59c40d3e74..12f6b70ae6 100644 --- a/openpype/hosts/unreal/plugins/create/create_look.py +++ b/openpype/hosts/unreal/plugins/create/create_look.py @@ -1,11 +1,10 @@ # -*- coding: utf-8 -*- """Create look in Unreal.""" import unreal # noqa -from openpype.hosts.unreal.api.plugin import Creator -from openpype.hosts.unreal.api import pipeline +from openpype.hosts.unreal.api import pipeline, plugin -class CreateLook(Creator): +class CreateLook(plugin.Creator): """Shader connections defining shape look.""" name = "unrealLook" diff --git a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py index 700eac7366..601c2fae06 100644 --- a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- """Create Static Meshes as FBX geometry.""" import unreal # noqa -from openpype.hosts.unreal.api.plugin import Creator +from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api.pipeline import ( instantiate, ) -class CreateStaticMeshFBX(Creator): 
+class CreateStaticMeshFBX(plugin.Creator): """Static FBX geometry.""" name = "unrealStaticMeshMain" From 08e7d47cf80bddee85623f36e425114b23a02e38 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 11:52:23 +0200 Subject: [PATCH 151/207] removed unused creator in pipeline.py --- openpype/hosts/unreal/api/pipeline.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 6d7a6ad1e2..f2c264e5a4 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -4,7 +4,6 @@ import logging from typing import List import pyblish.api -from avalon import api from openpype.pipeline import ( register_loader_plugin_path, @@ -76,30 +75,6 @@ def _register_events(): pass -class Creator(LegacyCreator): - hosts = ["unreal"] - asset_types = [] - - def process(self): - nodes = list() - - with unreal.ScopedEditorTransaction("OpenPype Creating Instance"): - if (self.options or {}).get("useSelection"): - self.log.info("setting ...") - print("settings ...") - nodes = unreal.EditorUtilityLibrary.get_selected_assets() - - asset_paths = [a.get_path_name() for a in nodes] - self.name = move_assets_to_path( - "/Game", self.name, asset_paths - ) - - instance = create_publish_instance("/Game", self.name) - imprint(instance, self.data) - - return instance - - def ls(): """List all containers. From fc49311937375908301845196e8677967476a7c1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 11:50:32 +0200 Subject: [PATCH 152/207] OP-3072 - check enablement as a first step for Dropbox --- .../modules/sync_server/providers/dropbox.py | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index f5910299e5..dfc42fed75 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -17,6 +17,7 @@ class DropboxHandler(AbstractProvider): self.active = False self.site_name = site_name self.presets = presets + self.dbx = None if not self.presets: log.info( @@ -24,6 +25,11 @@ class DropboxHandler(AbstractProvider): ) return + if not self.presets["enabled"]: + log.debug("Sync Server: Site {} not enabled for {}.". 
+ format(site_name, project_name)) + return + token = self.presets.get("token", "") if not token: msg = "Sync Server: No access token for dropbox provider" @@ -44,16 +50,13 @@ class DropboxHandler(AbstractProvider): log.info(msg) return - self.dbx = None - - if self.presets["enabled"]: - try: - self.dbx = self._get_service( - token, acting_as_member, team_folder_name - ) - except Exception as e: - log.info("Could not establish dropbox object: {}".format(e)) - return + try: + self.dbx = self._get_service( + token, acting_as_member, team_folder_name + ) + except Exception as e: + log.info("Could not establish dropbox object: {}".format(e)) + return super(AbstractProvider, self).__init__() From adbd3593424fe9ea981705505038bcf89b9d81b9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 11:54:36 +0200 Subject: [PATCH 153/207] flame: otio removing reel clip dependency --- openpype/hosts/flame/otio/flame_export.py | 70 ++++------------------- 1 file changed, 11 insertions(+), 59 deletions(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 3e76968963..25be310d01 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -261,24 +261,15 @@ def create_otio_markers(otio_item, item): otio_item.markers.append(otio_marker) -def create_otio_reference(clip_data): +def create_otio_reference(clip_data, fps=None): metadata = _get_metadata(clip_data) # get file info for path and start frame frame_start = 0 - fps = CTX.get_fps() + fps = fps or CTX.get_fps() path = clip_data["fpath"] - reel_clip = None - match_reel_clip = [ - clip for clip in CTX.clips - if clip["fpath"] == path - ] - if match_reel_clip: - reel_clip = match_reel_clip.pop() - fps = reel_clip["fps"] - file_name = os.path.basename(path) file_head, extension = os.path.splitext(file_name) @@ -342,16 +333,17 @@ def create_otio_reference(clip_data): def create_otio_clip(clip_data): segment = clip_data["PySegment"] - # create media reference - media_reference = create_otio_reference(clip_data) - # calculate source in media_info = MediaInfoFile(clip_data["fpath"]) - xml_timecode_ticks = media_info.out_feed_nb_ticks - if xml_timecode_ticks: - first_frame = int(xml_timecode_ticks) - else: - first_frame = utils.get_frame_from_filename(clip_data["fpath"]) or 0 + media_timecode_start = media_info.start_frame + media_fps = media_info.fps + + # create media reference + media_reference = create_otio_reference(clip_data, media_fps) + + # define first frame + first_frame = media_timecode_start or utils.get_frame_from_filename( + clip_data["fpath"]) or 0 source_in = int(clip_data["source_in"]) - int(first_frame) @@ -385,41 +377,6 @@ def create_otio_gap(gap_start, clip_start, tl_start_frame, fps): ) -def get_clips_in_reels(project): - output_clips = [] - project_desktop = project.current_workspace.desktop - - for reel_group in project_desktop.reel_groups: - for reel in reel_group.reels: - for clip in reel.clips: - clip_data = { - "PyClip": clip, - "fps": float(str(clip.frame_rate)[:-4]) - } - - attrs = [ - "name", "width", "height", - "ratio", "sample_rate", "bit_depth" - ] - - for attr in attrs: - val = getattr(clip, attr) - clip_data[attr] = val - - version = clip.versions[-1] - track = version.tracks[-1] - # each reel clip is also having one segment - for segment in track.segments: - segment_data = _get_segment_attributes( - segment, from_clip=True) - if segment_data: - clip_data.update(segment_data) - - output_clips.append(clip_data) - - return 
output_clips - - def _get_colourspace_policy(): output = {} @@ -579,11 +536,6 @@ def create_otio_timeline(sequence): log.info(sequence.attributes) CTX.project = get_current_flame_project() - CTX.clips = get_clips_in_reels(CTX.project) - - log.debug(pformat( - CTX.clips - )) # get current timeline CTX.set_fps( From 4797f584981bd5d07370f417ebe073c6cedbd76f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 11:59:34 +0200 Subject: [PATCH 154/207] flame: add todos for metadata feature --- openpype/hosts/flame/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index de3467aa76..e9e0130401 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -271,6 +271,7 @@ def rescan_hooks(): def get_metadata(project_name, _log=None): + # TODO: can be replaced by MediaInfoFile class method from adsk.libwiretapPythonClientAPI import ( WireTapClient, WireTapServerHandle, @@ -750,6 +751,8 @@ class MediaInfoFile: Attributes: str: `MEDIA_SCRIPT_PATH` path to flame binary logging.Logger: `log` logger + + TODO: add method for getting metadata to dict """ MEDIA_SCRIPT_PATH = "/opt/Autodesk/mio/current/dl_get_media_info" From 7f0b4710f2f1fba3f185e1ee47ec85fa03187662 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 12:13:47 +0200 Subject: [PATCH 155/207] Refactor - faster resolution of query --- openpype/modules/sync_server/sync_server_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index ddcf16a410..2c27571f9f 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -850,7 +850,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): active_site = sync_settings["config"]["active_site"] # for Tray running background process - if active_site == get_local_site_id() and active_site not in sites: + if active_site not in sites and active_site == get_local_site_id(): sites.append(active_site) return sites From bfae95a8475981e014561c1eec8b9b54de5a8424 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 12:23:16 +0200 Subject: [PATCH 156/207] flame: OpenClipSolver inheriting from MediaInfoFile class --- openpype/hosts/flame/api/lib.py | 4 +- openpype/hosts/flame/api/plugin.py | 162 ++++++----------------------- 2 files changed, 36 insertions(+), 130 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index e9e0130401..91f5c26562 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -766,8 +766,8 @@ class MediaInfoFile: def __init__(self, path, **kwargs): # replace log if any - if kwargs.get("log"): - self.log = kwargs["log"] + if kwargs.get("logger"): + self.log = kwargs["logger"] # test if `dl_get_media_info` paht exists self._validate_media_script_path() diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index bd0f9f1a81..a23be946ba 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -679,53 +679,39 @@ class ClipLoader(LoaderPlugin): ] -# TODO: inheritance from flame.api.lib.MediaInfoFile -class OpenClipSolver: - media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info" - tmp_name = "_tmp.clip" - tmp_file = None +class OpenClipSolver(flib.MediaInfoFile): create_new_clip = False - out_feed_nb_ticks = None - out_feed_fps = None - 
out_feed_drop_mode = None - log = log def __init__(self, openclip_file_path, feed_data): - # test if media script paht exists - self._validate_media_script_path() + self.out_file = openclip_file_path # new feed variables: - feed_path = feed_data["path"] - self.feed_version_name = feed_data["version"] - self.feed_colorspace = feed_data.get("colorspace") + feed_path = feed_data.pop("path") + # initialize parent class + super(OpenClipSolver).__init__( + feed_path, + **feed_data + ) + + # get logger if any if feed_data.get("logger"): self.log = feed_data["logger"] + # get other metadata + self.feed_version_name = feed_data["version"] + self.feed_colorspace = feed_data.get("colorspace") + # derivate other feed variables self.feed_basename = os.path.basename(feed_path) self.feed_dir = os.path.dirname(feed_path) self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() - if not self._is_valid_tmp_file(openclip_file_path): - # openclip does not exist yet and will be created - self.tmp_file = self.out_file = openclip_file_path + if not self._is_valid_tmp_file(self.out_file): self.create_new_clip = True - else: - # update already created clip - # output a temp file - self.out_file = openclip_file_path - self.tmp_file = os.path.join(self.feed_dir, self.tmp_name) - - # remove previously generated temp files - # it will be regenerated - self._clear_tmp_file() - - self.log.info("Temp File: {}".format(self.tmp_file)) - def _is_valid_tmp_file(self, file): # check if file exists if os.path.isfile(file): @@ -740,7 +726,6 @@ class OpenClipSolver: return False def make(self): - self._generate_media_info_file() if self.create_new_clip: # New openClip @@ -748,58 +733,6 @@ class OpenClipSolver: else: self._update_open_clip() - def _validate_media_script_path(self): - if not os.path.isfile(self.media_script_path): - raise IOError("Media Scirpt does not exist: `{}`".format( - self.media_script_path)) - - def _generate_media_info_file(self): - # Create cmd arguments for gettig xml file info file - cmd_args = [ - self.media_script_path, - "-e", self.feed_ext, - "-o", self.tmp_file, - self.feed_dir - ] - - # execute creation of clip xml template data - try: - openpype.run_subprocess(cmd_args) - self._make_single_clip_media_info() - except TypeError: - self.log.error("Error creating self.tmp_file") - six.reraise(*sys.exc_info()) - - def _make_single_clip_media_info(self): - with open(self.tmp_file) as f: - lines = f.readlines() - _added_root = itertools.chain( - "", deepcopy(lines)[1:], "") - new_root = ET.fromstringlist(_added_root) - - # find the clip which is matching to my input name - xml_clips = new_root.findall("clip") - matching_clip = None - for xml_clip in xml_clips: - if xml_clip.find("name").text in self.feed_basename: - matching_clip = xml_clip - - if matching_clip is None: - # return warning there is missing clip - raise ET.ParseError( - "Missing clip in `{}`. 
Available clips {}".format( - self.feed_basename, [ - xml_clip.find("name").text - for xml_clip in xml_clips - ] - )) - - self._write_result_xml_to_file(self.tmp_file, matching_clip) - - def _clear_tmp_file(self): - if os.path.isfile(self.tmp_file): - os.remove(self.tmp_file) - def _clear_handler(self, xml_object): for handler in xml_object.findall("./handler"): self.log.debug("Handler found") @@ -808,9 +741,8 @@ class OpenClipSolver: def _create_new_open_clip(self): self.log.info("Building new openClip") - tmp_xml = ET.parse(self.tmp_file) - - tmp_xml_feeds = tmp_xml.find('tracks/track/feeds') + # clip data comming from MediaInfoFile + tmp_xml_feeds = self.clip_data.find('tracks/track/feeds') tmp_xml_feeds.set('currentVersion', self.feed_version_name) for tmp_feed in tmp_xml_feeds: tmp_feed.set('vuid', self.feed_version_name) @@ -821,46 +753,47 @@ class OpenClipSolver: self._clear_handler(tmp_feed) - tmp_xml_versions_obj = tmp_xml.find('versions') + tmp_xml_versions_obj = self.clip_data.find('versions') tmp_xml_versions_obj.set('currentVersion', self.feed_version_name) for xml_new_version in tmp_xml_versions_obj: xml_new_version.set('uid', self.feed_version_name) xml_new_version.set('type', 'version') - xml_data = self._fix_xml_data(tmp_xml) + xml_data = self._fix_xml_data(self.clip_data) self.log.info("Adding feed version: {}".format(self.feed_basename)) - self._write_result_xml_to_file(self.out_file, xml_data) - - self.log.info("openClip Updated: {}".format(self.tmp_file)) + self.write_clip_data_to_file(self.out_file, xml_data) def _update_open_clip(self): self.log.info("Updating openClip ..") out_xml = ET.parse(self.out_file) - tmp_xml = ET.parse(self.tmp_file) self.log.debug(">> out_xml: {}".format(out_xml)) - self.log.debug(">> tmp_xml: {}".format(tmp_xml)) + self.log.debug(">> self.clip_data: {}".format(self.clip_data)) # Get new feed from tmp file - tmp_xml_feed = tmp_xml.find('tracks/track/feeds/feed') + tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed') self._clear_handler(tmp_xml_feed) - self._get_time_info_from_origin(out_xml) - if self.out_feed_fps: + # update fps from MediaInfoFile class + if self.fps: tmp_feed_fps_obj = tmp_xml_feed.find( "startTimecode/rate") - tmp_feed_fps_obj.text = self.out_feed_fps - if self.out_feed_nb_ticks: + tmp_feed_fps_obj.text = self.fps + + # update start_frame from MediaInfoFile class + if self.start_frame: tmp_feed_nb_ticks_obj = tmp_xml_feed.find( "startTimecode/nbTicks") - tmp_feed_nb_ticks_obj.text = self.out_feed_nb_ticks - if self.out_feed_drop_mode: + tmp_feed_nb_ticks_obj.text = self.start_frame + + # update drop_mode from MediaInfoFile class + if self.drop_mode: tmp_feed_drop_mode_obj = tmp_xml_feed.find( "startTimecode/dropMode") - tmp_feed_drop_mode_obj.text = self.out_feed_drop_mode + tmp_feed_drop_mode_obj.text = self.drop_mode new_path_obj = tmp_xml_feed.find( "spans/span/path") @@ -901,31 +834,10 @@ class OpenClipSolver: self.log.info("Adding feed version: {}".format( self.feed_version_name)) - self._write_result_xml_to_file(self.out_file, xml_data) + self.write_clip_data_to_file(self.out_file, xml_data) self.log.info("openClip Updated: {}".format(self.out_file)) - self._clear_tmp_file() - - def _get_time_info_from_origin(self, xml_data): - try: - for out_track in xml_data.iter('track'): - for out_feed in out_track.iter('feed'): - out_feed_nb_ticks_obj = out_feed.find( - 'startTimecode/nbTicks') - self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text - out_feed_fps_obj = out_feed.find( - 'startTimecode/rate') - 
self.out_feed_fps = out_feed_fps_obj.text - out_feed_drop_mode_obj = out_feed.find( - 'startTimecode/dropMode') - self.out_feed_drop_mode = out_feed_drop_mode_obj.text - break - else: - continue - except Exception as msg: - self.log.warning(msg) - def _feed_exists(self, xml_data, path): # loop all available feed paths and check if # the path is not already in file @@ -940,12 +852,6 @@ class OpenClipSolver: self._clear_handler(xml_root) return xml_root - def _write_result_xml_to_file(self, file, xml_data): - # save it as new file - tree = cET.ElementTree(xml_data) - tree.write(file, xml_declaration=True, - method='xml', encoding='UTF-8') - def _create_openclip_backup_file(self, file): bck_file = "{}.bak".format(file) # if backup does not exist From 507f3615ab8f42f5664afcac01d339e0517afdf5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 12:24:04 +0200 Subject: [PATCH 157/207] Refactor - changed logic to loop through alt sites --- openpype/plugins/publish/integrate_new.py | 31 +++++++++++++---------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index ed1c02b825..3eca460ba3 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -8,6 +8,7 @@ import errno import six import re import shutil +from collections import deque from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne @@ -1199,21 +1200,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # transitive relationship, eg site is alternative to another which is # alternative to nex site - loop = True - while loop: - loop = False - for site_name, alt_sites in alt_site_pairs.items(): - for alt_site in alt_sites: - # safety against wrong config - # {"SFTP": {"alternative_site": "SFTP"} - if alt_site == site_name: - continue + for site_name, alt_sites in alt_site_pairs.items(): + sites_queue = deque(alt_sites) + while sites_queue: + alt_site = sites_queue.popleft() - for alt_alt_site in alt_site_pairs.get(alt_site, []): - if ( alt_alt_site != site_name - and alt_alt_site not in alt_sites): - alt_site_pairs[site_name].append(alt_alt_site) - loop = True + # safety against wrong config + # {"SFTP": {"alternative_site": "SFTP"} + if alt_site == site_name or alt_site not in alt_site_pairs: + continue + + for alt_alt_site in alt_site_pairs[alt_site]: + if ( + alt_alt_site != site_name + and alt_alt_site not in alt_sites + ): + alt_sites.append(alt_alt_site) + sites_queue.append(alt_alt_site) return alt_site_pairs From 72ecb6192a0ac609b4ad951dbd35712144b012a7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 12:52:27 +0200 Subject: [PATCH 158/207] flame: fixing flame compatibility and python2 --- openpype/hosts/flame/api/__init__.py | 1 - openpype/hosts/flame/api/lib.py | 6 +++--- openpype/hosts/flame/api/plugin.py | 5 +---- openpype/hosts/flame/otio/flame_export.py | 5 ++--- 4 files changed, 6 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 6744a7ff11..2c461e5f16 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -57,7 +57,6 @@ from .plugin import ( PublishableClip, ClipLoader, OpenClipSolver - ) from .workio import ( open_file, diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 91f5c26562..6d93018fef 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ 
-708,7 +708,7 @@ def maintained_temp_file_path(suffix=None): except IOError as _error: raise IOError( - "Not able to create temp json file: {}".format(_error)) from _error + "Not able to create temp json file: {}".format(_error)) finally: # Remove the temporary json @@ -860,7 +860,7 @@ class MediaInfoFile: openpype.run_subprocess(cmd_args) except TypeError as error: raise TypeError( - "Error creating `{}` due: {}".format(fpath, error)) from error + "Error creating `{}` due: {}".format(fpath, error)) def _make_single_clip_media_info(self, fpath): with open(fpath) as f: @@ -923,4 +923,4 @@ class MediaInfoFile: ) except IOError as error: raise IOError( - "Not able to write data to file: {}".format(error)) from error + "Not able to write data to file: {}".format(error)) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index a23be946ba..ab74bb4605 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -1,15 +1,11 @@ -import itertools import os import re import shutil -import sys -import xml.etree.cElementTree as cET from copy import deepcopy from xml.etree import ElementTree as ET import openpype.api as openpype import qargparse -import six from openpype import style from openpype.pipeline import LegacyCreator, LoaderPlugin from Qt import QtCore, QtWidgets @@ -740,6 +736,7 @@ class OpenClipSolver(flib.MediaInfoFile): def _create_new_open_clip(self): self.log.info("Building new openClip") + self.log.debug(">> self.clip_data: {}".format(self.clip_data)) # clip data comming from MediaInfoFile tmp_xml_feeds = self.clip_data.find('tracks/track/feeds') diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 25be310d01..0b9c9ce817 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -7,13 +7,10 @@ import json import logging import opentimelineio as otio from . 
import utils -from openpype.hosts.flame.api import MediaInfoFile import flame from pprint import pformat -reload(utils) # noqa - log = logging.getLogger(__name__) @@ -331,6 +328,8 @@ def create_otio_reference(clip_data, fps=None): def create_otio_clip(clip_data): + from openpype.hosts.flame.api import MediaInfoFile + segment = clip_data["PySegment"] # calculate source in From 9256e022909bb363d8e4cf4be532248363579d40 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 13:09:29 +0200 Subject: [PATCH 159/207] flame: setter getter error --- openpype/hosts/flame/api/lib.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 6d93018fef..d68dd2a886 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -704,7 +704,7 @@ def maintained_temp_file_path(suffix=None): # Store dumped json to temporary file temporary_file = tempfile.mktemp( suffix=_suffix, prefix="flame_maintained_") - yield temporary_file.name.replace("\\", "/") + yield temporary_file.replace("\\", "/") except IOError as _error: raise IOError( @@ -787,7 +787,7 @@ class MediaInfoFile: # get all time related data and assign them self._get_time_info_from_origin(xml_data) - self.set_clip_data(xml_data) + self.clip_data = xml_data @property def clip_data(self): @@ -895,17 +895,17 @@ class MediaInfoFile: # start frame out_feed_nb_ticks_obj = out_feed.find( 'startTimecode/nbTicks') - self.start_frame(out_feed_nb_ticks_obj.text) + self.start_frame = out_feed_nb_ticks_obj.text # fps out_feed_fps_obj = out_feed.find( 'startTimecode/rate') - self.fps(out_feed_fps_obj.text) + self.fps = out_feed_fps_obj.text # drop frame mode out_feed_drop_mode_obj = out_feed.find( 'startTimecode/dropMode') - self.drop_mode(out_feed_drop_mode_obj.text) + self.drop_mode = out_feed_drop_mode_obj.text break else: continue From 3f9b06139d44448416deae9d251dabffdcfb9506 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 14:13:43 +0200 Subject: [PATCH 160/207] flame: fix getroot --- openpype/hosts/flame/api/lib.py | 2 +- openpype/hosts/flame/api/plugin.py | 11 +++-------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index d68dd2a886..6ee0eb6a82 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -739,7 +739,7 @@ def get_batch_group_from_desktop(name): return bgroup -class MediaInfoFile: +class MediaInfoFile(object): """Class to get media info file clip data Raises: diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index ab74bb4605..6136c4922c 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -687,7 +687,7 @@ class OpenClipSolver(flib.MediaInfoFile): feed_path = feed_data.pop("path") # initialize parent class - super(OpenClipSolver).__init__( + super(OpenClipSolver, self).__init__( feed_path, **feed_data ) @@ -756,7 +756,7 @@ class OpenClipSolver(flib.MediaInfoFile): xml_new_version.set('uid', self.feed_version_name) xml_new_version.set('type', 'version') - xml_data = self._fix_xml_data(self.clip_data) + xml_data = self._clear_handler(self.clip_data) self.log.info("Adding feed version: {}".format(self.feed_basename)) self.write_clip_data_to_file(self.out_file, xml_data) @@ -823,7 +823,7 @@ class OpenClipSolver(flib.MediaInfoFile): "version", {"type": "version", "uid": self.feed_version_name}) out_xml_versions_obj.insert(0, 
new_version_obj) - xml_data = self._fix_xml_data(out_xml) + xml_data = self._clear_handler(out_xml) # fist create backup self._create_openclip_backup_file(self.out_file) @@ -844,11 +844,6 @@ class OpenClipSolver(flib.MediaInfoFile): "Not appending file as it already is in .clip file") return True - def _fix_xml_data(self, xml_data): - xml_root = xml_data.getroot() - self._clear_handler(xml_root) - return xml_root - def _create_openclip_backup_file(self, file): bck_file = "{}.bak".format(file) # if backup does not exist From 9a8c41371bc3c7a7c3be37e655021005547b9a30 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 15:12:16 +0200 Subject: [PATCH 161/207] use current project for query asset --- openpype/tools/utils/lib.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 422d0f5389..efaf671915 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -409,6 +409,7 @@ class FamilyConfigCache: project_name = os.environ.get("AVALON_PROJECT") asset_name = os.environ.get("AVALON_ASSET") task_name = os.environ.get("AVALON_TASK") + host_name = os.environ.get("AVALON_APP") if not all((project_name, asset_name, task_name)): return @@ -422,15 +423,18 @@ class FamilyConfigCache: ["family_filter_profiles"] ) if profiles: - asset_doc = self.dbcon.find_one( + # Make sure connection is installed + # - accessing attribute which does not have auto-install + self.dbcon.install() + asset_doc = self.dbcon.database[project_name].find_one( {"type": "asset", "name": asset_name}, {"data.tasks": True} - ) + ) or {} tasks_info = asset_doc.get("data", {}).get("tasks") or {} task_type = tasks_info.get(task_name, {}).get("type") profiles_filter = { "task_types": task_type, - "hosts": os.environ["AVALON_APP"] + "hosts": host_name } matching_item = filter_profiles(profiles, profiles_filter) From 8420a164c2c5e7537c584f30e1e3b8469417cacf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 15:13:05 +0200 Subject: [PATCH 162/207] flame: temp reverse commit bfae95a8475981e014561c1eec8b9b54de5a8424 --- openpype/hosts/flame/api/plugin.py | 176 +++++++++++++++++++++++------ 1 file changed, 139 insertions(+), 37 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 6136c4922c..bd0f9f1a81 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -1,11 +1,15 @@ +import itertools import os import re import shutil +import sys +import xml.etree.cElementTree as cET from copy import deepcopy from xml.etree import ElementTree as ET import openpype.api as openpype import qargparse +import six from openpype import style from openpype.pipeline import LegacyCreator, LoaderPlugin from Qt import QtCore, QtWidgets @@ -675,39 +679,53 @@ class ClipLoader(LoaderPlugin): ] -class OpenClipSolver(flib.MediaInfoFile): +# TODO: inheritance from flame.api.lib.MediaInfoFile +class OpenClipSolver: + media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info" + tmp_name = "_tmp.clip" + tmp_file = None create_new_clip = False + out_feed_nb_ticks = None + out_feed_fps = None + out_feed_drop_mode = None + log = log def __init__(self, openclip_file_path, feed_data): - self.out_file = openclip_file_path + # test if media script paht exists + self._validate_media_script_path() # new feed variables: - feed_path = feed_data.pop("path") - - # initialize parent class - super(OpenClipSolver, self).__init__( - feed_path, - **feed_data - ) - - # 
get logger if any - if feed_data.get("logger"): - self.log = feed_data["logger"] - - # get other metadata + feed_path = feed_data["path"] self.feed_version_name = feed_data["version"] self.feed_colorspace = feed_data.get("colorspace") + if feed_data.get("logger"): + self.log = feed_data["logger"] + # derivate other feed variables self.feed_basename = os.path.basename(feed_path) self.feed_dir = os.path.dirname(feed_path) self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() - if not self._is_valid_tmp_file(self.out_file): + if not self._is_valid_tmp_file(openclip_file_path): + # openclip does not exist yet and will be created + self.tmp_file = self.out_file = openclip_file_path self.create_new_clip = True + else: + # update already created clip + # output a temp file + self.out_file = openclip_file_path + self.tmp_file = os.path.join(self.feed_dir, self.tmp_name) + + # remove previously generated temp files + # it will be regenerated + self._clear_tmp_file() + + self.log.info("Temp File: {}".format(self.tmp_file)) + def _is_valid_tmp_file(self, file): # check if file exists if os.path.isfile(file): @@ -722,6 +740,7 @@ class OpenClipSolver(flib.MediaInfoFile): return False def make(self): + self._generate_media_info_file() if self.create_new_clip: # New openClip @@ -729,6 +748,58 @@ class OpenClipSolver(flib.MediaInfoFile): else: self._update_open_clip() + def _validate_media_script_path(self): + if not os.path.isfile(self.media_script_path): + raise IOError("Media Scirpt does not exist: `{}`".format( + self.media_script_path)) + + def _generate_media_info_file(self): + # Create cmd arguments for gettig xml file info file + cmd_args = [ + self.media_script_path, + "-e", self.feed_ext, + "-o", self.tmp_file, + self.feed_dir + ] + + # execute creation of clip xml template data + try: + openpype.run_subprocess(cmd_args) + self._make_single_clip_media_info() + except TypeError: + self.log.error("Error creating self.tmp_file") + six.reraise(*sys.exc_info()) + + def _make_single_clip_media_info(self): + with open(self.tmp_file) as f: + lines = f.readlines() + _added_root = itertools.chain( + "", deepcopy(lines)[1:], "") + new_root = ET.fromstringlist(_added_root) + + # find the clip which is matching to my input name + xml_clips = new_root.findall("clip") + matching_clip = None + for xml_clip in xml_clips: + if xml_clip.find("name").text in self.feed_basename: + matching_clip = xml_clip + + if matching_clip is None: + # return warning there is missing clip + raise ET.ParseError( + "Missing clip in `{}`. 
Available clips {}".format( + self.feed_basename, [ + xml_clip.find("name").text + for xml_clip in xml_clips + ] + )) + + self._write_result_xml_to_file(self.tmp_file, matching_clip) + + def _clear_tmp_file(self): + if os.path.isfile(self.tmp_file): + os.remove(self.tmp_file) + def _clear_handler(self, xml_object): for handler in xml_object.findall("./handler"): self.log.debug("Handler found") @@ -736,10 +807,10 @@ class OpenClipSolver(flib.MediaInfoFile): def _create_new_open_clip(self): self.log.info("Building new openClip") - self.log.debug(">> self.clip_data: {}".format(self.clip_data)) - # clip data comming from MediaInfoFile - tmp_xml_feeds = self.clip_data.find('tracks/track/feeds') + tmp_xml = ET.parse(self.tmp_file) + + tmp_xml_feeds = tmp_xml.find('tracks/track/feeds') tmp_xml_feeds.set('currentVersion', self.feed_version_name) for tmp_feed in tmp_xml_feeds: tmp_feed.set('vuid', self.feed_version_name) @@ -750,47 +821,46 @@ class OpenClipSolver(flib.MediaInfoFile): self._clear_handler(tmp_feed) - tmp_xml_versions_obj = self.clip_data.find('versions') + tmp_xml_versions_obj = tmp_xml.find('versions') tmp_xml_versions_obj.set('currentVersion', self.feed_version_name) for xml_new_version in tmp_xml_versions_obj: xml_new_version.set('uid', self.feed_version_name) xml_new_version.set('type', 'version') - xml_data = self._clear_handler(self.clip_data) + xml_data = self._fix_xml_data(tmp_xml) self.log.info("Adding feed version: {}".format(self.feed_basename)) - self.write_clip_data_to_file(self.out_file, xml_data) + self._write_result_xml_to_file(self.out_file, xml_data) + + self.log.info("openClip Updated: {}".format(self.tmp_file)) def _update_open_clip(self): self.log.info("Updating openClip ..") out_xml = ET.parse(self.out_file) + tmp_xml = ET.parse(self.tmp_file) self.log.debug(">> out_xml: {}".format(out_xml)) - self.log.debug(">> self.clip_data: {}".format(self.clip_data)) + self.log.debug(">> tmp_xml: {}".format(tmp_xml)) # Get new feed from tmp file - tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed') + tmp_xml_feed = tmp_xml.find('tracks/track/feeds/feed') self._clear_handler(tmp_xml_feed) + self._get_time_info_from_origin(out_xml) - # update fps from MediaInfoFile class - if self.fps: + if self.out_feed_fps: tmp_feed_fps_obj = tmp_xml_feed.find( "startTimecode/rate") - tmp_feed_fps_obj.text = self.fps - - # update start_frame from MediaInfoFile class - if self.start_frame: + tmp_feed_fps_obj.text = self.out_feed_fps + if self.out_feed_nb_ticks: tmp_feed_nb_ticks_obj = tmp_xml_feed.find( "startTimecode/nbTicks") - tmp_feed_nb_ticks_obj.text = self.start_frame - - # update drop_mode from MediaInfoFile class - if self.drop_mode: + tmp_feed_nb_ticks_obj.text = self.out_feed_nb_ticks + if self.out_feed_drop_mode: tmp_feed_drop_mode_obj = tmp_xml_feed.find( "startTimecode/dropMode") - tmp_feed_drop_mode_obj.text = self.drop_mode + tmp_feed_drop_mode_obj.text = self.out_feed_drop_mode new_path_obj = tmp_xml_feed.find( "spans/span/path") @@ -823,7 +893,7 @@ class OpenClipSolver(flib.MediaInfoFile): "version", {"type": "version", "uid": self.feed_version_name}) out_xml_versions_obj.insert(0, new_version_obj) - xml_data = self._clear_handler(out_xml) + xml_data = self._fix_xml_data(out_xml) # fist create backup self._create_openclip_backup_file(self.out_file) @@ -831,10 +901,31 @@ class OpenClipSolver(flib.MediaInfoFile): self.log.info("Adding feed version: {}".format( self.feed_version_name)) - self.write_clip_data_to_file(self.out_file, xml_data) + 
self._write_result_xml_to_file(self.out_file, xml_data) self.log.info("openClip Updated: {}".format(self.out_file)) + self._clear_tmp_file() + + def _get_time_info_from_origin(self, xml_data): + try: + for out_track in xml_data.iter('track'): + for out_feed in out_track.iter('feed'): + out_feed_nb_ticks_obj = out_feed.find( + 'startTimecode/nbTicks') + self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text + out_feed_fps_obj = out_feed.find( + 'startTimecode/rate') + self.out_feed_fps = out_feed_fps_obj.text + out_feed_drop_mode_obj = out_feed.find( + 'startTimecode/dropMode') + self.out_feed_drop_mode = out_feed_drop_mode_obj.text + break + else: + continue + except Exception as msg: + self.log.warning(msg) + def _feed_exists(self, xml_data, path): # loop all available feed paths and check if # the path is not already in file @@ -844,6 +935,17 @@ class OpenClipSolver(flib.MediaInfoFile): "Not appending file as it already is in .clip file") return True + def _fix_xml_data(self, xml_data): + xml_root = xml_data.getroot() + self._clear_handler(xml_root) + return xml_root + + def _write_result_xml_to_file(self, file, xml_data): + # save it as new file + tree = cET.ElementTree(xml_data) + tree.write(file, xml_declaration=True, + method='xml', encoding='UTF-8') + def _create_openclip_backup_file(self, file): bck_file = "{}.bak".format(file) # if backup does not exist From 2a618081fdf9a6f81e6b63b8c020fefa84a30586 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 15:58:59 +0200 Subject: [PATCH 163/207] added settings for white list of environment variables --- .../settings/defaults/system_settings/general.json | 1 + .../entities/schemas/system_schema/schema_general.json | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json index 5a3e39e5b6..e1785f8709 100644 --- a/openpype/settings/defaults/system_settings/general.json +++ b/openpype/settings/defaults/system_settings/general.json @@ -12,6 +12,7 @@ "linux": [], "darwin": [] }, + "local_env_white_list": [], "openpype_path": { "windows": [], "darwin": [], diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json index 6306317df8..997404b2e6 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_general.json +++ b/openpype/settings/entities/schemas/system_schema/schema_general.json @@ -110,6 +110,16 @@ { "type": "splitter" }, + { + "type": "list", + "key": "local_env_white_list", + "label": "White list of local environment variables", + "use_label_wrap": true, + "object_type": "text" + }, + { + "type": "splitter" + }, { "type": "collapsible-wrap", "label": "OpenPype deployment control", From 29dca65202d45a79e66c619b95d3408e227a9c05 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 16:59:34 +0200 Subject: [PATCH 164/207] Refactor - changed to defaultdict --- openpype/plugins/publish/integrate_new.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 3eca460ba3..5dcbb8fabd 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -8,7 +8,7 @@ import errno import six import re import shutil -from collections import deque +from collections import deque, defaultdict from bson.objectid import ObjectId from pymongo import DeleteOne, 
InsertOne @@ -1185,21 +1185,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: (dict): {'site': [alternative sites]...} """ - alt_site_pairs = {} + alt_site_pairs = defaultdict(list) for site_name, site_info in conf_sites.items(): alt_sites = set(site_info.get("alternative_sites", [])) - if not alt_site_pairs.get(site_name): - alt_site_pairs[site_name] = [] - alt_site_pairs[site_name].extend(alt_sites) for alt_site in alt_sites: - if not alt_site_pairs.get(alt_site): - alt_site_pairs[alt_site] = [] - alt_site_pairs[alt_site].extend([site_name]) + alt_site_pairs[alt_site].append(site_name) - # transitive relationship, eg site is alternative to another which is - # alternative to nex site for site_name, alt_sites in alt_site_pairs.items(): sites_queue = deque(alt_sites) while sites_queue: From 0ab101a5b12b755f07551d32203779103ee059f7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 17:11:16 +0200 Subject: [PATCH 165/207] added widgets for environments settings --- .../settings/local_settings/constants.py | 1 + .../local_settings/environments_widget.py | 80 +++++++++++++++++++ .../tools/settings/local_settings/window.py | 31 ++++++- 3 files changed, 110 insertions(+), 2 deletions(-) create mode 100644 openpype/tools/settings/local_settings/environments_widget.py diff --git a/openpype/tools/settings/local_settings/constants.py b/openpype/tools/settings/local_settings/constants.py index 1836c579af..16f87b6f05 100644 --- a/openpype/tools/settings/local_settings/constants.py +++ b/openpype/tools/settings/local_settings/constants.py @@ -9,6 +9,7 @@ LABEL_DISCARD_CHANGES = "Discard changes" # TODO move to settings constants LOCAL_GENERAL_KEY = "general" LOCAL_PROJECTS_KEY = "projects" +LOCAL_ENV_KEY = "environments" LOCAL_APPS_KEY = "applications" # Roots key constant diff --git a/openpype/tools/settings/local_settings/environments_widget.py b/openpype/tools/settings/local_settings/environments_widget.py new file mode 100644 index 0000000000..70631e8f29 --- /dev/null +++ b/openpype/tools/settings/local_settings/environments_widget.py @@ -0,0 +1,80 @@ +from Qt import QtWidgets + +from openpype.tools.utils import PlaceholderLineEdit + + +class LocalEnvironmentsWidgets(QtWidgets.QWidget): + def __init__(self, system_settings_entity, parent): + super(LocalEnvironmentsWidgets, self).__init__(parent) + + self._widgets_by_env_key = {} + self.system_settings_entity = system_settings_entity + + content_widget = QtWidgets.QWidget(self) + content_layout = QtWidgets.QGridLayout(content_widget) + content_layout.setContentsMargins(0, 0, 0, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + + self._layout = layout + self._content_layout = content_layout + self._content_widget = content_widget + + def _clear_layout(self, layout): + while layout.count() > 0: + item = layout.itemAt(0) + widget = item.widget() + layout.removeItem(item) + if widget is not None: + widget.setVisible(False) + widget.deleteLater() + + def _reset_env_widgets(self): + self._clear_layout(self._content_layout) + self._clear_layout(self._layout) + + content_widget = QtWidgets.QWidget(self) + content_layout = QtWidgets.QGridLayout(content_widget) + content_layout.setContentsMargins(0, 0, 0, 0) + content_layout.setColumnStretch(0, 0) + content_layout.setColumnStretch(1, 1) + white_list_entity = ( + self.system_settings_entity["general"]["local_env_white_list"] + ) + + for row, item in enumerate(white_list_entity): + key = item.value + label_widget = QtWidgets.QLabel(key, self) + 
input_widget = PlaceholderLineEdit(self) + input_widget.setPlaceholderText("< Keep studio value >") + + content_layout.addWidget(label_widget, row, 0) + content_layout.addWidget(input_widget, row, 1) + + self._widgets_by_env_key[key] = input_widget + + self._layout.addWidget(content_widget, 1) + + self._content_layout = content_layout + self._content_widget = content_widget + + def update_local_settings(self, value): + if not value: + value = {} + + self._reset_env_widgets() + + for env_key, widget in self._widgets_by_env_key.items(): + env_value = value.get(env_key) or "" + widget.setText(env_value) + + def settings_value(self): + output = {} + for env_key, widget in self._widgets_by_env_key.items(): + value = widget.text() + if value: + output[env_key] = value + if not output: + return None + return output diff --git a/openpype/tools/settings/local_settings/window.py b/openpype/tools/settings/local_settings/window.py index fb47e69a17..4db0e01476 100644 --- a/openpype/tools/settings/local_settings/window.py +++ b/openpype/tools/settings/local_settings/window.py @@ -25,11 +25,13 @@ from .experimental_widget import ( LOCAL_EXPERIMENTAL_KEY ) from .apps_widget import LocalApplicationsWidgets +from .environments_widget import LocalEnvironmentsWidgets from .projects_widget import ProjectSettingsWidget from .constants import ( LOCAL_GENERAL_KEY, LOCAL_PROJECTS_KEY, + LOCAL_ENV_KEY, LOCAL_APPS_KEY ) @@ -49,18 +51,20 @@ class LocalSettingsWidget(QtWidgets.QWidget): self.pype_mongo_widget = None self.general_widget = None self.experimental_widget = None + self.envs_widget = None self.apps_widget = None self.projects_widget = None - self._create_pype_mongo_ui() + self._create_mongo_url_ui() self._create_general_ui() self._create_experimental_ui() + self._create_environments_ui() self._create_app_ui() self._create_project_ui() self.main_layout.addStretch(1) - def _create_pype_mongo_ui(self): + def _create_mongo_url_ui(self): pype_mongo_expand_widget = ExpandingWidget("OpenPype Mongo URL", self) pype_mongo_content = QtWidgets.QWidget(self) pype_mongo_layout = QtWidgets.QVBoxLayout(pype_mongo_content) @@ -110,6 +114,22 @@ class LocalSettingsWidget(QtWidgets.QWidget): self.experimental_widget = experimental_widget + def _create_environments_ui(self): + envs_expand_widget = ExpandingWidget("Environments", self) + envs_content = QtWidgets.QWidget(self) + envs_layout = QtWidgets.QVBoxLayout(envs_content) + envs_layout.setContentsMargins(CHILD_OFFSET, 5, 0, 0) + envs_expand_widget.set_content_widget(envs_content) + + envs_widget = LocalEnvironmentsWidgets( + self.system_settings, envs_content + ) + envs_layout.addWidget(envs_widget) + + self.main_layout.addWidget(envs_expand_widget) + + self.envs_widget = envs_widget + def _create_app_ui(self): # Applications app_expand_widget = ExpandingWidget("Applications", self) @@ -154,6 +174,9 @@ class LocalSettingsWidget(QtWidgets.QWidget): self.general_widget.update_local_settings( value.get(LOCAL_GENERAL_KEY) ) + self.envs_widget.update_local_settings( + value.get(LOCAL_ENV_KEY) + ) self.app_widget.update_local_settings( value.get(LOCAL_APPS_KEY) ) @@ -170,6 +193,10 @@ class LocalSettingsWidget(QtWidgets.QWidget): if general_value: output[LOCAL_GENERAL_KEY] = general_value + envs_value = self.envs_widget.settings_value() + if envs_value: + output[LOCAL_ENV_KEY] = envs_value + app_value = self.app_widget.settings_value() if app_value: output[LOCAL_APPS_KEY] = app_value From d7262446f82d6adc08650853c7649cf133d0895c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: 
Fri, 8 Apr 2022 17:11:48 +0200 Subject: [PATCH 166/207] use local settings to override environments during launching of application --- openpype/lib/applications.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 5821c863d7..049658a548 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -13,7 +13,8 @@ import six from openpype.settings import ( get_system_settings, - get_project_settings + get_project_settings, + get_local_settings ) from openpype.settings.constants import ( METADATA_KEYS, @@ -1272,6 +1273,9 @@ class EnvironmentPrepData(dict): if data.get("env") is None: data["env"] = os.environ.copy() + if "system_settings" not in data: + data["system_settings"] = get_system_settings() + super(EnvironmentPrepData, self).__init__(data) @@ -1434,6 +1438,19 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): ) ) + # Use environments from local settings + filtered_local_envs = {} + system_settings = data["system_settings"] + whitelist_envs = system_settings["general"].get("local_env_white_list") + if whitelist_envs: + local_settings = get_local_settings() + local_envs = local_settings.get("environments") or {} + filtered_local_envs = { + key: value + for key, value in local_envs.items() + if key in whitelist_envs + } + env_values = {} for _env_values in environments: if not _env_values: @@ -1441,6 +1458,10 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): # Choose right platform tool_env = parse_environments(_env_values, env_group) + for key, value in filtered_local_envs.items(): + if key in tool_env: + tool_env[key] = value + # Merge dictionaries env_values = _merge_env(tool_env, env_values) @@ -1611,7 +1632,6 @@ def _prepare_last_workfile(data, workdir): result will be stored. workdir (str): Path to folder where workfiles should be stored. 
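# Illustrative sketch of the white-list filtering added above, with assumed,
# simplified inputs (the real values come from system settings and
# get_local_settings()): only white-listed keys may be overridden per machine,
# and an override is applied only where the tool/application environment
# defines that key.
whitelist_envs = ["OCIO", "RENDER_LICENSE_SERVER"]           # system settings
local_envs = {"OCIO": "/local/config.ocio", "FOO": "bar"}    # local settings

filtered_local_envs = {
    key: value
    for key, value in local_envs.items()
    if key in whitelist_envs
}

tool_env = {"OCIO": "/studio/config.ocio", "PATH": "/opt/tool/bin"}
for key, value in filtered_local_envs.items():
    if key in tool_env:
        tool_env[key] = value

print(tool_env)
# {'OCIO': '/local/config.ocio', 'PATH': '/opt/tool/bin'}
# "FOO" is ignored (not white-listed); keys the tool does not define stay untouched.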
""" - import avalon.api from openpype.pipeline import HOST_WORKFILE_EXTENSIONS log = data["log"] From 483d97f71b4655a6c0391b52eedf2b5952780a35 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 17:37:01 +0200 Subject: [PATCH 167/207] OP-3073 - fix removed wrong hardcoded family --- .../webpublisher/plugins/publish/collect_published_files.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 65cef14703..56b2ef6e20 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -209,7 +209,6 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): msg = "No family found for combination of " +\ "task_type: {}, is_sequence:{}, extension: {}".format( task_type, is_sequence, extension) - found_family = "render" assert found_family, msg return (found_family, From 99c3cb50330decc5cdb8666b74eb6a04dc7577fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 17:48:58 +0200 Subject: [PATCH 168/207] fix dicionary loop --- .../modules/ftrack/plugins/publish/integrate_ftrack_api.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 7ebf807f55..650c59fae8 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -263,7 +263,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): self.log.info("Creating asset types with short names: {}".format( ", ".join(asset_type_names_by_missing_shorts.keys()) )) - for missing_short, type_name in asset_type_names_by_missing_shorts: + for missing_short, type_name in ( + asset_type_names_by_missing_shorts.items() + ): # Use short for name if name is not defined if not type_name: type_name = missing_short From d4182e5cea64f6374b2a8ef443215f678b0c40bd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 18:02:48 +0200 Subject: [PATCH 169/207] dev_test_plugin --- openpype/hosts/flame/api/test_plugin.py | 428 ++++++++++++++++++++++++ openpype/version.py | 9 +- 2 files changed, 436 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/flame/api/test_plugin.py diff --git a/openpype/hosts/flame/api/test_plugin.py b/openpype/hosts/flame/api/test_plugin.py new file mode 100644 index 0000000000..d75819a9e3 --- /dev/null +++ b/openpype/hosts/flame/api/test_plugin.py @@ -0,0 +1,428 @@ +import os +import tempfile +import itertools +import contextlib +import xml.etree.cElementTree as cET +from copy import deepcopy +import shutil +from xml.etree import ElementTree as ET + +import openpype.api as openpype + +import logging + +log = logging.getLogger(__name__) + + +@contextlib.contextmanager +def maintained_temp_file_path(suffix=None): + _suffix = suffix or "" + + try: + # Store dumped json to temporary file + temporary_file = tempfile.mktemp( + suffix=_suffix, prefix="flame_maintained_") + yield temporary_file.replace("\\", "/") + + except IOError as _error: + raise IOError( + "Not able to create temp json file: {}".format(_error)) + + finally: + # Remove the temporary json + os.remove(temporary_file) + + +class MediaInfoFile(object): + """Class to get media info file clip data + + Raises: + IOError: MEDIA_SCRIPT_PATH path doesn't exists + TypeError: 
Not able to generate clip xml data file + ET.ParseError: Missing clip in xml clip data + IOError: Not able to save xml clip data to file + + Attributes: + str: `MEDIA_SCRIPT_PATH` path to flame binary + logging.Logger: `log` logger + + TODO: add method for getting metadata to dict + """ + MEDIA_SCRIPT_PATH = "/opt/Autodesk/mio/current/dl_get_media_info" + + log = log + + _clip_data = None + _start_frame = None + _fps = None + _drop_mode = None + + def __init__(self, path, **kwargs): + + # replace log if any + if kwargs.get("logger"): + self.log = kwargs["logger"] + + # test if `dl_get_media_info` paht exists + self._validate_media_script_path() + + # derivate other feed variables + self.feed_basename = os.path.basename(path) + self.feed_dir = os.path.dirname(path) + self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() + + with maintained_temp_file_path(".clip") as tmp_path: + self.log.info("Temp File: {}".format(tmp_path)) + self._generate_media_info_file(tmp_path) + + # get clip data and make them single if there is multiple + # clips data + xml_data = self._make_single_clip_media_info(tmp_path) + self.log.info("xml_data: {}".format(xml_data)) + self.log.info("type: {}".format(type(xml_data))) + + # get all time related data and assign them + self._get_time_info_from_origin(xml_data) + self.log.info("start_frame: {}".format(self.start_frame)) + self.log.info("fps: {}".format(self.fps)) + self.log.info("drop frame: {}".format(self.drop_mode)) + self.clip_data = xml_data + + @property + def clip_data(self): + """Clip's xml clip data + + Returns: + xml.etree.ElementTree: xml data + """ + return self._clip_data + + @clip_data.setter + def clip_data(self, data): + self._clip_data = data + + @property + def start_frame(self): + """ Clip's starting frame found in timecode + + Returns: + int: number of frames + """ + return self._start_frame + + @start_frame.setter + def start_frame(self, number): + self._start_frame = int(number) + + @property + def fps(self): + """ Clip's frame rate + + Returns: + float: frame rate + """ + return self._fps + + @fps.setter + def fps(self, fl_number): + self._fps = float(fl_number) + + @property + def drop_mode(self): + """ Clip's drop frame mode + + Returns: + str: drop frame flag + """ + return self._drop_mode + + @drop_mode.setter + def drop_mode(self, text): + self._drop_mode = str(text) + + def _validate_media_script_path(self): + if not os.path.isfile(self.MEDIA_SCRIPT_PATH): + raise IOError("Media Scirpt does not exist: `{}`".format( + self.MEDIA_SCRIPT_PATH)) + + def _generate_media_info_file(self, fpath): + # Create cmd arguments for gettig xml file info file + cmd_args = [ + self.MEDIA_SCRIPT_PATH, + "-e", self.feed_ext, + "-o", fpath, + self.feed_dir + ] + + try: + # execute creation of clip xml template data + openpype.run_subprocess(cmd_args) + except TypeError as error: + raise TypeError( + "Error creating `{}` due: {}".format(fpath, error)) + + def _make_single_clip_media_info(self, fpath): + with open(fpath) as f: + lines = f.readlines() + _added_root = itertools.chain( + "", deepcopy(lines)[1:], "") + new_root = ET.fromstringlist(_added_root) + + # find the clip which is matching to my input name + xml_clips = new_root.findall("clip") + matching_clip = None + for xml_clip in xml_clips: + if xml_clip.find("name").text in self.feed_basename: + matching_clip = xml_clip + + if matching_clip is None: + # return warning there is missing clip + raise ET.ParseError( + "Missing clip in `{}`. 
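# Sketch of the wrapping trick _make_single_clip_media_info relies on:
# dl_get_media_info may emit several top-level <clip> elements, which is not
# well-formed XML on its own, so the lines (minus the XML declaration) are
# chained between an opening and closing wrapper tag before parsing. The file
# content and the "root" wrapper tag name below are stand-ins for the
# generated data.
import itertools
from xml.etree import ElementTree as ET

lines = [
    '<?xml version="1.0"?>\n',
    '<clip><name>shot010_plate_v001</name></clip>\n',
    '<clip><name>shot010_plate_v002</name></clip>\n',
]

_added_root = itertools.chain(["<root>"], lines[1:], ["</root>"])
new_root = ET.fromstringlist(_added_root)

feed_basename = "shot010_plate_v002.1001.exr"
matching_clip = None
for xml_clip in new_root.findall("clip"):
    if xml_clip.find("name").text in feed_basename:
        matching_clip = xml_clip

print(matching_clip.find("name").text)   # shot010_plate_v002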
Available clips {}".format( + self.feed_basename, [ + xml_clip.find("name").text + for xml_clip in xml_clips + ] + )) + + return matching_clip + + def _get_time_info_from_origin(self, xml_data): + try: + for out_track in xml_data.iter('track'): + for out_feed in out_track.iter('feed'): + # start frame + out_feed_nb_ticks_obj = out_feed.find( + 'startTimecode/nbTicks') + self.start_frame = out_feed_nb_ticks_obj.text + + # fps + out_feed_fps_obj = out_feed.find( + 'startTimecode/rate') + self.fps = out_feed_fps_obj.text + + # drop frame mode + out_feed_drop_mode_obj = out_feed.find( + 'startTimecode/dropMode') + self.drop_mode = out_feed_drop_mode_obj.text + break + else: + continue + except Exception as msg: + self.log.warning(msg) + + @staticmethod + def write_clip_data_to_file(fpath, xml_data): + log.info(">>> type of xml_data: {}".format(type(xml_data))) + if isinstance(xml_data, ET.ElementTree): + xml_data = xml_data.getroot() + try: + # save it as new file + tree = cET.ElementTree(xml_data) + tree.write( + fpath, xml_declaration=True, + method='xml', encoding='UTF-8' + ) + except IOError as error: + raise IOError( + "Not able to write data to file: {}".format(error)) + + +class OpenClipSolver(MediaInfoFile): + create_new_clip = False + + log = log + + def __init__(self, openclip_file_path, feed_data): + self.out_file = openclip_file_path + + # new feed variables: + feed_path = feed_data.pop("path") + + # initialize parent class + super(OpenClipSolver, self).__init__( + feed_path, + **feed_data + ) + + # get other metadata + self.feed_version_name = feed_data["version"] + self.feed_colorspace = feed_data.get("colorspace") + self.log.info("feed_version_name: {}".format(self.feed_version_name)) + + # derivate other feed variables + self.feed_basename = os.path.basename(feed_path) + self.feed_dir = os.path.dirname(feed_path) + self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() + self.log.info("feed_ext: {}".format(self.feed_ext)) + self.log.info("out_file: {}".format(self.out_file)) + if not self._is_valid_tmp_file(self.out_file): + self.create_new_clip = True + + def _is_valid_tmp_file(self, file): + # check if file exists + if os.path.isfile(file): + # test also if file is not empty + with open(file) as f: + lines = f.readlines() + if len(lines) > 2: + return True + + # file is probably corrupted + os.remove(file) + return False + + def make(self): + + if self.create_new_clip: + # New openClip + self._create_new_open_clip() + else: + self._update_open_clip() + + def _clear_handler(self, xml_object): + for handler in xml_object.findall("./handler"): + self.log.info("Handler found") + xml_object.remove(handler) + + def _create_new_open_clip(self): + self.log.info("Building new openClip") + self.log.info(">> self.clip_data: {}".format(self.clip_data)) + + # clip data comming from MediaInfoFile + tmp_xml_feeds = self.clip_data.find('tracks/track/feeds') + tmp_xml_feeds.set('currentVersion', self.feed_version_name) + for tmp_feed in tmp_xml_feeds: + tmp_feed.set('vuid', self.feed_version_name) + + # add colorspace if any is set + if self.feed_colorspace: + self._add_colorspace(tmp_feed, self.feed_colorspace) + + self._clear_handler(tmp_feed) + + tmp_xml_versions_obj = self.clip_data.find('versions') + tmp_xml_versions_obj.set('currentVersion', self.feed_version_name) + for xml_new_version in tmp_xml_versions_obj: + xml_new_version.set('uid', self.feed_version_name) + xml_new_version.set('type', 'version') + + self._clear_handler(self.clip_data) + self.log.info("Adding feed 
version: {}".format(self.feed_basename)) + + self.write_clip_data_to_file(self.out_file, self.clip_data) + + def _update_open_clip(self): + self.log.info("Updating openClip ..") + + out_xml = ET.parse(self.out_file) + + self.log.info(">> out_xml: {}".format(out_xml)) + self.log.info(">> self.clip_data: {}".format(self.clip_data)) + + # Get new feed from tmp file + tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed') + + self._clear_handler(tmp_xml_feed) + + # update fps from MediaInfoFile class + if self.fps: + tmp_feed_fps_obj = tmp_xml_feed.find( + "startTimecode/rate") + tmp_feed_fps_obj.text = str(self.fps) + + # update start_frame from MediaInfoFile class + if self.start_frame: + tmp_feed_nb_ticks_obj = tmp_xml_feed.find( + "startTimecode/nbTicks") + tmp_feed_nb_ticks_obj.text = str(self.start_frame) + + # update drop_mode from MediaInfoFile class + if self.drop_mode: + tmp_feed_drop_mode_obj = tmp_xml_feed.find( + "startTimecode/dropMode") + tmp_feed_drop_mode_obj.text = str(self.drop_mode) + + new_path_obj = tmp_xml_feed.find( + "spans/span/path") + new_path = new_path_obj.text + + feed_added = False + if not self._feed_exists(out_xml, new_path): + tmp_xml_feed.set('vuid', self.feed_version_name) + # Append new temp file feed to .clip source out xml + out_track = out_xml.find("tracks/track") + # add colorspace if any is set + if self.feed_colorspace: + self._add_colorspace(tmp_xml_feed, self.feed_colorspace) + + out_feeds = out_track.find('feeds') + out_feeds.set('currentVersion', self.feed_version_name) + out_feeds.append(tmp_xml_feed) + + self.log.info( + "Appending new feed: {}".format( + self.feed_version_name)) + feed_added = True + + if feed_added: + # Append vUID to versions + out_xml_versions_obj = out_xml.find('versions') + out_xml_versions_obj.set( + 'currentVersion', self.feed_version_name) + new_version_obj = ET.Element( + "version", {"type": "version", "uid": self.feed_version_name}) + out_xml_versions_obj.insert(0, new_version_obj) + + self._clear_handler(out_xml) + + # fist create backup + self._create_openclip_backup_file(self.out_file) + + self.log.info("Adding feed version: {}".format( + self.feed_version_name)) + + self.write_clip_data_to_file(self.out_file, out_xml) + + self.log.info("openClip Updated: {}".format(self.out_file)) + + def _feed_exists(self, xml_data, path): + # loop all available feed paths and check if + # the path is not already in file + for src_path in xml_data.iter('path'): + if path == src_path.text: + self.log.warning( + "Not appending file as it already is in .clip file") + return True + + def _create_openclip_backup_file(self, file): + bck_file = "{}.bak".format(file) + # if backup does not exist + if not os.path.isfile(bck_file): + shutil.copy2(file, bck_file) + else: + # in case it exists and is already multiplied + created = False + for _i in range(1, 99): + bck_file = "{name}.bak.{idx:0>2}".format( + name=file, + idx=_i) + # create numbered backup file + if not os.path.isfile(bck_file): + shutil.copy2(file, bck_file) + created = True + break + # in case numbered does not exists + if not created: + bck_file = "{}.bak.last".format(file) + shutil.copy2(file, bck_file) + + def _add_colorspace(self, feed_obj, profile_name): + feed_storage_obj = feed_obj.find("storageFormat") + feed_clr_obj = feed_storage_obj.find("colourSpace") + if feed_clr_obj is not None: + feed_clr_obj = ET.Element( + "colourSpace", {"type": "string"}) + feed_storage_obj.append(feed_clr_obj) + + feed_clr_obj.text = profile_name diff --git a/openpype/version.py 
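# Small illustration of the backup naming in _create_openclip_backup_file
# above: the first backup is "<file>.bak", further ones ".bak.01" .. ".bak.98",
# and ".bak.last" is reused once the numbered slots run out. Reduced here to a
# pure name computation (a set stands in for os.path.isfile) so it runs anywhere.
def next_backup_name(file, existing):
    bck_file = "{}.bak".format(file)
    if bck_file not in existing:
        return bck_file
    for _i in range(1, 99):
        bck_file = "{name}.bak.{idx:0>2}".format(name=file, idx=_i)
        if bck_file not in existing:
            return bck_file
    return "{}.bak.last".format(file)

print(next_backup_name("shot010.clip", set()))                  # shot010.clip.bak
print(next_backup_name("shot010.clip", {"shot010.clip.bak"}))   # shot010.clip.bak.01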
b/openpype/version.py index 97aa585ca7..d447d27172 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,10 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.3" +__version__ = "3.9.3-nightly.1-upp220408" + + +''' +includes: + - Flame: integrate batch groups: + https://github.com/pypeclub/OpenPype/pull/2928 +''' From 06d2e898654b9aca82c9a8c979938cc44ee1f766 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 18:03:30 +0200 Subject: [PATCH 170/207] testing file --- openpype/hosts/flame/tests/flame_test.py | 30 ++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 openpype/hosts/flame/tests/flame_test.py diff --git a/openpype/hosts/flame/tests/flame_test.py b/openpype/hosts/flame/tests/flame_test.py new file mode 100644 index 0000000000..402983eeba --- /dev/null +++ b/openpype/hosts/flame/tests/flame_test.py @@ -0,0 +1,30 @@ +from openpype.lib import import_filepath + +plugin = import_filepath( + "/Users/pype.club/code/openpype/openpype/hosts/flame/api/test_plugin.py") + +openclip_file_path = "/Users/pype.club/FLAME_STORAGE/test_shot_fps_float/test.clip" +# feed_datas = [ +# { +# "path": "/Users/pype.club/pype_club_root/OP02_VFX_demo/shots/a/a0000001/publish/plate/plateMain/v007/op02vfx_a0000001_plateMain_v007_exr16fpdwaaCl.0997.exr", +# "version": "v007" +# }, +# { +# "path": "/Users/pype.club/pype_club_root/OP02_VFX_demo/shots/a/a0000001/publish/plate/plateMain/v008/op02vfx_a0000001_plateMain_v008_exr16fpdwaaCl.0997.exr", +# "version": "v008" +# } +# ] + +feed_datas = [ + { + "path": "/Users/pype.club/FLAME_STORAGE/test_shot_fps_float/v001/file_name_v001.1001.exr", + "version": "v001" + }, + { + "path": "/Users/pype.club/FLAME_STORAGE/test_shot_fps_float/v002/file_name_v002.1001.exr", + "version": "v002" + } +] +for feed_data in feed_datas: + oclip = plugin.OpenClipSolver(openclip_file_path, feed_data) + oclip.make() From b9a7d810cc20949452a4e5e069497b08ca72f8ae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 18:32:40 +0200 Subject: [PATCH 171/207] fix applying of env variables that are not defined by applications or tools --- openpype/lib/applications.py | 43 ++++++++++++++++++++++-------------- 1 file changed, 27 insertions(+), 16 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 049658a548..07b91dda03 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1399,8 +1399,27 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): app = data["app"] log = data["log"] + source_env = data["env"].copy() - _add_python_version_paths(app, data["env"], log) + _add_python_version_paths(app, source_env, log) + + # Use environments from local settings + filtered_local_envs = {} + system_settings = data["system_settings"] + whitelist_envs = system_settings["general"].get("local_env_white_list") + if whitelist_envs: + local_settings = get_local_settings() + local_envs = local_settings.get("environments") or {} + filtered_local_envs = { + key: value + for key, value in local_envs.items() + if key in whitelist_envs + } + + # Apply local environment variables for already existing values + for key, value in filtered_local_envs.items(): + if key in source_env: + source_env[key] = value # `added_env_keys` has debug purpose added_env_keys = {app.group.name, app.name} @@ -1438,19 +1457,6 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): ) ) - # Use environments from local settings - 
filtered_local_envs = {} - system_settings = data["system_settings"] - whitelist_envs = system_settings["general"].get("local_env_white_list") - if whitelist_envs: - local_settings = get_local_settings() - local_envs = local_settings.get("environments") or {} - filtered_local_envs = { - key: value - for key, value in local_envs.items() - if key in whitelist_envs - } - env_values = {} for _env_values in environments: if not _env_values: @@ -1458,6 +1464,10 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): # Choose right platform tool_env = parse_environments(_env_values, env_group) + + # Apply local environment variables + # - must happen between all values because they may be used during + # merge for key, value in filtered_local_envs.items(): if key in tool_env: tool_env[key] = value @@ -1465,7 +1475,8 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): # Merge dictionaries env_values = _merge_env(tool_env, env_values) - merged_env = _merge_env(env_values, data["env"]) + merged_env = _merge_env(env_values, source_env) + loaded_env = acre.compute(merged_env, cleanup=False) final_env = None @@ -1485,7 +1496,7 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): if final_env is None: final_env = loaded_env - keys_to_remove = set(data["env"].keys()) - set(final_env.keys()) + keys_to_remove = set(source_env.keys()) - set(final_env.keys()) # Update env data["env"].update(final_env) From 2af6e7140ec391ace6dd310f7744e6da09b74583 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 18:51:55 +0200 Subject: [PATCH 172/207] changed label and added tooltip --- .../entities/schemas/system_schema/schema_general.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json index 997404b2e6..fcab4cd5d8 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_general.json +++ b/openpype/settings/entities/schemas/system_schema/schema_general.json @@ -113,7 +113,8 @@ { "type": "list", "key": "local_env_white_list", - "label": "White list of local environment variables", + "label": "Local overrides of environment variable keys", + "tooltip": "Environment variable keys that can be changed per machine using Local settings UI.\nKey changes are applied only on applications and tools environments.", "use_label_wrap": true, "object_type": "text" }, From 9893903a5728bea368f033889bd352dd1854b1e3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 8 Apr 2022 18:54:55 +0200 Subject: [PATCH 173/207] add label when there are not env keys to set --- .../local_settings/environments_widget.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/openpype/tools/settings/local_settings/environments_widget.py b/openpype/tools/settings/local_settings/environments_widget.py index 70631e8f29..14ca517851 100644 --- a/openpype/tools/settings/local_settings/environments_widget.py +++ b/openpype/tools/settings/local_settings/environments_widget.py @@ -37,12 +37,10 @@ class LocalEnvironmentsWidgets(QtWidgets.QWidget): content_widget = QtWidgets.QWidget(self) content_layout = QtWidgets.QGridLayout(content_widget) content_layout.setContentsMargins(0, 0, 0, 0) - content_layout.setColumnStretch(0, 0) - content_layout.setColumnStretch(1, 1) white_list_entity = ( self.system_settings_entity["general"]["local_env_white_list"] ) - + row = -1 for row, 
item in enumerate(white_list_entity): key = item.value label_widget = QtWidgets.QLabel(key, self) @@ -54,6 +52,21 @@ class LocalEnvironmentsWidgets(QtWidgets.QWidget): self._widgets_by_env_key[key] = input_widget + if row < 0: + label_widget = QtWidgets.QLabel( + ( + "Your studio does not allow to change" + " Environment variables locally." + ), + self + ) + content_layout.addWidget(label_widget, 0, 0) + content_layout.setColumnStretch(0, 1) + + else: + content_layout.setColumnStretch(0, 0) + content_layout.setColumnStretch(1, 1) + self._layout.addWidget(content_widget, 1) self._content_layout = content_layout From a103eba505d9358c0c7058f614e36dd14b2edd4b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 19:03:54 +0200 Subject: [PATCH 174/207] flame: fixing OpenClipSolver --- openpype/hosts/flame/api/lib.py | 18 +- openpype/hosts/flame/api/plugin.py | 183 +++------- openpype/hosts/flame/api/test_plugin.py | 428 ----------------------- openpype/hosts/flame/tests/flame_test.py | 30 -- 4 files changed, 56 insertions(+), 603 deletions(-) delete mode 100644 openpype/hosts/flame/api/test_plugin.py delete mode 100644 openpype/hosts/flame/tests/flame_test.py diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 6ee0eb6a82..c7c444c1fb 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -784,9 +784,14 @@ class MediaInfoFile(object): # get clip data and make them single if there is multiple # clips data xml_data = self._make_single_clip_media_info(tmp_path) + self.log.debug("xml_data: {}".format(xml_data)) + self.log.debug("type: {}".format(type(xml_data))) # get all time related data and assign them self._get_time_info_from_origin(xml_data) + self.log.debug("start_frame: {}".format(self.start_frame)) + self.log.debug("fps: {}".format(self.fps)) + self.log.debug("drop frame: {}".format(self.drop_mode)) self.clip_data = xml_data @property @@ -913,10 +918,19 @@ class MediaInfoFile(object): self.log.warning(msg) @staticmethod - def write_clip_data_to_file(fpath, xml_data): + def write_clip_data_to_file(fpath, xml_element_data): + """ Write xml element of clip data to file + + Args: + fpath (string): file path + xml_element_data (xml.etree.ElementTree.Element): xml data + + Raises: + IOError: If data could not be written to file + """ try: # save it as new file - tree = cET.ElementTree(xml_data) + tree = cET.ElementTree(xml_element_data) tree.write( fpath, xml_declaration=True, method='xml', encoding='UTF-8' diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index bd0f9f1a81..42e6e19931 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -1,15 +1,11 @@ -import itertools import os import re import shutil -import sys -import xml.etree.cElementTree as cET from copy import deepcopy from xml.etree import ElementTree as ET import openpype.api as openpype import qargparse -import six from openpype import style from openpype.pipeline import LegacyCreator, LoaderPlugin from Qt import QtCore, QtWidgets @@ -658,8 +654,8 @@ class PublishableClip: # Publishing plugin functions -# Loader plugin functions +# Loader plugin functions class ClipLoader(LoaderPlugin): """A basic clip loader for Flame @@ -679,53 +675,37 @@ class ClipLoader(LoaderPlugin): ] -# TODO: inheritance from flame.api.lib.MediaInfoFile -class OpenClipSolver: - media_script_path = "/opt/Autodesk/mio/current/dl_get_media_info" - tmp_name = "_tmp.clip" - tmp_file = None +class 
OpenClipSolver(flib.MediaInfoFile): create_new_clip = False - out_feed_nb_ticks = None - out_feed_fps = None - out_feed_drop_mode = None - log = log def __init__(self, openclip_file_path, feed_data): - # test if media script paht exists - self._validate_media_script_path() + self.out_file = openclip_file_path # new feed variables: - feed_path = feed_data["path"] + feed_path = feed_data.pop("path") + + # initialize parent class + super(OpenClipSolver, self).__init__( + feed_path, + **feed_data + ) + + # get other metadata self.feed_version_name = feed_data["version"] self.feed_colorspace = feed_data.get("colorspace") - - if feed_data.get("logger"): - self.log = feed_data["logger"] + self.log.debug("feed_version_name: {}".format(self.feed_version_name)) # derivate other feed variables self.feed_basename = os.path.basename(feed_path) self.feed_dir = os.path.dirname(feed_path) self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() - - if not self._is_valid_tmp_file(openclip_file_path): - # openclip does not exist yet and will be created - self.tmp_file = self.out_file = openclip_file_path + self.log.debug("feed_ext: {}".format(self.feed_ext)) + self.log.debug("out_file: {}".format(self.out_file)) + if not self._is_valid_tmp_file(self.out_file): self.create_new_clip = True - else: - # update already created clip - # output a temp file - self.out_file = openclip_file_path - self.tmp_file = os.path.join(self.feed_dir, self.tmp_name) - - # remove previously generated temp files - # it will be regenerated - self._clear_tmp_file() - - self.log.info("Temp File: {}".format(self.tmp_file)) - def _is_valid_tmp_file(self, file): # check if file exists if os.path.isfile(file): @@ -740,7 +720,6 @@ class OpenClipSolver: return False def make(self): - self._generate_media_info_file() if self.create_new_clip: # New openClip @@ -748,69 +727,17 @@ class OpenClipSolver: else: self._update_open_clip() - def _validate_media_script_path(self): - if not os.path.isfile(self.media_script_path): - raise IOError("Media Scirpt does not exist: `{}`".format( - self.media_script_path)) - - def _generate_media_info_file(self): - # Create cmd arguments for gettig xml file info file - cmd_args = [ - self.media_script_path, - "-e", self.feed_ext, - "-o", self.tmp_file, - self.feed_dir - ] - - # execute creation of clip xml template data - try: - openpype.run_subprocess(cmd_args) - self._make_single_clip_media_info() - except TypeError: - self.log.error("Error creating self.tmp_file") - six.reraise(*sys.exc_info()) - - def _make_single_clip_media_info(self): - with open(self.tmp_file) as f: - lines = f.readlines() - _added_root = itertools.chain( - "", deepcopy(lines)[1:], "") - new_root = ET.fromstringlist(_added_root) - - # find the clip which is matching to my input name - xml_clips = new_root.findall("clip") - matching_clip = None - for xml_clip in xml_clips: - if xml_clip.find("name").text in self.feed_basename: - matching_clip = xml_clip - - if matching_clip is None: - # return warning there is missing clip - raise ET.ParseError( - "Missing clip in `{}`. 
Available clips {}".format( - self.feed_basename, [ - xml_clip.find("name").text - for xml_clip in xml_clips - ] - )) - - self._write_result_xml_to_file(self.tmp_file, matching_clip) - - def _clear_tmp_file(self): - if os.path.isfile(self.tmp_file): - os.remove(self.tmp_file) - def _clear_handler(self, xml_object): for handler in xml_object.findall("./handler"): - self.log.debug("Handler found") + self.log.info("Handler found") xml_object.remove(handler) def _create_new_open_clip(self): self.log.info("Building new openClip") + self.log.debug(">> self.clip_data: {}".format(self.clip_data)) - tmp_xml = ET.parse(self.tmp_file) - - tmp_xml_feeds = tmp_xml.find('tracks/track/feeds') + # clip data comming from MediaInfoFile + tmp_xml_feeds = self.clip_data.find('tracks/track/feeds') tmp_xml_feeds.set('currentVersion', self.feed_version_name) for tmp_feed in tmp_xml_feeds: tmp_feed.set('vuid', self.feed_version_name) @@ -821,46 +748,48 @@ class OpenClipSolver: self._clear_handler(tmp_feed) - tmp_xml_versions_obj = tmp_xml.find('versions') + tmp_xml_versions_obj = self.clip_data.find('versions') tmp_xml_versions_obj.set('currentVersion', self.feed_version_name) for xml_new_version in tmp_xml_versions_obj: xml_new_version.set('uid', self.feed_version_name) xml_new_version.set('type', 'version') - xml_data = self._fix_xml_data(tmp_xml) + self._clear_handler(self.clip_data) self.log.info("Adding feed version: {}".format(self.feed_basename)) - self._write_result_xml_to_file(self.out_file, xml_data) - - self.log.info("openClip Updated: {}".format(self.tmp_file)) + self.write_clip_data_to_file(self.out_file, self.clip_data) def _update_open_clip(self): self.log.info("Updating openClip ..") out_xml = ET.parse(self.out_file) - tmp_xml = ET.parse(self.tmp_file) + out_xml = out_xml.getroot() self.log.debug(">> out_xml: {}".format(out_xml)) - self.log.debug(">> tmp_xml: {}".format(tmp_xml)) + self.log.debug(">> self.clip_data: {}".format(self.clip_data)) # Get new feed from tmp file - tmp_xml_feed = tmp_xml.find('tracks/track/feeds/feed') + tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed') self._clear_handler(tmp_xml_feed) - self._get_time_info_from_origin(out_xml) - if self.out_feed_fps: + # update fps from MediaInfoFile class + if self.fps: tmp_feed_fps_obj = tmp_xml_feed.find( "startTimecode/rate") - tmp_feed_fps_obj.text = self.out_feed_fps - if self.out_feed_nb_ticks: + tmp_feed_fps_obj.text = str(self.fps) + + # update start_frame from MediaInfoFile class + if self.start_frame: tmp_feed_nb_ticks_obj = tmp_xml_feed.find( "startTimecode/nbTicks") - tmp_feed_nb_ticks_obj.text = self.out_feed_nb_ticks - if self.out_feed_drop_mode: + tmp_feed_nb_ticks_obj.text = str(self.start_frame) + + # update drop_mode from MediaInfoFile class + if self.drop_mode: tmp_feed_drop_mode_obj = tmp_xml_feed.find( "startTimecode/dropMode") - tmp_feed_drop_mode_obj.text = self.out_feed_drop_mode + tmp_feed_drop_mode_obj.text = str(self.drop_mode) new_path_obj = tmp_xml_feed.find( "spans/span/path") @@ -893,7 +822,7 @@ class OpenClipSolver: "version", {"type": "version", "uid": self.feed_version_name}) out_xml_versions_obj.insert(0, new_version_obj) - xml_data = self._fix_xml_data(out_xml) + self._clear_handler(out_xml) # fist create backup self._create_openclip_backup_file(self.out_file) @@ -901,30 +830,9 @@ class OpenClipSolver: self.log.info("Adding feed version: {}".format( self.feed_version_name)) - self._write_result_xml_to_file(self.out_file, xml_data) + self.write_clip_data_to_file(self.out_file, out_xml) - 
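# Sketch of the startTimecode update performed in _update_open_clip above:
# values probed by MediaInfoFile (fps, start frame, drop mode) are written
# back into the matching elements of the feed XML. The fragment and values
# below are made up and heavily reduced.
from xml.etree import ElementTree as ET

feed = ET.fromstring(
    "<feed><startTimecode>"
    "<rate>24</rate><nbTicks>1000</nbTicks><dropMode>NDF</dropMode>"
    "</startTimecode></feed>"
)

fps, start_frame, drop_mode = 25.0, 86400, "NDF"

feed.find("startTimecode/rate").text = str(fps)
feed.find("startTimecode/nbTicks").text = str(start_frame)
feed.find("startTimecode/dropMode").text = str(drop_mode)

print(ET.tostring(feed).decode())
# <feed><startTimecode><rate>25.0</rate><nbTicks>86400</nbTicks><dropMode>NDF</dropMode></startTimecode></feed>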
self.log.info("openClip Updated: {}".format(self.out_file)) - - self._clear_tmp_file() - - def _get_time_info_from_origin(self, xml_data): - try: - for out_track in xml_data.iter('track'): - for out_feed in out_track.iter('feed'): - out_feed_nb_ticks_obj = out_feed.find( - 'startTimecode/nbTicks') - self.out_feed_nb_ticks = out_feed_nb_ticks_obj.text - out_feed_fps_obj = out_feed.find( - 'startTimecode/rate') - self.out_feed_fps = out_feed_fps_obj.text - out_feed_drop_mode_obj = out_feed.find( - 'startTimecode/dropMode') - self.out_feed_drop_mode = out_feed_drop_mode_obj.text - break - else: - continue - except Exception as msg: - self.log.warning(msg) + self.log.debug("OpenClip Updated: {}".format(self.out_file)) def _feed_exists(self, xml_data, path): # loop all available feed paths and check if @@ -935,17 +843,6 @@ class OpenClipSolver: "Not appending file as it already is in .clip file") return True - def _fix_xml_data(self, xml_data): - xml_root = xml_data.getroot() - self._clear_handler(xml_root) - return xml_root - - def _write_result_xml_to_file(self, file, xml_data): - # save it as new file - tree = cET.ElementTree(xml_data) - tree.write(file, xml_declaration=True, - method='xml', encoding='UTF-8') - def _create_openclip_backup_file(self, file): bck_file = "{}.bak".format(file) # if backup does not exist diff --git a/openpype/hosts/flame/api/test_plugin.py b/openpype/hosts/flame/api/test_plugin.py deleted file mode 100644 index d75819a9e3..0000000000 --- a/openpype/hosts/flame/api/test_plugin.py +++ /dev/null @@ -1,428 +0,0 @@ -import os -import tempfile -import itertools -import contextlib -import xml.etree.cElementTree as cET -from copy import deepcopy -import shutil -from xml.etree import ElementTree as ET - -import openpype.api as openpype - -import logging - -log = logging.getLogger(__name__) - - -@contextlib.contextmanager -def maintained_temp_file_path(suffix=None): - _suffix = suffix or "" - - try: - # Store dumped json to temporary file - temporary_file = tempfile.mktemp( - suffix=_suffix, prefix="flame_maintained_") - yield temporary_file.replace("\\", "/") - - except IOError as _error: - raise IOError( - "Not able to create temp json file: {}".format(_error)) - - finally: - # Remove the temporary json - os.remove(temporary_file) - - -class MediaInfoFile(object): - """Class to get media info file clip data - - Raises: - IOError: MEDIA_SCRIPT_PATH path doesn't exists - TypeError: Not able to generate clip xml data file - ET.ParseError: Missing clip in xml clip data - IOError: Not able to save xml clip data to file - - Attributes: - str: `MEDIA_SCRIPT_PATH` path to flame binary - logging.Logger: `log` logger - - TODO: add method for getting metadata to dict - """ - MEDIA_SCRIPT_PATH = "/opt/Autodesk/mio/current/dl_get_media_info" - - log = log - - _clip_data = None - _start_frame = None - _fps = None - _drop_mode = None - - def __init__(self, path, **kwargs): - - # replace log if any - if kwargs.get("logger"): - self.log = kwargs["logger"] - - # test if `dl_get_media_info` paht exists - self._validate_media_script_path() - - # derivate other feed variables - self.feed_basename = os.path.basename(path) - self.feed_dir = os.path.dirname(path) - self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() - - with maintained_temp_file_path(".clip") as tmp_path: - self.log.info("Temp File: {}".format(tmp_path)) - self._generate_media_info_file(tmp_path) - - # get clip data and make them single if there is multiple - # clips data - xml_data = 
self._make_single_clip_media_info(tmp_path) - self.log.info("xml_data: {}".format(xml_data)) - self.log.info("type: {}".format(type(xml_data))) - - # get all time related data and assign them - self._get_time_info_from_origin(xml_data) - self.log.info("start_frame: {}".format(self.start_frame)) - self.log.info("fps: {}".format(self.fps)) - self.log.info("drop frame: {}".format(self.drop_mode)) - self.clip_data = xml_data - - @property - def clip_data(self): - """Clip's xml clip data - - Returns: - xml.etree.ElementTree: xml data - """ - return self._clip_data - - @clip_data.setter - def clip_data(self, data): - self._clip_data = data - - @property - def start_frame(self): - """ Clip's starting frame found in timecode - - Returns: - int: number of frames - """ - return self._start_frame - - @start_frame.setter - def start_frame(self, number): - self._start_frame = int(number) - - @property - def fps(self): - """ Clip's frame rate - - Returns: - float: frame rate - """ - return self._fps - - @fps.setter - def fps(self, fl_number): - self._fps = float(fl_number) - - @property - def drop_mode(self): - """ Clip's drop frame mode - - Returns: - str: drop frame flag - """ - return self._drop_mode - - @drop_mode.setter - def drop_mode(self, text): - self._drop_mode = str(text) - - def _validate_media_script_path(self): - if not os.path.isfile(self.MEDIA_SCRIPT_PATH): - raise IOError("Media Scirpt does not exist: `{}`".format( - self.MEDIA_SCRIPT_PATH)) - - def _generate_media_info_file(self, fpath): - # Create cmd arguments for gettig xml file info file - cmd_args = [ - self.MEDIA_SCRIPT_PATH, - "-e", self.feed_ext, - "-o", fpath, - self.feed_dir - ] - - try: - # execute creation of clip xml template data - openpype.run_subprocess(cmd_args) - except TypeError as error: - raise TypeError( - "Error creating `{}` due: {}".format(fpath, error)) - - def _make_single_clip_media_info(self, fpath): - with open(fpath) as f: - lines = f.readlines() - _added_root = itertools.chain( - "", deepcopy(lines)[1:], "") - new_root = ET.fromstringlist(_added_root) - - # find the clip which is matching to my input name - xml_clips = new_root.findall("clip") - matching_clip = None - for xml_clip in xml_clips: - if xml_clip.find("name").text in self.feed_basename: - matching_clip = xml_clip - - if matching_clip is None: - # return warning there is missing clip - raise ET.ParseError( - "Missing clip in `{}`. 
Available clips {}".format( - self.feed_basename, [ - xml_clip.find("name").text - for xml_clip in xml_clips - ] - )) - - return matching_clip - - def _get_time_info_from_origin(self, xml_data): - try: - for out_track in xml_data.iter('track'): - for out_feed in out_track.iter('feed'): - # start frame - out_feed_nb_ticks_obj = out_feed.find( - 'startTimecode/nbTicks') - self.start_frame = out_feed_nb_ticks_obj.text - - # fps - out_feed_fps_obj = out_feed.find( - 'startTimecode/rate') - self.fps = out_feed_fps_obj.text - - # drop frame mode - out_feed_drop_mode_obj = out_feed.find( - 'startTimecode/dropMode') - self.drop_mode = out_feed_drop_mode_obj.text - break - else: - continue - except Exception as msg: - self.log.warning(msg) - - @staticmethod - def write_clip_data_to_file(fpath, xml_data): - log.info(">>> type of xml_data: {}".format(type(xml_data))) - if isinstance(xml_data, ET.ElementTree): - xml_data = xml_data.getroot() - try: - # save it as new file - tree = cET.ElementTree(xml_data) - tree.write( - fpath, xml_declaration=True, - method='xml', encoding='UTF-8' - ) - except IOError as error: - raise IOError( - "Not able to write data to file: {}".format(error)) - - -class OpenClipSolver(MediaInfoFile): - create_new_clip = False - - log = log - - def __init__(self, openclip_file_path, feed_data): - self.out_file = openclip_file_path - - # new feed variables: - feed_path = feed_data.pop("path") - - # initialize parent class - super(OpenClipSolver, self).__init__( - feed_path, - **feed_data - ) - - # get other metadata - self.feed_version_name = feed_data["version"] - self.feed_colorspace = feed_data.get("colorspace") - self.log.info("feed_version_name: {}".format(self.feed_version_name)) - - # derivate other feed variables - self.feed_basename = os.path.basename(feed_path) - self.feed_dir = os.path.dirname(feed_path) - self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower() - self.log.info("feed_ext: {}".format(self.feed_ext)) - self.log.info("out_file: {}".format(self.out_file)) - if not self._is_valid_tmp_file(self.out_file): - self.create_new_clip = True - - def _is_valid_tmp_file(self, file): - # check if file exists - if os.path.isfile(file): - # test also if file is not empty - with open(file) as f: - lines = f.readlines() - if len(lines) > 2: - return True - - # file is probably corrupted - os.remove(file) - return False - - def make(self): - - if self.create_new_clip: - # New openClip - self._create_new_open_clip() - else: - self._update_open_clip() - - def _clear_handler(self, xml_object): - for handler in xml_object.findall("./handler"): - self.log.info("Handler found") - xml_object.remove(handler) - - def _create_new_open_clip(self): - self.log.info("Building new openClip") - self.log.info(">> self.clip_data: {}".format(self.clip_data)) - - # clip data comming from MediaInfoFile - tmp_xml_feeds = self.clip_data.find('tracks/track/feeds') - tmp_xml_feeds.set('currentVersion', self.feed_version_name) - for tmp_feed in tmp_xml_feeds: - tmp_feed.set('vuid', self.feed_version_name) - - # add colorspace if any is set - if self.feed_colorspace: - self._add_colorspace(tmp_feed, self.feed_colorspace) - - self._clear_handler(tmp_feed) - - tmp_xml_versions_obj = self.clip_data.find('versions') - tmp_xml_versions_obj.set('currentVersion', self.feed_version_name) - for xml_new_version in tmp_xml_versions_obj: - xml_new_version.set('uid', self.feed_version_name) - xml_new_version.set('type', 'version') - - self._clear_handler(self.clip_data) - self.log.info("Adding feed 
version: {}".format(self.feed_basename)) - - self.write_clip_data_to_file(self.out_file, self.clip_data) - - def _update_open_clip(self): - self.log.info("Updating openClip ..") - - out_xml = ET.parse(self.out_file) - - self.log.info(">> out_xml: {}".format(out_xml)) - self.log.info(">> self.clip_data: {}".format(self.clip_data)) - - # Get new feed from tmp file - tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed') - - self._clear_handler(tmp_xml_feed) - - # update fps from MediaInfoFile class - if self.fps: - tmp_feed_fps_obj = tmp_xml_feed.find( - "startTimecode/rate") - tmp_feed_fps_obj.text = str(self.fps) - - # update start_frame from MediaInfoFile class - if self.start_frame: - tmp_feed_nb_ticks_obj = tmp_xml_feed.find( - "startTimecode/nbTicks") - tmp_feed_nb_ticks_obj.text = str(self.start_frame) - - # update drop_mode from MediaInfoFile class - if self.drop_mode: - tmp_feed_drop_mode_obj = tmp_xml_feed.find( - "startTimecode/dropMode") - tmp_feed_drop_mode_obj.text = str(self.drop_mode) - - new_path_obj = tmp_xml_feed.find( - "spans/span/path") - new_path = new_path_obj.text - - feed_added = False - if not self._feed_exists(out_xml, new_path): - tmp_xml_feed.set('vuid', self.feed_version_name) - # Append new temp file feed to .clip source out xml - out_track = out_xml.find("tracks/track") - # add colorspace if any is set - if self.feed_colorspace: - self._add_colorspace(tmp_xml_feed, self.feed_colorspace) - - out_feeds = out_track.find('feeds') - out_feeds.set('currentVersion', self.feed_version_name) - out_feeds.append(tmp_xml_feed) - - self.log.info( - "Appending new feed: {}".format( - self.feed_version_name)) - feed_added = True - - if feed_added: - # Append vUID to versions - out_xml_versions_obj = out_xml.find('versions') - out_xml_versions_obj.set( - 'currentVersion', self.feed_version_name) - new_version_obj = ET.Element( - "version", {"type": "version", "uid": self.feed_version_name}) - out_xml_versions_obj.insert(0, new_version_obj) - - self._clear_handler(out_xml) - - # fist create backup - self._create_openclip_backup_file(self.out_file) - - self.log.info("Adding feed version: {}".format( - self.feed_version_name)) - - self.write_clip_data_to_file(self.out_file, out_xml) - - self.log.info("openClip Updated: {}".format(self.out_file)) - - def _feed_exists(self, xml_data, path): - # loop all available feed paths and check if - # the path is not already in file - for src_path in xml_data.iter('path'): - if path == src_path.text: - self.log.warning( - "Not appending file as it already is in .clip file") - return True - - def _create_openclip_backup_file(self, file): - bck_file = "{}.bak".format(file) - # if backup does not exist - if not os.path.isfile(bck_file): - shutil.copy2(file, bck_file) - else: - # in case it exists and is already multiplied - created = False - for _i in range(1, 99): - bck_file = "{name}.bak.{idx:0>2}".format( - name=file, - idx=_i) - # create numbered backup file - if not os.path.isfile(bck_file): - shutil.copy2(file, bck_file) - created = True - break - # in case numbered does not exists - if not created: - bck_file = "{}.bak.last".format(file) - shutil.copy2(file, bck_file) - - def _add_colorspace(self, feed_obj, profile_name): - feed_storage_obj = feed_obj.find("storageFormat") - feed_clr_obj = feed_storage_obj.find("colourSpace") - if feed_clr_obj is not None: - feed_clr_obj = ET.Element( - "colourSpace", {"type": "string"}) - feed_storage_obj.append(feed_clr_obj) - - feed_clr_obj.text = profile_name diff --git 
a/openpype/hosts/flame/tests/flame_test.py b/openpype/hosts/flame/tests/flame_test.py deleted file mode 100644 index 402983eeba..0000000000 --- a/openpype/hosts/flame/tests/flame_test.py +++ /dev/null @@ -1,30 +0,0 @@ -from openpype.lib import import_filepath - -plugin = import_filepath( - "/Users/pype.club/code/openpype/openpype/hosts/flame/api/test_plugin.py") - -openclip_file_path = "/Users/pype.club/FLAME_STORAGE/test_shot_fps_float/test.clip" -# feed_datas = [ -# { -# "path": "/Users/pype.club/pype_club_root/OP02_VFX_demo/shots/a/a0000001/publish/plate/plateMain/v007/op02vfx_a0000001_plateMain_v007_exr16fpdwaaCl.0997.exr", -# "version": "v007" -# }, -# { -# "path": "/Users/pype.club/pype_club_root/OP02_VFX_demo/shots/a/a0000001/publish/plate/plateMain/v008/op02vfx_a0000001_plateMain_v008_exr16fpdwaaCl.0997.exr", -# "version": "v008" -# } -# ] - -feed_datas = [ - { - "path": "/Users/pype.club/FLAME_STORAGE/test_shot_fps_float/v001/file_name_v001.1001.exr", - "version": "v001" - }, - { - "path": "/Users/pype.club/FLAME_STORAGE/test_shot_fps_float/v002/file_name_v002.1001.exr", - "version": "v002" - } -] -for feed_data in feed_datas: - oclip = plugin.OpenClipSolver(openclip_file_path, feed_data) - oclip.make() From bb222642fa295e46357f1a6cc364ac34b5f6bce1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 19:06:32 +0200 Subject: [PATCH 175/207] reversing version rename --- openpype/version.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/version.py b/openpype/version.py index d447d27172..97aa585ca7 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,10 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.3-nightly.1-upp220408" - - -''' -includes: - - Flame: integrate batch groups: - https://github.com/pypeclub/OpenPype/pull/2928 -''' +__version__ = "3.9.3" From c5683f50d44f99469cc6393c68c2047dd40efd49 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 8 Apr 2022 20:07:53 +0200 Subject: [PATCH 176/207] flame: fixing skip condition --- openpype/hosts/flame/otio/flame_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 0b9c9ce817..9f0bec62ea 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -558,7 +558,7 @@ def create_otio_timeline(sequence): len(track.segments) == 0 or track.hidden.get_value() ): - return None + continue # convert track to otio otio_track = create_otio_track( From 22918ac0d6c10a9ba827e0d2d8a94fa821e2f80c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 9 Apr 2022 03:39:12 +0000 Subject: [PATCH 177/207] [Automated] Bump version --- CHANGELOG.md | 38 ++++++++++++++++++++++++-------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 26 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f1e7d5d9e0..c216dd0595 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,27 @@ # Changelog +## [3.9.4-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.3...HEAD) + +### 📖 Documentation + +- Documentation: Python requirements to 3.7.9 [\#3035](https://github.com/pypeclub/OpenPype/pull/3035) +- Website Docs: Remove unused pages [\#2974](https://github.com/pypeclub/OpenPype/pull/2974) + +**🚀 Enhancements** + +- Resolve environment variable in google drive credential path 
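# Tiny illustration of the skip-condition fix above: inside the track loop,
# "return None" aborts the whole timeline export at the first empty or hidden
# track, while "continue" only skips that track. The dicts below are stand-ins
# for flame sequence tracks.
def export_visible_tracks(tracks):
    exported = []
    for track in tracks:
        if not track["segments"] or track["hidden"]:
            continue   # skip just this track, keep processing the rest
        exported.append(track["name"])
    return exported

tracks = [
    {"name": "video1", "segments": [], "hidden": False},       # empty -> skipped
    {"name": "video2", "segments": ["seg"], "hidden": False},  # exported
]
print(export_visible_tracks(tracks))   # ['video2']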
[\#3008](https://github.com/pypeclub/OpenPype/pull/3008) + +**🐛 Bug fixes** + +- Ftrack: Integrate ftrack api fix [\#3044](https://github.com/pypeclub/OpenPype/pull/3044) +- Webpublisher - removed wrong hardcoded family [\#3043](https://github.com/pypeclub/OpenPype/pull/3043) +- Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040) + ## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...3.9.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.3-nightly.2...3.9.3) ### 📖 Documentation @@ -20,7 +39,6 @@ - Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) - Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) - Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) -- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) - Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) - Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) @@ -32,10 +50,12 @@ - Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) - AfterEffects: Added creating subset name for workfile from template [\#3023](https://github.com/pypeclub/OpenPype/pull/3023) - General: Add example addons to ignored [\#3022](https://github.com/pypeclub/OpenPype/pull/3022) +- SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018) - Maya: Remove missing import [\#3017](https://github.com/pypeclub/OpenPype/pull/3017) - Ftrack: multiple reviewable componets [\#3012](https://github.com/pypeclub/OpenPype/pull/3012) - Tray publisher: Fixes after code movement [\#3010](https://github.com/pypeclub/OpenPype/pull/3010) - Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) +- Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998) - Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996) **Merged pull requests:** @@ -51,7 +71,6 @@ - Documentation: Added mention of adding My Drive as a root [\#2999](https://github.com/pypeclub/OpenPype/pull/2999) - Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951) -- Documentation: New publisher develop docs [\#2896](https://github.com/pypeclub/OpenPype/pull/2896) **🆕 New features** @@ -60,6 +79,7 @@ **🚀 Enhancements** +- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) - TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) - Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) - General: `METADATA\_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980) @@ -70,13 +90,11 @@ - TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942) - Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) - General: Default modules loaded dynamically 
[\#2923](https://github.com/pypeclub/OpenPype/pull/2923) -- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) - Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) **🐛 Bug fixes** - Hosts: Remove path existence checks in 'add\_implementation\_envs' [\#3004](https://github.com/pypeclub/OpenPype/pull/3004) -- Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998) - PS: fix renaming subset incorrectly in PS [\#2991](https://github.com/pypeclub/OpenPype/pull/2991) - Fix: Disable setuptools auto discovery [\#2990](https://github.com/pypeclub/OpenPype/pull/2990) - AEL: fix opening existing workfile if no scene opened [\#2989](https://github.com/pypeclub/OpenPype/pull/2989) @@ -120,21 +138,13 @@ **🚀 Enhancements** +- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) - General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) -- nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) -- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) **🐛 Bug fixes** - General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904) - Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902) -- Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) -- SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) -- Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) - -**🔀 Refactored code** - -- General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) ## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) diff --git a/openpype/version.py b/openpype/version.py index 97aa585ca7..08dcbb5aed 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.3" +__version__ = "3.9.4-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 006f6eb4e5..adec7ab158 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.3" # OpenPype +version = "3.9.4-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 16e84073329b5dbc992dfadfd0818233f794cb0d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 11 Apr 2022 10:50:34 +0200 Subject: [PATCH 178/207] flame: processing comments --- openpype/hosts/flame/api/plugin.py | 14 ++++++++------ openpype/hosts/flame/api/scripts/wiretap_com.py | 4 ++++ openpype/hosts/flame/otio/flame_export.py | 16 ++++++---------- .../plugins/publish/integrate_batch_group.py | 9 ++------- 4 files changed, 20 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 42e6e19931..c87445fdd3 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -4,11 +4,12 @@ import shutil from copy import deepcopy from xml.etree import ElementTree as ET +from Qt import QtCore, QtWidgets + import openpype.api as openpype import qargparse from openpype import style from openpype.pipeline import LegacyCreator, LoaderPlugin -from Qt import QtCore, QtWidgets from . import constants from . import lib as flib @@ -712,12 +713,13 @@ class OpenClipSolver(flib.MediaInfoFile): # test also if file is not empty with open(file) as f: lines = f.readlines() - if len(lines) > 2: - return True - # file is probably corrupted - os.remove(file) - return False + if len(lines) > 2: + return True + + # file is probably corrupted + os.remove(file) + return False def make(self): diff --git a/openpype/hosts/flame/api/scripts/wiretap_com.py b/openpype/hosts/flame/api/scripts/wiretap_com.py index f78102c0a1..4825ff4386 100644 --- a/openpype/hosts/flame/api/scripts/wiretap_com.py +++ b/openpype/hosts/flame/api/scripts/wiretap_com.py @@ -461,6 +461,10 @@ class WireTapCom(object): def _subprocess_preexec_fn(): + """ Helper function + + Setting permission mask to 0777 + """ os.setpgrp() os.umask(0o000) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 9f0bec62ea..4fe05ec1d8 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -455,15 +455,10 @@ def add_otio_metadata(otio_item, item, **kwargs): otio_item.metadata.update({key: value}) -def _get_shot_tokens_values(clip, tokens, from_clip=False): +def _get_shot_tokens_values(clip, tokens): old_value = None output = {} - # in case it is segment from reel clip - # avoiding duplicity of segement data - if from_clip: - return {} - old_value = clip.shot_name.get_value() for token in tokens: @@ -480,7 +475,7 @@ def _get_shot_tokens_values(clip, tokens, from_clip=False): return output -def _get_segment_attributes(segment, from_clip=False): +def _get_segment_attributes(segment): log.debug("Segment name|hidden: {}|{}".format( segment.name.get_value(), segment.hidden @@ -503,9 +498,10 @@ def _get_segment_attributes(segment, from_clip=False): } # add all available shot tokens - shot_tokens = _get_shot_tokens_values(segment, [ - "", "", "", "", - ], from_clip) + shot_tokens = _get_shot_tokens_values( + segment, + ["", "", "", ""] + ) clip_data.update(shot_tokens) # populate shot source metadata diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index cac99a25ac..3615f06a3d 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -299,10 +299,8 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): version_name = "v" version_padding = 3 - # return 
it as ordered dict - reutrn_dict = OrderedDict() # need to make sure the order of keys is correct - for item in ( + return OrderedDict( ("name", name), ("media_path", media_path), ("media_path_pattern", media_path_pattern), @@ -320,10 +318,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): ("version_mode", version_mode), ("version_name", version_name), ("version_padding", version_padding) - ): - reutrn_dict.update({item[0]: item[1]}) - - return reutrn_dict + ) def _get_shot_task_dir_path(self, instance, task_data): project_doc = instance.data["projectEntity"] From e8ffb06434b26b6f9245362dd71a457dcab828e8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Apr 2022 11:42:27 +0200 Subject: [PATCH 179/207] Fix publishing tvp workfile in webpublisher --- .../publish/collect_published_files.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 56b2ef6e20..8edaf4f67b 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -108,15 +108,18 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["representations"] = self._get_single_repre( task_dir, task_data["files"], tags ) - file_url = os.path.join(task_dir, task_data["files"][0]) - no_of_frames = self._get_number_of_frames(file_url) - if no_of_frames: + if family != 'workfile': + file_url = os.path.join(task_dir, task_data["files"][0]) try: - frame_end = int(frame_start) + math.ceil(no_of_frames) - instance.data["frameEnd"] = math.ceil(frame_end) - 1 - self.log.debug("frameEnd:: {}".format( - instance.data["frameEnd"])) - except ValueError: + no_of_frames = self._get_number_of_frames(file_url) + if no_of_frames: + frame_end = int(frame_start) + \ + math.ceil(no_of_frames) + frame_end = math.ceil(frame_end) - 1 + instance.data["frameEnd"] = frame_end + self.log.debug("frameEnd:: {}".format( + instance.data["frameEnd"])) + except Exception: self.log.warning("Unable to count frames " "duration {}".format(no_of_frames)) From a198b7f3f6ef04f48fe43165f7ea17d32134876d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 11 Apr 2022 12:02:08 +0200 Subject: [PATCH 180/207] add format arguments to concatenation arguments --- .../plugins/publish/extract_review_slate.py | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 505ae75169..49f0eac41d 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -158,13 +158,15 @@ class ExtractReviewSlate(openpype.api.Extractor): ]) if use_legacy_code: + format_args = [] codec_args = repre["_profile"].get('codec', []) output_args.extend(codec_args) # preset's output data output_args.extend(repre["_profile"].get('output', [])) else: # Codecs are copied from source for whole input - codec_args = self._get_codec_args(repre) + format_args, codec_args = self._get_format_codec_args(repre) + output_args.extend(format_args) output_args.extend(codec_args) # make sure colors are correct @@ -266,8 +268,14 @@ class ExtractReviewSlate(openpype.api.Extractor): "-safe", "0", "-i", conc_text_path, "-c", "copy", - output_path ] + # NOTE: Added because of OP Atom demuxers + # Add format arguments if there are any + # 
- keep format of output + if format_args: + concat_args.extend(format_args) + # Add final output path + concat_args.append(output_path) # ffmpeg concat subprocess self.log.debug( @@ -338,7 +346,7 @@ class ExtractReviewSlate(openpype.api.Extractor): return vf_back - def _get_codec_args(self, repre): + def _get_format_codec_args(self, repre): """Detect possible codec arguments from representation.""" codec_args = [] @@ -361,13 +369,9 @@ class ExtractReviewSlate(openpype.api.Extractor): return codec_args source_ffmpeg_cmd = repre.get("ffmpeg_cmd") - codec_args.extend( - get_ffmpeg_format_args(ffprobe_data, source_ffmpeg_cmd) - ) - codec_args.extend( - get_ffmpeg_codec_args( - ffprobe_data, source_ffmpeg_cmd, logger=self.log - ) + format_args = get_ffmpeg_format_args(ffprobe_data, source_ffmpeg_cmd) + codec_args = get_ffmpeg_codec_args( + ffprobe_data, source_ffmpeg_cmd, logger=self.log ) - return codec_args + return format_args, codec_args From aefaac6e5cd4530185656077567e38586e0586b4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 11 Apr 2022 14:49:58 +0200 Subject: [PATCH 181/207] added ability to change general environments --- openpype/settings/lib.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 54502292dc..937329b417 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -1113,6 +1113,14 @@ def get_general_environments(): clear_metadata_from_settings(environments) + whitelist_envs = result["general"].get("local_env_white_list") + if whitelist_envs: + local_settings = get_local_settings() + local_envs = local_settings.get("environments") or {} + for key, value in local_envs.items(): + if key in whitelist_envs and key in environments: + environments[key] = value + return environments From 2b65b2d4381be1776176678e644ba6e50b8f4272 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 11 Apr 2022 17:11:00 +0200 Subject: [PATCH 182/207] don't check AOVs regex for multipart exrs --- .../deadline/plugins/publish/submit_publish_job.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index e5bda43d07..63f9e35720 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -536,15 +536,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # should be review made. 
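The `get_general_environments` hunk above only overlays local-settings values for keys listed under `general/local_env_white_list`, and only when the key already exists in the studio environments. A minimal standalone sketch of that merge rule (the sample dictionaries below are assumptions, not real OpenPype data):

    def apply_local_env_overrides(environments, whitelist_envs, local_envs):
        # Only keys that are both whitelisted and already present in the
        # studio environments may be overridden by local settings.
        for key, value in local_envs.items():
            if key in whitelist_envs and key in environments:
                environments[key] = value
        return environments


    environments = {"OCIO": "/studio/ocio/config.ocio", "STUDIO_TOOL": "1"}
    whitelist_envs = {"OCIO"}
    local_envs = {"OCIO": "/local/ocio/config.ocio", "STUDIO_TOOL": "2"}

    # Only OCIO changes; STUDIO_TOOL is not whitelisted so it keeps the
    # studio value.
    print(apply_local_env_overrides(environments, whitelist_envs, local_envs))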
# - "review" tag is never added when is set to 'False' if instance["useSequenceForReview"]: - render_file_name = list(collection[0]) - app = os.environ.get("AVALON_APP", "") - aov_patterns = self.aov_filter - # if filtered aov name is found in filename, toggle it for - # preview video rendering - preview = match_aov_pattern(app, aov_patterns, render_file_name) # toggle preview on if multipart is on if instance.get("multipartExr", False): preview = True + else: + render_file_name = list(collection[0]) + host_name = os.environ.get("AVALON_APP", "") + # if filtered aov name is found in filename, toggle it for + # preview video rendering + preview = match_aov_pattern( + host_name, self.aov_filter, render_file_name + ) staging = os.path.dirname(list(collection)[0]) success, rootless_staging_dir = ( From 7a40cc269239e522155692c4f4899f8bcc9f6cc4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 11 Apr 2022 17:19:36 +0200 Subject: [PATCH 183/207] fix orderdict --- openpype/hosts/flame/plugins/publish/integrate_batch_group.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index 3615f06a3d..da9553cc2a 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -300,7 +300,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): version_padding = 3 # need to make sure the order of keys is correct - return OrderedDict( + return OrderedDict(( ("name", name), ("media_path", media_path), ("media_path_pattern", media_path_pattern), @@ -318,7 +318,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): ("version_mode", version_mode), ("version_name", version_name), ("version_padding", version_padding) - ) + )) def _get_shot_task_dir_path(self, instance, task_data): project_doc = instance.data["projectEntity"] From 10ce2511f0be2a5627de0f1527e6128e66304dc1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 11 Apr 2022 17:40:44 +0200 Subject: [PATCH 184/207] make sure temporary staging dir is removed --- openpype/plugins/publish/extract_review.py | 238 +++++++++++---------- 1 file changed, 127 insertions(+), 111 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 3ecea1f8bd..d569d82762 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -188,8 +188,7 @@ class ExtractReview(pyblish.api.InstancePlugin): outputs_per_repres = self._get_outputs_per_representations( instance, profile_outputs ) - fill_data = copy.deepcopy(instance.data["anatomyData"]) - for repre, outputs in outputs_per_repres: + for repre, outpu_defs in outputs_per_repres: # Check if input should be preconverted before processing # Store original staging dir (it's value may change) src_repre_staging_dir = repre["stagingDir"] @@ -241,126 +240,143 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log ) - for _output_def in outputs: - output_def = copy.deepcopy(_output_def) - # Make sure output definition has "tags" key - if "tags" not in output_def: - output_def["tags"] = [] - - if "burnins" not in output_def: - output_def["burnins"] = [] - - # Create copy of representation - new_repre = copy.deepcopy(repre) - # Make sure new representation has origin staging dir - # - this is because source representation may change - # it's staging dir because of ffmpeg conversion - 
new_repre["stagingDir"] = src_repre_staging_dir - - # Remove "delete" tag from new repre if there is - if "delete" in new_repre["tags"]: - new_repre["tags"].remove("delete") - - # Add additional tags from output definition to representation - for tag in output_def["tags"]: - if tag not in new_repre["tags"]: - new_repre["tags"].append(tag) - - # Add burnin link from output definition to representation - for burnin in output_def["burnins"]: - if burnin not in new_repre.get("burnins", []): - if not new_repre.get("burnins"): - new_repre["burnins"] = [] - new_repre["burnins"].append(str(burnin)) - - self.log.debug( - "Linked burnins: `{}`".format(new_repre.get("burnins")) + try: + self._render_output_definitions( + instance, repre, src_repre_staging_dir, outpu_defs ) - self.log.debug( - "New representation tags: `{}`".format( - new_repre.get("tags")) + finally: + # Make sure temporary staging is cleaned up and representation + # has set origin stagingDir + if do_convert: + # Set staging dir of source representation back to previous + # value + repre["stagingDir"] = src_repre_staging_dir + if os.path.exists(new_staging_dir): + shutil.rmtree(new_staging_dir) + + def _render_output_definitions( + self, instance, repre, src_repre_staging_dir, outpu_defs + ): + fill_data = copy.deepcopy(instance.data["anatomyData"]) + for _output_def in outpu_defs: + output_def = copy.deepcopy(_output_def) + # Make sure output definition has "tags" key + if "tags" not in output_def: + output_def["tags"] = [] + + if "burnins" not in output_def: + output_def["burnins"] = [] + + # Create copy of representation + new_repre = copy.deepcopy(repre) + # Make sure new representation has origin staging dir + # - this is because source representation may change + # it's staging dir because of ffmpeg conversion + new_repre["stagingDir"] = src_repre_staging_dir + + # Remove "delete" tag from new repre if there is + if "delete" in new_repre["tags"]: + new_repre["tags"].remove("delete") + + # Add additional tags from output definition to representation + for tag in output_def["tags"]: + if tag not in new_repre["tags"]: + new_repre["tags"].append(tag) + + # Add burnin link from output definition to representation + for burnin in output_def["burnins"]: + if burnin not in new_repre.get("burnins", []): + if not new_repre.get("burnins"): + new_repre["burnins"] = [] + new_repre["burnins"].append(str(burnin)) + + self.log.debug( + "Linked burnins: `{}`".format(new_repre.get("burnins")) + ) + + self.log.debug( + "New representation tags: `{}`".format( + new_repre.get("tags")) + ) + + temp_data = self.prepare_temp_data(instance, repre, output_def) + files_to_clean = [] + if temp_data["input_is_sequence"]: + self.log.info("Filling gaps in sequence.") + files_to_clean = self.fill_sequence_gaps( + temp_data["origin_repre"]["files"], + new_repre["stagingDir"], + temp_data["frame_start"], + temp_data["frame_end"]) + + # create or update outputName + output_name = new_repre.get("outputName", "") + output_ext = new_repre["ext"] + if output_name: + output_name += "_" + output_name += output_def["filename_suffix"] + if temp_data["without_handles"]: + output_name += "_noHandles" + + # add outputName to anatomy format fill_data + fill_data.update({ + "output": output_name, + "ext": output_ext + }) + + try: # temporary until oiiotool is supported cross platform + ffmpeg_args = self._ffmpeg_arguments( + output_def, instance, new_repre, temp_data, fill_data ) - - temp_data = self.prepare_temp_data( - instance, repre, output_def) - files_to_clean = [] - 
if temp_data["input_is_sequence"]: - self.log.info("Filling gaps in sequence.") - files_to_clean = self.fill_sequence_gaps( - temp_data["origin_repre"]["files"], - new_repre["stagingDir"], - temp_data["frame_start"], - temp_data["frame_end"]) - - # create or update outputName - output_name = new_repre.get("outputName", "") - output_ext = new_repre["ext"] - if output_name: - output_name += "_" - output_name += output_def["filename_suffix"] - if temp_data["without_handles"]: - output_name += "_noHandles" - - # add outputName to anatomy format fill_data - fill_data.update({ - "output": output_name, - "ext": output_ext - }) - - try: # temporary until oiiotool is supported cross platform - ffmpeg_args = self._ffmpeg_arguments( - output_def, instance, new_repre, temp_data, fill_data + except ZeroDivisionError: + # TODO recalculate width and height using OIIO before + # conversion + if 'exr' in temp_data["origin_repre"]["ext"]: + self.log.warning( + ( + "Unsupported compression on input files." + " Skipping!!!" + ), + exc_info=True ) - except ZeroDivisionError: - if 'exr' in temp_data["origin_repre"]["ext"]: - self.log.debug("Unsupported compression on input " + - "files. Skipping!!!") - return - raise NotImplementedError + return + raise NotImplementedError - subprcs_cmd = " ".join(ffmpeg_args) + subprcs_cmd = " ".join(ffmpeg_args) - # run subprocess - self.log.debug("Executing: {}".format(subprcs_cmd)) + # run subprocess + self.log.debug("Executing: {}".format(subprcs_cmd)) - openpype.api.run_subprocess( - subprcs_cmd, shell=True, logger=self.log - ) + openpype.api.run_subprocess( + subprcs_cmd, shell=True, logger=self.log + ) - # delete files added to fill gaps - if files_to_clean: - for f in files_to_clean: - os.unlink(f) + # delete files added to fill gaps + if files_to_clean: + for f in files_to_clean: + os.unlink(f) - new_repre.update({ - "name": "{}_{}".format(output_name, output_ext), - "outputName": output_name, - "outputDef": output_def, - "frameStartFtrack": temp_data["output_frame_start"], - "frameEndFtrack": temp_data["output_frame_end"], - "ffmpeg_cmd": subprcs_cmd - }) + new_repre.update({ + "name": "{}_{}".format(output_name, output_ext), + "outputName": output_name, + "outputDef": output_def, + "frameStartFtrack": temp_data["output_frame_start"], + "frameEndFtrack": temp_data["output_frame_end"], + "ffmpeg_cmd": subprcs_cmd + }) - # Force to pop these key if are in new repre - new_repre.pop("preview", None) - new_repre.pop("thumbnail", None) - if "clean_name" in new_repre.get("tags", []): - new_repre.pop("outputName") + # Force to pop these key if are in new repre + new_repre.pop("preview", None) + new_repre.pop("thumbnail", None) + if "clean_name" in new_repre.get("tags", []): + new_repre.pop("outputName") - # adding representation - self.log.debug( - "Adding new representation: {}".format(new_repre) - ) - instance.data["representations"].append(new_repre) - - # Cleanup temp staging dir after procesisng of output definitions - if do_convert: - temp_dir = repre["stagingDir"] - shutil.rmtree(temp_dir) - # Set staging dir of source representation back to previous - # value - repre["stagingDir"] = src_repre_staging_dir + # adding representation + self.log.debug( + "Adding new representation: {}".format(new_repre) + ) + instance.data["representations"].append(new_repre) def input_is_sequence(self, repre): """Deduce from representation data if input is sequence.""" From 1bd174c37557dbe06c3c65d1cc621491bf7b2947 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 11 Apr 2022 17:50:27 
+0200
Subject: [PATCH 185/207] simplified how to avoid adding attributes during oiio processing

---
 openpype/lib/transcoding.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py
index 8e79aba0ae..448c9eefe0 100644
--- a/openpype/lib/transcoding.py
+++ b/openpype/lib/transcoding.py
@@ -422,7 +422,12 @@ def convert_for_ffmpeg(
         compression = "none"
 
     # Prepare subprocess arguments
-    oiio_cmd = [get_oiio_tools_path()]
+    oiio_cmd = [
+        get_oiio_tools_path(),
+
+        # Don't add any additional attributes
+        "--nosoftwareattrib",
+    ]
     # Add input compression if available
     if compression:
         oiio_cmd.extend(["--compression", compression])
@@ -458,7 +463,6 @@ def convert_for_ffmpeg(
         "--frames", "{}-{}".format(input_frame_start, input_frame_end)
     ])
 
-    ignore_attr_changes_added = False
     for attr_name, attr_value in input_info["attribs"].items():
         if not isinstance(attr_value, str):
             continue
@@ -466,10 +470,6 @@ def convert_for_ffmpeg(
         # Remove attributes that have string value longer than allowed length
         # for ffmpeg
         if len(attr_value) > MAX_FFMPEG_STRING_LEN:
-            if not ignore_attr_changes_added:
-                # Attrite changes won't be added to attributes itself
-                ignore_attr_changes_added = True
-                oiio_cmd.append("--sansattrib")
             # Set attribute to empty string
             logger.info((
                 "Removed attribute \"{}\" from metadata"

From b40e5ba0b8bb697e15ff14e0f50ef45da61759c6 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 11 Apr 2022 17:50:53 +0200
Subject: [PATCH 186/207] added check for invalid characters in attribute value

---
 openpype/lib/transcoding.py | 38 +++++++++++++++++++++++++++++--------
 1 file changed, 30 insertions(+), 8 deletions(-)

diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py
index 448c9eefe0..c2fecf6628 100644
--- a/openpype/lib/transcoding.py
+++ b/openpype/lib/transcoding.py
@@ -17,6 +17,9 @@ from .vendor_bin_utils import (
 # Max length of string that is supported by ffmpeg
 MAX_FFMPEG_STRING_LEN = 8196
 
+# Not allowed symbols in attributes for ffmpeg
+NOT_ALLOWED_FFMPEG_CHARS = ("\"", )
+
 # OIIO known xml tags
 STRING_TAGS = {
     "format"
@@ -367,11 +370,15 @@ def should_convert_for_ffmpeg(src_filepath):
         return None
 
     for attr_value in input_info["attribs"].values():
-        if (
-            isinstance(attr_value, str)
-            and len(attr_value) > MAX_FFMPEG_STRING_LEN
-        ):
+        if not isinstance(attr_value, str):
+            continue
+
+        if len(attr_value) > MAX_FFMPEG_STRING_LEN:
             return True
+
+        for char in NOT_ALLOWED_FFMPEG_CHARS:
+            if char in attr_value:
+                return True
     return False
 
 
@@ -468,13 +475,28 @@ def convert_for_ffmpeg(
             continue
 
         # Remove attributes that have string value longer than allowed length
-        # for ffmpeg
+        # for ffmpeg or when it contains disallowed symbols
+        erase_reason = "Missing reason"
+        erase_attribute = False
         if len(attr_value) > MAX_FFMPEG_STRING_LEN:
+            erase_reason = "has too long value ({} chars).".format(
+                len(attr_value)
+            )
+            erase_attribute = True
+
+        if not erase_attribute:
+            for char in NOT_ALLOWED_FFMPEG_CHARS:
+                if char in attr_value:
+                    erase_attribute = True
+                    erase_reason = (
+                        "contains unsupported character \"{}\"."
+                    ).format(char)
+                    break
+
+        if erase_attribute:
+            # Set attribute to empty string
+            logger.info((
+                "Removed attribute \"{}\" from metadata because {}."
+ ).format(attr_name, erase_reason)) oiio_cmd.extend(["--eraseattrib", attr_name]) # Add last argument - path to output From 87fa3bae64f7d380eddac5c1bf2498f0c381ce32 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 11:44:34 +0200 Subject: [PATCH 187/207] OP-2005 - refactor yanked _requests_get, _requests_post into lib file --- .../maya/plugins/create/create_render.py | 79 ++--------------- .../maya/plugins/create/create_vrayscene.py | 86 +++---------------- .../plugins/publish/submit_maya_muster.py | 19 +--- .../publish/validate_muster_connection.py | 34 +------- openpype/lib/__init__.py | 9 ++ openpype/lib/connections.py | 38 ++++++++ .../plugins/publish/submit_maya_deadline.py | 47 ++-------- 7 files changed, 78 insertions(+), 234 deletions(-) create mode 100644 openpype/lib/connections.py diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 4f0a394f85..2ded7c720d 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -4,8 +4,6 @@ import os import json import appdirs import requests -import six -import sys from maya import cmds import maya.app.renderSetup.model.renderSetup as renderSetup @@ -14,6 +12,7 @@ from openpype.hosts.maya.api import ( lib, plugin ) +from openpype.lib import requests_get from openpype.api import ( get_system_settings, get_project_settings, @@ -117,6 +116,8 @@ class CreateRender(plugin.Creator): except KeyError: self.aov_separator = "_" + manager = ModulesManager() + self.deadline_module = manager.modules_by_name["deadline"] try: default_servers = deadline_settings["deadline_urls"] project_servers = ( @@ -133,10 +134,8 @@ class CreateRender(plugin.Creator): except AttributeError: # Handle situation were we had only one url for deadline. - manager = ModulesManager() - deadline_module = manager.modules_by_name["deadline"] # get default deadline webservice url from deadline module - self.deadline_servers = deadline_module.deadline_urls + self.deadline_servers = self.deadline_module.deadline_urls def process(self): """Entry point.""" @@ -211,7 +210,7 @@ class CreateRender(plugin.Creator): cmds.getAttr("{}.deadlineServers".format(self.instance)) ] ] - pools = self._get_deadline_pools(webservice) + pools = self.deadline_module.get_deadline_pools(webservice, self.log) cmds.deleteAttr("{}.primaryPool".format(self.instance)) cmds.deleteAttr("{}.secondaryPool".format(self.instance)) cmds.addAttr(self.instance, longName="primaryPool", @@ -221,33 +220,6 @@ class CreateRender(plugin.Creator): attributeType="enum", enumName=":".join(["-"] + pools)) - def _get_deadline_pools(self, webservice): - # type: (str) -> list - """Get pools from Deadline. - Args: - webservice (str): Server url. - Returns: - list: Pools. - Throws: - RuntimeError: If deadline webservice is unreachable. - - """ - argument = "{}/api/pools?NamesOnly=true".format(webservice) - try: - response = self._requests_get(argument) - except requests.exceptions.ConnectionError as exc: - msg = 'Cannot connect to deadline web service' - self.log.error(msg) - six.reraise( - RuntimeError, - RuntimeError('{} - {}'.format(msg, exc)), - sys.exc_info()[2]) - if not response.ok: - self.log.warning("No pools retrieved") - return [] - - return response.json() - def _create_render_settings(self): """Create instance settings.""" # get pools @@ -295,7 +267,8 @@ class CreateRender(plugin.Creator): # use first one for initial list of pools. 
deadline_url = next(iter(self.deadline_servers.values())) - pool_names = self._get_deadline_pools(deadline_url) + pool_names = self.deadline_module.get_deadline_pools(deadline_url, + self.log) maya_submit_dl = self._project_settings.get( "deadline", {}).get( "publish", {}).get( @@ -366,7 +339,7 @@ class CreateRender(plugin.Creator): """ params = {"authToken": self._token} api_entry = "/api/pools/list" - response = self._requests_get(self.MUSTER_REST_URL + api_entry, + response = requests_get(self.MUSTER_REST_URL + api_entry, params=params) if response.status_code != 200: if response.status_code == 401: @@ -392,45 +365,11 @@ class CreateRender(plugin.Creator): api_url = "{}/muster/show_login".format( os.environ["OPENPYPE_WEBSERVER_URL"]) self.log.debug(api_url) - login_response = self._requests_get(api_url, timeout=1) + login_response = requests_get(api_url, timeout=1) if login_response.status_code != 200: self.log.error("Cannot show login form to Muster") raise Exception("Cannot show login form to Muster") - def _requests_post(self, *args, **kwargs): - """Wrap request post method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. - - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - - """ - if "verify" not in kwargs: - kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) - return requests.post(*args, **kwargs) - - def _requests_get(self, *args, **kwargs): - """Wrap request get method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. - - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - - """ - if "verify" not in kwargs: - kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) - return requests.get(*args, **kwargs) - def _set_default_renderer_settings(self, renderer): """Set basic settings based on renderer. diff --git a/openpype/hosts/maya/plugins/create/create_vrayscene.py b/openpype/hosts/maya/plugins/create/create_vrayscene.py index fa9c59e016..98dfabbbcb 100644 --- a/openpype/hosts/maya/plugins/create/create_vrayscene.py +++ b/openpype/hosts/maya/plugins/create/create_vrayscene.py @@ -4,8 +4,6 @@ import os import json import appdirs import requests -import six -import sys from maya import cmds import maya.app.renderSetup.model.renderSetup as renderSetup @@ -19,6 +17,7 @@ from openpype.api import ( get_project_settings ) +from openpype.lib import requests_get from openpype.pipeline import CreatorError from openpype.modules import ModulesManager @@ -40,6 +39,10 @@ class CreateVRayScene(plugin.Creator): self._rs = renderSetup.instance() self.data["exportOnFarm"] = False deadline_settings = get_system_settings()["modules"]["deadline"] + + manager = ModulesManager() + self.deadline_module = manager.modules_by_name["deadline"] + if not deadline_settings["enabled"]: self.deadline_servers = {} return @@ -62,10 +65,8 @@ class CreateVRayScene(plugin.Creator): except AttributeError: # Handle situation were we had only one url for deadline. 
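The creator refactor above swaps the per-plugin `_get_deadline_pools` helpers for the shared `DeadlineModule.get_deadline_pools` call. A small sketch of the webservice request it wraps; the server address is an assumption and the endpoint comes from the removed helper:

    from openpype.lib import requests_get


    def list_deadline_pools(webservice):
        # Deadline's web service returns a JSON list of pool names.
        argument = "{}/api/pools?NamesOnly=true".format(webservice)
        response = requests_get(argument)
        if not response.ok:
            return []
        return response.json()


    # pools = list_deadline_pools("http://localhost:8082")  # assumed address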
- manager = ModulesManager() - deadline_module = manager.modules_by_name["deadline"] # get default deadline webservice url from deadline module - self.deadline_servers = deadline_module.deadline_urls + self.deadline_servers = self.deadline_module.deadline_urls def process(self): """Entry point.""" @@ -128,7 +129,7 @@ class CreateVRayScene(plugin.Creator): cmds.getAttr("{}.deadlineServers".format(self.instance)) ] ] - pools = self._get_deadline_pools(webservice) + pools = self.deadline_module.get_deadline_pools(webservice) cmds.deleteAttr("{}.primaryPool".format(self.instance)) cmds.deleteAttr("{}.secondaryPool".format(self.instance)) cmds.addAttr(self.instance, longName="primaryPool", @@ -138,33 +139,6 @@ class CreateVRayScene(plugin.Creator): attributeType="enum", enumName=":".join(["-"] + pools)) - def _get_deadline_pools(self, webservice): - # type: (str) -> list - """Get pools from Deadline. - Args: - webservice (str): Server url. - Returns: - list: Pools. - Throws: - RuntimeError: If deadline webservice is unreachable. - - """ - argument = "{}/api/pools?NamesOnly=true".format(webservice) - try: - response = self._requests_get(argument) - except requests.exceptions.ConnectionError as exc: - msg = 'Cannot connect to deadline web service' - self.log.error(msg) - six.reraise( - CreatorError, - CreatorError('{} - {}'.format(msg, exc)), - sys.exc_info()[2]) - if not response.ok: - self.log.warning("No pools retrieved") - return [] - - return response.json() - def _create_vray_instance_settings(self): # get pools pools = [] @@ -195,7 +169,7 @@ class CreateVRayScene(plugin.Creator): for k in self.deadline_servers.keys() ][0] - pool_names = self._get_deadline_pools(deadline_url) + pool_names = self.deadline_module.get_deadline_pools(deadline_url) if muster_enabled: self.log.info(">>> Loading Muster credentials ...") @@ -259,8 +233,8 @@ class CreateVRayScene(plugin.Creator): """ params = {"authToken": self._token} api_entry = "/api/pools/list" - response = self._requests_get(self.MUSTER_REST_URL + api_entry, - params=params) + response = requests_get(self.MUSTER_REST_URL + api_entry, + params=params) if response.status_code != 200: if response.status_code == 401: self.log.warning("Authentication token expired.") @@ -285,45 +259,7 @@ class CreateVRayScene(plugin.Creator): api_url = "{}/muster/show_login".format( os.environ["OPENPYPE_WEBSERVER_URL"]) self.log.debug(api_url) - login_response = self._requests_get(api_url, timeout=1) + login_response = requests_get(api_url, timeout=1) if login_response.status_code != 200: self.log.error("Cannot show login form to Muster") raise CreatorError("Cannot show login form to Muster") - - def _requests_post(self, *args, **kwargs): - """Wrap request post method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. - - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - - """ - if "verify" not in kwargs: - kwargs["verify"] = ( - False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True - ) # noqa - return requests.post(*args, **kwargs) - - def _requests_get(self, *args, **kwargs): - """Wrap request get method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. 
This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. - - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - - """ - if "verify" not in kwargs: - kwargs["verify"] = ( - False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True - ) # noqa - return requests.get(*args, **kwargs) diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index f852904580..255ed96901 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -4,13 +4,13 @@ import getpass import platform import appdirs -import requests from maya import cmds from avalon import api import pyblish.api +from openpype.lib import requests_post from openpype.hosts.maya.api import lib from openpype.api import get_system_settings @@ -184,7 +184,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "select": "name" } api_entry = '/api/templates/list' - response = self._requests_post( + response = requests_post( self.MUSTER_REST_URL + api_entry, params=params) if response.status_code != 200: self.log.error( @@ -235,7 +235,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "name": "submit" } api_entry = '/api/queue/actions' - response = self._requests_post( + response = requests_post( self.MUSTER_REST_URL + api_entry, params=params, json=payload) if response.status_code != 200: @@ -549,16 +549,3 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): % (value, int(value)) ) - def _requests_post(self, *args, **kwargs): - """ Wrapper for requests, disabling SSL certificate validation if - DONT_VERIFY_SSL environment variable is found. This is useful when - Deadline or Muster server are running with self-signed certificates - and their certificate is not added to trusted certificates on - client machines. - - WARNING: disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. 
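For the Muster path kept above, pools are listed through the Muster REST API with an authentication token. A trimmed-down sketch of that request; the server URL is a placeholder and the endpoint, token parameter and `ResponseData` key mirror the plugin code:

    from openpype.lib import requests_get

    MUSTER_REST_URL = "http://muster.local:9890"  # assumed address


    def list_muster_pools(auth_token):
        response = requests_get(
            MUSTER_REST_URL + "/api/pools/list",
            params={"authToken": auth_token}
        )
        if response.status_code == 401:
            # Token expired - the host plugins re-show the login form here.
            raise RuntimeError("Muster authentication token expired")
        response.raise_for_status()
        return response.json()["ResponseData"]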
- """ - if 'verify' not in kwargs: - kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa - return requests.post(*args, **kwargs) diff --git a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py index af32c82f97..6dc7bd3bc4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py +++ b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py @@ -2,9 +2,9 @@ import os import json import appdirs -import requests import pyblish.api +from openpype.lib import requests_get from openpype.plugin import contextplugin_should_run import openpype.hosts.maya.api.action @@ -51,7 +51,7 @@ class ValidateMusterConnection(pyblish.api.ContextPlugin): 'authToken': self._token } api_entry = '/api/pools/list' - response = self._requests_get( + response = requests_get( MUSTER_REST_URL + api_entry, params=params) assert response.status_code == 200, "invalid response from server" assert response.json()['ResponseData'], "invalid data in response" @@ -88,35 +88,7 @@ class ValidateMusterConnection(pyblish.api.ContextPlugin): api_url = "{}/muster/show_login".format( os.environ["OPENPYPE_WEBSERVER_URL"]) cls.log.debug(api_url) - response = cls._requests_get(api_url, timeout=1) + response = requests_get(api_url, timeout=1) if response.status_code != 200: cls.log.error('Cannot show login form to Muster') raise Exception('Cannot show login form to Muster') - - def _requests_post(self, *args, **kwargs): - """ Wrapper for requests, disabling SSL certificate validation if - DONT_VERIFY_SSL environment variable is found. This is useful when - Deadline or Muster server are running with self-signed certificates - and their certificate is not added to trusted certificates on - client machines. - - WARNING: disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - """ - if 'verify' not in kwargs: - kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa - return requests.post(*args, **kwargs) - - def _requests_get(self, *args, **kwargs): - """ Wrapper for requests, disabling SSL certificate validation if - DONT_VERIFY_SSL environment variable is found. This is useful when - Deadline or Muster server are running with self-signed certificates - and their certificate is not added to trusted certificates on - client machines. - - WARNING: disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - """ - if 'verify' not in kwargs: - kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa - return requests.get(*args, **kwargs) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index e8b6d18f4e..b57e469f5b 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -221,6 +221,12 @@ from .openpype_version import ( is_current_version_higher_than_expected ) + +from .connections import ( + requests_get, + requests_post +) + terminal = Terminal __all__ = [ @@ -390,4 +396,7 @@ __all__ = [ "is_running_from_build", "is_running_staging", "is_current_version_studio_latest", + + "requests_get", + "requests_post" ] diff --git a/openpype/lib/connections.py b/openpype/lib/connections.py new file mode 100644 index 0000000000..91b745a4c1 --- /dev/null +++ b/openpype/lib/connections.py @@ -0,0 +1,38 @@ +import requests +import os + + +def requests_post(*args, **kwargs): + """Wrap request post method. 
+ + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. + + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. + + """ + if "verify" not in kwargs: + kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) + return requests.post(*args, **kwargs) + + +def requests_get(*args, **kwargs): + """Wrap request get method. + + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. + + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. + + """ + if "verify" not in kwargs: + kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) + return requests.get(*args, **kwargs) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 34147712bc..02e89edd1e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -35,6 +35,7 @@ from maya import cmds from avalon import api import pyblish.api +from openpype.lib import requests_post from openpype.hosts.maya.api import lib # Documentation for keys available at: @@ -700,7 +701,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): tiles_count = instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 for tile_job in frame_payloads: - response = self._requests_post(url, json=tile_job) + response = requests_post(url, json=tile_job) if not response.ok: raise Exception(response.text) @@ -763,7 +764,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): job_idx, len(assembly_payloads) )) self.log.debug(json.dumps(ass_job, indent=4, sort_keys=True)) - response = self._requests_post(url, json=ass_job) + response = requests_post(url, json=ass_job) if not response.ok: raise Exception(response.text) @@ -781,7 +782,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # E.g. http://192.168.0.1:8082/api/jobs url = "{}/api/jobs".format(self.deadline_url) - response = self._requests_post(url, json=payload) + response = requests_post(url, json=payload) if not response.ok: raise Exception(response.text) instance.data["deadlineSubmissionJob"] = response.json() @@ -989,7 +990,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.log.info("Submitting ass export job.") url = "{}/api/jobs".format(self.deadline_url) - response = self._requests_post(url, json=payload) + response = requests_post(url, json=payload) if not response.ok: self.log.error("Submition failed!") self.log.error(response.status_code) @@ -1013,44 +1014,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): % (value, int(value)) ) - def _requests_post(self, *args, **kwargs): - """Wrap request post method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. 
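The new `openpype.lib.connections` wrappers above centralize the `OPENPYPE_DONT_VERIFY_SSL` handling, so submit plugins only import `requests_post`/`requests_get` instead of calling `requests` directly. A brief usage sketch; the web service address and the minimal payload shape are assumptions:

    from openpype.lib import requests_post


    def submit_job(deadline_url, payload):
        # Same call pattern as the submit plugins, just through the wrapper
        # so the SSL-verification toggle lives in one place.
        url = "{}/api/jobs".format(deadline_url)
        response = requests_post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)
        return response.json()


    # submit_job(
    #     "http://localhost:8082",  # assumed address
    #     {"JobInfo": {}, "PluginInfo": {}, "AuxFiles": []}
    # )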
- - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - - """ - if 'verify' not in kwargs: - kwargs['verify'] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) - # add 10sec timeout before bailing out - kwargs['timeout'] = 10 - return requests.post(*args, **kwargs) - - def _requests_get(self, *args, **kwargs): - """Wrap request get method. - - Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment - variable is found. This is useful when Deadline or Muster server are - running with self-signed certificates and their certificate is not - added to trusted certificates on client machines. - - Warning: - Disabling SSL certificate validation is defeating one line - of defense SSL is providing and it is not recommended. - - """ - if 'verify' not in kwargs: - kwargs['verify'] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) - # add 10sec timeout before bailing out - kwargs['timeout'] = 10 - return requests.get(*args, **kwargs) - def format_vray_output_filename(self, filename, template, dir=False): """Format the expected output file of the Export job. From f637db72d3da547085c94a867f92e591c5969da7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 11:45:58 +0200 Subject: [PATCH 188/207] OP-2005 - added new collector and validator for Deadline pools Refactored old usage --- .../deadline/plugins/publish/collect_pools.py | 24 ++++++++++ .../publish/help/validate_deadline_pools.xml | 24 ++++++++++ .../publish/submit_aftereffects_deadline.py | 6 +-- .../publish/submit_harmony_deadline.py | 6 +-- .../publish/submit_houdini_render_deadline.py | 5 +- .../plugins/publish/submit_nuke_deadline.py | 6 +-- .../plugins/publish/submit_publish_job.py | 4 +- .../publish/validate_deadline_pools.py | 47 ++++++++++++++++++ .../defaults/project_settings/deadline.json | 10 ++-- .../schema_project_deadline.json | 48 +++++++------------ 10 files changed, 128 insertions(+), 52 deletions(-) create mode 100644 openpype/modules/deadline/plugins/publish/collect_pools.py create mode 100644 openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml create mode 100644 openpype/modules/deadline/plugins/publish/validate_deadline_pools.py diff --git a/openpype/modules/deadline/plugins/publish/collect_pools.py b/openpype/modules/deadline/plugins/publish/collect_pools.py new file mode 100644 index 0000000000..4f54cdf211 --- /dev/null +++ b/openpype/modules/deadline/plugins/publish/collect_pools.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +"""Collect Deadline pools. 
Choose default one from Settings
+
+"""
+import pyblish.api
+
+
+class CollectDeadlinePools(pyblish.api.InstancePlugin):
+    """Collect pools from Deadline, if set on instance use these."""
+
+    order = pyblish.api.CollectorOrder + 0.04
+    label = "Collect Deadline Pools"
+    families = ["rendering", "render.farm", "renderFarm"]
+
+    primary_pool = None
+    secondary_pool = None
+
+    def process(self, instance):
+
+        if not instance.data.get("primaryPool"):
+            instance.data["primaryPool"] = self.primary_pool
+
+        if not instance.data.get("secondaryPool"):
+            instance.data["secondaryPool"] = self.secondary_pool
diff --git a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml
new file mode 100644
index 0000000000..5478ce08e0
--- /dev/null
+++ b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml
@@ -0,0 +1,24 @@
+
+
+
+Scene setting
+
+## Invalid Deadline pools found
+
+Configured pools don't match what is set in Deadline.
+
+{invalid_setting_str}
+
+### How to repair?
+
+ If your instance had deadline pools set on creation, remove or change them.<br/>
+In other cases inform admin to change them in Settings. + + Available deadline pools {pools_str}. + +### __Detailed Info__ + +This error is shown when deadline pool is not on Deadline anymore. It could happen in case of republish old workfile which was created with previous deadline pools, + or someone changed pools on Deadline side, but didn't modify Openpype Settings. + +
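Together, the `CollectDeadlinePools` collector above fills missing `primaryPool`/`secondaryPool` values from Settings, and the `ValidateDeadlinePools` plugin added later in this patch checks them against the pools reported by the web service. A condensed sketch of that check outside of pyblish; the instance data, pool names and defaults are sample assumptions:

    def check_instance_pools(instance_data, available_pools, defaults):
        # Collector behaviour: fall back to the Settings defaults.
        primary = instance_data.get("primaryPool") or defaults["primary_pool"]
        secondary = instance_data.get("secondaryPool") or defaults["secondary_pool"]

        # Validator behaviour: every configured pool must exist on Deadline.
        for pool in (primary, secondary):
            if pool and pool not in available_pools:
                raise ValueError(
                    "Configured pool '{}' not present on Deadline".format(pool)
                )
        return primary, secondary


    print(check_instance_pools(
        {"primaryPool": "gpu"},
        ["gpu", "cpu"],
        {"primary_pool": "cpu", "secondary_pool": ""},
    ))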
\ No newline at end of file diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index c499c14d40..1295d40654 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -37,8 +37,6 @@ class AfterEffectsSubmitDeadline( priority = 50 chunk_size = 1000000 - primary_pool = None - secondary_pool = None group = None department = None multiprocess = True @@ -62,8 +60,8 @@ class AfterEffectsSubmitDeadline( dln_job_info.Frames = frame_range dln_job_info.Priority = self.priority - dln_job_info.Pool = self.primary_pool - dln_job_info.SecondaryPool = self.secondary_pool + dln_job_info.Pool = self._instance.data.get("primaryPool") + dln_job_info.SecondaryPool = self._instance.data.get("secondaryPool") dln_job_info.Group = self.group dln_job_info.Department = self.department dln_job_info.ChunkSize = self.chunk_size diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 918efb6630..e320b6df4b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -241,8 +241,6 @@ class HarmonySubmitDeadline( optional = True use_published = False - primary_pool = "" - secondary_pool = "" priority = 50 chunk_size = 1000000 group = "none" @@ -259,8 +257,8 @@ class HarmonySubmitDeadline( # for now, get those from presets. Later on it should be # configurable in Harmony UI directly. job_info.Priority = self.priority - job_info.Pool = self.primary_pool - job_info.SecondaryPool = self.secondary_pool + job_info.Pool = self._instance.data.get("primaryPool") + job_info.SecondaryPool = self._instance.data.get("secondaryPool") job_info.ChunkSize = self.chunk_size job_info.BatchName = os.path.basename(self._instance.data["source"]) job_info.Department = self.department diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 59aeb68b79..82ff723e84 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -7,7 +7,7 @@ from avalon import api import pyblish.api -import hou +# import hou ??? 
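In the submitter hunks around this patch (the Houdini job info that follows, and the publish-job and Nuke payloads), the collected pool values are written straight into the Deadline job info. A minimal sketch of that mapping, with all other job info keys omitted as an assumption:

    def build_job_info(instance_data, job_name):
        # Pool keys mirror what the submit plugins send to Deadline.
        return {
            "Name": job_name,
            "Pool": instance_data.get("primaryPool"),
            "SecondaryPool": instance_data.get("secondaryPool"),
        }


    print(build_job_info(
        {"primaryPool": "cpu", "secondaryPool": "gpu"},
        "shot010 - render"
    ))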
class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): @@ -71,7 +71,8 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): "UserName": deadline_user, "Plugin": "Houdini", - "Pool": "houdini_redshift", # todo: remove hardcoded pool + "Pool": instance.data.get("primaryPool"), + "secondaryPool": instance.data.get("secondaryPool"), "Frames": frames, "ChunkSize": instance.data.get("chunkSize", 10), diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 9b5800c33f..2980193254 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -28,8 +28,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): priority = 50 chunk_size = 1 concurrent_tasks = 1 - primary_pool = "" - secondary_pool = "" group = "" department = "" limit_groups = {} @@ -187,8 +185,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "Department": self.department, - "Pool": self.primary_pool, - "SecondaryPool": self.secondary_pool, + "Pool": instance.data.get("primaryPool"), + "SecondaryPool": instance.data.get("secondaryPool"), "Group": self.group, "Plugin": "Nuke", diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 3c4e0d2913..74592e4552 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -259,8 +259,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "Priority": priority, "Group": self.deadline_group, - "Pool": self.deadline_pool, - "SecondaryPool": self.deadline_pool_secondary, + "Pool": instance.data.get("primaryPool"), + "SecondaryPool": instance.data.get("secondaryPool"), "OutputDirectory0": output_dir }, diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py new file mode 100644 index 0000000000..147829456f --- /dev/null +++ b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py @@ -0,0 +1,47 @@ +import pyblish.api + +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) +from openpype.modules.deadline.deadline_module import DeadlineModule + + +class ValidateDeadlinePools(OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin): + """Validate primaryPool and secondaryPool on instance. + + Values are on instance based on value insertion when Creating instance or + by Settings in CollectDeadlinePools. 
+ """ + + label = "Validate Deadline Web Service" + order = pyblish.api.ValidatorOrder + families = ["rendering", "render.farm", "renderFarm"] + optional = True + + def process(self, instance): + # get default deadline webservice url from deadline module + deadline_url = instance.context.data["defaultDeadline"] + self.log.info("deadline_url::{}".format(deadline_url)) + pools = DeadlineModule.get_deadline_pools(deadline_url, log=self.log) + self.log.info("pools::{}".format(pools)) + + formatting_data = { + "pools_str": ",".join(pools) + } + + primary_pool = instance.data.get("primaryPool") + if primary_pool and primary_pool not in pools: + msg = "Configured primary '{}' not present on Deadline".format( + instance.data["primaryPool"]) + + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) + + secondary_pool = instance.data.get("secondaryPool") + if secondary_pool and secondary_pool not in pools: + msg = "Configured secondary '{}' not present on Deadline".format( + instance.data["secondaryPool"]) + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 1ef169e387..ef017dd709 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -4,6 +4,10 @@ "CollectDefaultDeadlineServer": { "pass_mongo_url": false }, + "CollectDeadlinePools": { + "primary_pool": "", + "secondary_pool": "" + }, "ValidateExpectedFiles": { "enabled": true, "active": true, @@ -38,8 +42,6 @@ "priority": 50, "chunk_size": 10, "concurrent_tasks": 1, - "primary_pool": "", - "secondary_pool": "", "group": "", "department": "", "use_gpu": true, @@ -54,8 +56,6 @@ "use_published": true, "priority": 50, "chunk_size": 10000, - "primary_pool": "", - "secondary_pool": "", "group": "", "department": "" }, @@ -66,8 +66,6 @@ "use_published": true, "priority": 50, "chunk_size": 10000, - "primary_pool": "", - "secondary_pool": "", "group": "", "department": "", "multiprocess": true diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 5bf0a81a4d..cd1741ba8b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -30,6 +30,24 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectDeadlinePools", + "label": "Default Deadline Pools", + "children": [ + { + "type": "text", + "key": "primary_pool", + "label": "Primary Pool" + }, + { + "type": "text", + "key": "secondary_pool", + "label": "Secondary Pool" + } + ] + }, { "type": "dict", "collapsible": true, @@ -223,16 +241,6 @@ { "type": "splitter" }, - { - "type": "text", - "key": "primary_pool", - "label": "Primary Pool" - }, - { - "type": "text", - "key": "secondary_pool", - "label": "Secondary Pool" - }, { "type": "text", "key": "group", @@ -313,16 +321,6 @@ "key": "chunk_size", "label": "Chunk Size" }, - { - "type": "text", - "key": "primary_pool", - "label": "Primary Pool" - }, - { - "type": "text", - "key": "secondary_pool", - "label": "Secondary Pool" - }, { "type": "text", "key": "group", @@ -372,16 +370,6 @@ "key": "chunk_size", "label": "Chunk Size" }, - { - "type": "text", - "key": "primary_pool", - "label": "Primary Pool" - }, - { - "type": "text", - "key": 
"secondary_pool", - "label": "Secondary Pool" - }, { "type": "text", "key": "group", From 71d2185593eab6e619e5412763fdd479643b6448 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 11:47:06 +0200 Subject: [PATCH 189/207] OP-2005 - added new get_deadline_pools method --- openpype/modules/deadline/deadline_module.py | 46 ++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index 1a179e9aaf..0ac41ca874 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -1,8 +1,20 @@ import os +import requests +import six +import sys + + +from openpype.lib import requests_get from openpype.modules import OpenPypeModule from openpype_interfaces import IPluginPaths +class DeadlineWebserviceError(Exception): + """ + Exception to throw when connection to Deadline server fails. + """ + + class DeadlineModule(OpenPypeModule, IPluginPaths): name = "deadline" @@ -32,3 +44,37 @@ class DeadlineModule(OpenPypeModule, IPluginPaths): return { "publish": [os.path.join(current_dir, "plugins", "publish")] } + + @staticmethod + def get_deadline_pools(webservice, log=None): + # type: (str) -> list + """Get pools from Deadline. + Args: + webservice (str): Server url. + log (Logger) + Returns: + list: Pools. + Throws: + RuntimeError: If deadline webservice is unreachable. + + """ + if not log: + from openpype.lib import PypeLogger + + log = PypeLogger().get_logger(__name__) + + argument = "{}/api/pools?NamesOnly=true".format(webservice) + try: + response = requests_get(argument) + except requests.exceptions.ConnectionError as exc: + msg = 'Cannot connect to DL web service {}'.format(webservice) + log.error(msg) + six.reraise( + DeadlineWebserviceError, + DeadlineWebserviceError('{} - {}'.format(msg, exc)), + sys.exc_info()[2]) + if not response.ok: + log.warning("No pools retrieved") + return [] + + return response.json() From 92d30d5d19fb77d9fdaf3f0c288871dd080969eb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 12:48:26 +0200 Subject: [PATCH 190/207] OP-2005 - push through pool values to renderlayer instance --- openpype/hosts/maya/plugins/publish/collect_render.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index a525b562f3..5a8ea7efaf 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -342,6 +342,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "tileRendering": render_instance.data.get("tileRendering") or False, # noqa: E501 "tilesX": render_instance.data.get("tilesX") or 2, "tilesY": render_instance.data.get("tilesY") or 2, + "primaryPool": render_instance.data.get("primaryPool"), + "secondaryPool": render_instance.data.get("secondaryPool"), "priority": render_instance.data.get("priority"), "convertToScanline": render_instance.data.get( "convertToScanline") or False, From de9e762fc08d837c9a0bbb83107310dda1e2596f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 13:27:33 +0200 Subject: [PATCH 191/207] OP-2005 - get string values for pool instead enum index --- openpype/hosts/maya/plugins/publish/collect_render.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 
5a8ea7efaf..0e4e27ab51 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -342,8 +342,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "tileRendering": render_instance.data.get("tileRendering") or False, # noqa: E501 "tilesX": render_instance.data.get("tilesX") or 2, "tilesY": render_instance.data.get("tilesY") or 2, - "primaryPool": render_instance.data.get("primaryPool"), - "secondaryPool": render_instance.data.get("secondaryPool"), "priority": render_instance.data.get("priority"), "convertToScanline": render_instance.data.get( "convertToScanline") or False, @@ -388,6 +386,12 @@ class CollectMayaRender(pyblish.api.ContextPlugin): overrides = self.parse_options(str(render_globals)) data.update(**overrides) + # get string values for pools + primary_pool = overrides["renderGlobals"]["Pool"] + secondary_pool = overrides["renderGlobals"]["SecondaryPool"] + data["primaryPool"] = primary_pool + data["secondaryPool"] = secondary_pool + # Define nice label label = "{0} ({1})".format(expected_layer_name, data["asset"]) label += " [{0}-{1}]".format( From 4a8fb100013d7a8b8b16b4a0452b6be634431f6c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 13:28:22 +0200 Subject: [PATCH 192/207] OP-2005 - fix content of validation xml --- .../publish/help/validate_deadline_pools.xml | 37 +++++++++++-------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml index 5478ce08e0..0e7d72910e 100644 --- a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml +++ b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml @@ -1,24 +1,31 @@ - -Scene setting - -## Invalid Deadline pools found + + Scene setting + + ## Invalid Deadline pools found -Configured pools don't match what is set in Deadline. + Configured pools don't match what is set in Deadline. -{invalid_setting_str} + {invalid_value_str} -### How to repair? + ### How to repair? - If your instance had deadline pools set on creation, remove or change them.
-In other cases inform admin to change them in Settings. + If your instance had deadline pools set on creation, remove or + change them. - Available deadline pools {pools_str}. - -### __Detailed Info__ + In other cases inform admin to change them in Settings. -This error is shown when deadline pool is not on Deadline anymore. It could happen in case of republish old workfile which was created with previous deadline pools, - or someone changed pools on Deadline side, but didn't modify Openpype Settings. - + Available deadline pools {pools_str}. +
+ + ### __Detailed Info__ + + This error is shown when deadline pool is not on Deadline anymore. It + could happen in case of republish old workfile which was created with + previous deadline pools, + or someone changed pools on Deadline side, but didn't modify Openpype + Settings. + +
\ No newline at end of file From e9794d0367f1d14013f7005354735d5f3c4ce886 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 13:54:25 +0200 Subject: [PATCH 193/207] OP-2005 - refactor - moved deadline plugins later in order --- .../collect_deadline_server_from_instance.py | 2 +- .../publish/collect_default_deadline_server.py | 2 +- .../deadline/plugins/publish/collect_pools.py | 13 ++++++------- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index 1bc4eaa067..a7035cd99f 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -11,7 +11,7 @@ import pyblish.api class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): """Collect Deadline Webservice URL from instance.""" - order = pyblish.api.CollectorOrder + 0.02 + order = pyblish.api.CollectorOrder + 0.415 label = "Deadline Webservice from the Instance" families = ["rendering"] diff --git a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py b/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py index fc056342a8..e6ad6a9aa1 100644 --- a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py +++ b/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py @@ -6,7 +6,7 @@ import pyblish.api class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin): """Collect default Deadline Webservice URL.""" - order = pyblish.api.CollectorOrder + 0.01 + order = pyblish.api.CollectorOrder + 0.410 label = "Default Deadline Webservice" pass_mongo_url = False diff --git a/openpype/modules/deadline/plugins/publish/collect_pools.py b/openpype/modules/deadline/plugins/publish/collect_pools.py index 4f54cdf211..48130848d5 100644 --- a/openpype/modules/deadline/plugins/publish/collect_pools.py +++ b/openpype/modules/deadline/plugins/publish/collect_pools.py @@ -6,19 +6,18 @@ import pyblish.api class CollectDeadlinePools(pyblish.api.InstancePlugin): - """Collect pools from Deadline, if set on instance use these.""" + """Collect pools from instance if present, from Setting otherwise.""" - order = pyblish.api.CollectorOrder + 0.04 - label = "Deadline Webservice from the Instance" - families = ["rendering", "render.farm", "renderFarm"] + order = pyblish.api.CollectorOrder + 0.420 + label = "Collect Deadline Pools" + families = ["rendering", "render.farm", "renderFarm", "renderlayer"] primary_pool = None secondary_pool = None def process(self, instance): - if not instance.data.get("primaryPool"): - self.instance.data["primaryPool"] = self.primary_pool + instance.data["primaryPool"] = self.primary_pool or "none" if not instance.data.get("secondaryPool"): - self.instance.data["secondaryPool"] = self.secondary_pool + instance.data["secondaryPool"] = self.secondary_pool or "none" From 62c30d499bdd881b4054391601539a0e5177b7d3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 13:55:50 +0200 Subject: [PATCH 194/207] OP-2005 - updated validation content --- .../deadline/plugins/publish/validate_deadline_pools.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py index 
147829456f..e8f0d95ffb 100644 --- a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py +++ b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py @@ -15,9 +15,9 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, by Settings in CollectDeadlinePools. """ - label = "Validate Deadline Web Service" + label = "Validate Deadline Pools" order = pyblish.api.ValidatorOrder - families = ["rendering", "render.farm", "renderFarm"] + families = ["rendering", "render.farm", "renderFarm", "renderlayer"] optional = True def process(self, instance): @@ -35,7 +35,7 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, if primary_pool and primary_pool not in pools: msg = "Configured primary '{}' not present on Deadline".format( instance.data["primaryPool"]) - + formatting_data["invalid_value_str"] = msg raise PublishXmlValidationError(self, msg, formatting_data=formatting_data) @@ -43,5 +43,6 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, if secondary_pool and secondary_pool not in pools: msg = "Configured secondary '{}' not present on Deadline".format( instance.data["secondaryPool"]) + formatting_data["invalid_value_str"] = msg raise PublishXmlValidationError(self, msg, formatting_data=formatting_data) From aa78ddf5be8937dce5d1c9cb2762a8be84e49e27 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 13:57:15 +0200 Subject: [PATCH 195/207] OP-2005 - changed order of create render for AE It should be before DL plugins as they react on family (render.farm in this case). --- openpype/hosts/aftereffects/plugins/publish/collect_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 2a4b773681..3e44acd7e9 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -25,7 +25,7 @@ class AERenderInstance(RenderInstance): class CollectAERender(abstract_collect_render.AbstractCollectRender): - order = pyblish.api.CollectorOrder + 0.498 + order = pyblish.api.CollectorOrder + 0.400 label = "Collect After Effects Render Layers" hosts = ["aftereffects"] From b4f8e28e4a4d6f26ba6b5c0cd79115105059dc11 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 14:06:06 +0200 Subject: [PATCH 196/207] OP-2005 - Hound --- .../deadline/plugins/publish/validate_deadline_pools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py index e8f0d95ffb..78eed17c98 100644 --- a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py +++ b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py @@ -34,7 +34,7 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, primary_pool = instance.data.get("primaryPool") if primary_pool and primary_pool not in pools: msg = "Configured primary '{}' not present on Deadline".format( - instance.data["primaryPool"]) + instance.data["primaryPool"]) formatting_data["invalid_value_str"] = msg raise PublishXmlValidationError(self, msg, formatting_data=formatting_data) @@ -42,7 +42,7 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, secondary_pool = instance.data.get("secondaryPool") if secondary_pool and secondary_pool not in pools: msg = "Configured secondary '{}' not present on Deadline".format( - 
instance.data["secondaryPool"]) + instance.data["secondaryPool"]) formatting_data["invalid_value_str"] = msg raise PublishXmlValidationError(self, msg, formatting_data=formatting_data) From 1f5be56ae2a2e930b2451fd91e120194b14d8d18 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Apr 2022 14:39:56 +0200 Subject: [PATCH 197/207] added more logs --- .../hosts/nuke/plugins/publish/extract_slate_frame.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index 6935afe144..fb52fc18b4 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -189,6 +189,7 @@ class ExtractSlateFrame(openpype.api.Extractor): for key, value in self.key_value_mapping.items(): enabled, template = value if not enabled: + self.log.debug("Key \"{}\" is disabled".format(key)) continue try: @@ -205,13 +206,19 @@ class ExtractSlateFrame(openpype.api.Extractor): except KeyError: self.log.warning( - "Template contains unknown key", + ( + "Template contains unknown key." + " Template \"{}\" Data: {}" + ).format(template, fill_data), exc_info=True ) continue try: node[key].setValue(value) + self.log.info("Change key \"{}\" to value \"{}\"".format( + key, value + )) except NameError: self.log.warning(( "Failed to set value \"{}\" on node attribute \"{}\"" From 786a6494eb32e9bbe0dd448482067647d6fddd12 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Apr 2022 14:40:08 +0200 Subject: [PATCH 198/207] fixed settings --- .../defaults/project_settings/nuke.json | 26 ++++---- .../schemas/schema_nuke_publish.json | 66 ++++++++++--------- 2 files changed, 50 insertions(+), 42 deletions(-) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index bdccb9b38e..ab015271ff 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -161,18 +161,20 @@ }, "ExtractSlateFrame": { "viewer_lut_raw": false, - "f_submission_note": [ - true, - "{comment}" - ], - "f_submitting_for": [ - true, - "{intent[value]}" - ], - "f_vfx_scope_of_work": [ - false, - "" - ] + "key_value_mapping": { + "f_submission_note": [ + true, + "{comment}" + ], + "f_submitting_for": [ + true, + "{intent[value]}" + ], + "f_vfx_scope_of_work": [ + false, + "" + ] + } }, "IncrementScriptVersion": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 3bf0eb3214..4a796f1933 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -399,41 +399,47 @@ "word_wrap": true }, { - "type": "list-strict", - "key": "f_submission_note", - "label": "Submission Note:", - "object_types": [ + "type": "dict", + "key": "key_value_mapping", + "children": [ { - "type": "boolean" + "type": "list-strict", + "key": "f_submission_note", + "label": "Submission Note:", + "object_types": [ + { + "type": "boolean" + }, + { + "type": "text" + } + ] }, { - "type": "text" - } - ] - }, - { - "type": "list-strict", - "key": "f_submitting_for", - "label": "Submission For:", - "object_types": [ - { - "type": "boolean" + "type": "list-strict", + "key": 
"f_submitting_for", + "label": "Submission For:", + "object_types": [ + { + "type": "boolean" + }, + { + "type": "text" + } + ] }, { - "type": "text" - } - ] - }, - { - "type": "list-strict", - "key": "f_vfx_scope_of_work", - "label": "VFX Scope Of Work:", - "object_types": [ - { - "type": "boolean" - }, - { - "type": "text" + "type": "list-strict", + "key": "f_vfx_scope_of_work", + "label": "VFX Scope Of Work:", + "object_types": [ + { + "type": "boolean" + }, + { + "type": "text" + } + ] } ] } From f49d01559a25e084236a5b5ed936eb16a9006545 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Apr 2022 14:40:27 +0200 Subject: [PATCH 199/207] fix database access in loader --- openpype/tools/utils/lib.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index efaf671915..5abbe01144 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -426,7 +426,10 @@ class FamilyConfigCache: # Make sure connection is installed # - accessing attribute which does not have auto-install self.dbcon.install() - asset_doc = self.dbcon.database[project_name].find_one( + database = getattr(self.dbcon, "database", None) + if database is None: + database = self.dbcon._database + asset_doc = database[project_name].find_one( {"type": "asset", "name": asset_name}, {"data.tasks": True} ) or {} From 0edeb4c6dc174c251e501f45e02ff37e6f95dbfc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 12 Apr 2022 15:42:45 +0200 Subject: [PATCH 200/207] added init file for worker which triggers missing sound file dialog on worker open --- openpype/hosts/tvpaint/worker/init_file.tvpp | Bin 0 -> 59333 bytes openpype/hosts/tvpaint/worker/worker.py | 23 +++++++++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 openpype/hosts/tvpaint/worker/init_file.tvpp diff --git a/openpype/hosts/tvpaint/worker/init_file.tvpp b/openpype/hosts/tvpaint/worker/init_file.tvpp new file mode 100644 index 0000000000000000000000000000000000000000..572d278fdb49c619d3cf040f59238377d1062d6a GIT binary patch literal 59333 zcmeI5U2Gjk6~`xy#|eL$!}Lal@Z5(1%?N=Qf`0dGjWgWwfZR9<*R;v@XejPJcO&bn2| z8yq`(_sp3yXU_a)c5Ub7@4tNO;a@y`;`g`T6~~`{_lG|c>gz8*_HSR9ApbjGG5w>D zq5E^*zm%SvOE= zj{5anGivQOF<%NM+B<^Z-b9@_?ipGRA|*agsrxwA@T7PE8CTSf z&Y`L*R{IGtjXGju+>Ekg@hIk^`8+X!dw@?OM_IL9hli?pSj=MtRWprMMT$ZU=K36b zT$Tp1(4J5o`UKHQ8sB&39@^Kt{{)`c*MHENIeu3h&GvEU_VInA?)?-$x-SFm7VT4n zeWuSYKI!z(iP|5l&)ZE<_i;Ap)C`}2NA~4iL9TV|=H%X<(sT0ZkBJ$#vM#FAS;9Da z4lB6y_owbD3!Q$QLk=>yIQ#RczbS5@!j^i5kK*ih;G;JNo%FuH1V2&fiko{Y7UGjQ zxsljbtLtGro$?v9=q-{eCMj~zyMmrU*NDFeFERV*_+x|oXAw{5nON_rJvoXe&h(8T z|DrgF`P#TT>M5ot`Xru@lj4kc4sVcYtU-bx`9Atd>OTVxPW?qZ(F^MR<719-?0OCN zDz1upe~pQAh*?BEV~7io+XYX0H~FZUp7;)~6racO7VKix9b~1I+yJc}?!!04QRMV6 zM+4tAb@_Oj_m484YMf918w-D?pB1m2Gk;cGd%=I>trGcPIb!a?KLk^=p z5B1tVM;=gbqYNfC(@GCcp%k025#W zOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l2`~XBzyz286JP>NfC(@GCcp%k025#W zOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l2`~XBzyz286JP>NfC(@GCcp%k025#W zOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<3wz6p$6Oin%Z_KDxUebUqg5NH4P^Iu-+ zX1%m^wH&2smPdJ#rQ=O_^3W}}l5Vq|b>kN7&0ZekmUp7&jeh@j*6wY_p)8mEH_Lw| z-GR6NZRUUQ*OdQyFS*{ydRte^&8(euTsN$Fj{fIWDtk^DdV%XyD)evY^+MQ=lobb^ zL$|Npb>Hzk!ySaOfiM?&YKFTK);&koU8mMCyw$KKmDiJI>RMP099cnT%Uutvt`n&7 zMvg`(E3Q+m+QHIA9nz_x@S2fY%24{!@fwcw>%$~cuhDhPP(M^H%NM|>y#o-k64Gj= zx{Q?7z2O9J7+h1-$Y)Mf52aUg=(3}+wxMsB#d8N)_WJb(dRX0FyzSrH!r>fy@qY2V z9yW2XB%@AGFO`<7uKvOCs)Ks^^{3Hhj6*gPuSdOhURF&RPHMvor?laR)7l8a8EsU; z)7q$pv)ZVIXS7id&uXI)&M5d`=tH@cHsq)a~BPIxQ@IXiLNP4BI#C 
zz_2TZT{Y~QVb=}2F|+|?FfUH~&^G3k#=O#)7e{#*Z_F!=d8IM0H0A}e!C0Kbp>53T z8S~sd+iY32cvdYyZbn+Q;BhwlsFv-do2(`6B;Nsg zX*Tg{F`J&<#EZo2#rwl-R_tcgZr1E3-i78^yaJ6Tz1Gd9T@_yAR=h}COXMekZ;Mc5OoNmZ5oe|+Zy`oaEs&-}qY0)-;}VggLy z|CNCMihN(a7gF=RkiX@>_Q!9N|64yX{SYq`5KNfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l z2`~XBzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l z2`~XBzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e*Fb7$tv)I78mN)PTH{<}D| zyF0UVn%a-y8WD$~pzr{$L%2?e(#-tgG{wSq2zN1c=KPF`gKu(qk`x_E;*qJR>6%`i zU*79O+>#`lc*7=biIERK{P51?!oni#FF=(bO48iPw&Jwww6d(zb@I&F$$A|p*^I6y zsg}8O#H^kLayD0gH9I#?vzKrk!FA~T>yhWVvA-77eYac| z7iQFs2AALX=I&2_d+ylF?>%w*^Jo6>-Mu0{^ZwLJUwZblr~mcUm*)N{*S}WV^J8y> zSXrH>y?9K?73NSNBrQR$QCOaxo;?GMPf%afYbDu#A!P{Nqk)v6PV$snm_{iWCaqi0 zs~U#8MSePRMgD{8q(fH}Py12ir(;*(SLd$Cf53X7U|5{zfb~MtusHsJ z^+M&aIR1e3LhZ9S{($wW6IifbD1sK|QD?9y{($wWQ&<##zMCtk)}rm}(i+;V zuA$B98rrO`q0Ql&J{IQ;{SSJtA{iner6hx)d3Jclhy{}oWp zK1=^qy;cU}Sc}$_OvORAnzp_wjBsd86Cgg!VaP+Fks%T8A(@#MvcGH*h@Phh%&mGV2g{hu4J=d&6@o>YvhHN4Inu z4F{U=62SoipyM7u0zJSm(tmek6@xuWB%ONQsYAFOTJz;*Cyw%XZ71B?*@~O(-kP&I zKejY2A!AQHNtUkll6EUxZ01Uv-u2|5r-hNI+4CmcU_wHY9-s9IBVZ%z<{|#`!kJ9* zYYipC^v)?M$16_`vSi)x(%eEd7&x}$P8Xqf54 Date: Tue, 12 Apr 2022 17:04:54 +0200 Subject: [PATCH 201/207] fix docstring --- openpype/pipeline/farm/patterning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 5ba7a8df4b..457f3afbca 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -12,7 +12,7 @@ def match_aov_pattern(host_name, aov_patterns, render_file_name): Args: app (str): Host name. - aov_patterns (list): List of AOV patterns from AOV filters. + aov_patterns (dict): AOV patterns from AOV filters. render_file_name (str): Incoming file name to match against. 
Returns: From 052392ac353198d95c95ae89ea5baff1690e80e1 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 12 Apr 2022 17:10:11 +0200 Subject: [PATCH 202/207] fixed default filter and list handling --- .../modules/deadline/plugins/publish/submit_publish_job.py | 4 ++-- openpype/pipeline/farm/patterning.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 63f9e35720..75c8edc8d4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -108,7 +108,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): families = ["render.farm", "prerender.farm", "renderlayer", "imagesequence", "vrayscene"] - aov_filter = {"maya": [r".*(?:[\._-])*([Bb]eauty)(?:[\.|_])*.*"], + aov_filter = {"maya": [r".*([Bb]eauty).*"], "aftereffects": [r".*"], # for everything from AE "harmony": [r".*"], # for everything from AE "celaction": [r".*"]} @@ -130,7 +130,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "OPENPYPE_PUBLISH_JOB" ] - # custom deadline atributes + # custom deadline attributes deadline_department = "" deadline_pool = "" deadline_pool_secondary = "" diff --git a/openpype/pipeline/farm/patterning.py b/openpype/pipeline/farm/patterning.py index 457f3afbca..1e4b5bf37d 100644 --- a/openpype/pipeline/farm/patterning.py +++ b/openpype/pipeline/farm/patterning.py @@ -21,4 +21,4 @@ def match_aov_pattern(host_name, aov_patterns, render_file_name): aov_pattern = aov_patterns.get(host_name, []) if not aov_pattern: return False - return re.match(aov_pattern, render_file_name) is not None + return any(re.match(p, render_file_name) for p in aov_pattern) From 8c3e63c35dc8af7a2ceac5339e7963048ff5b773 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 17:54:56 +0200 Subject: [PATCH 203/207] OP-2005 - set default from Settings to dropdown --- .../maya/plugins/create/create_render.py | 37 ++++++++++++++++--- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 2ded7c720d..15230519d2 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -204,7 +204,6 @@ class CreateRender(plugin.Creator): def _deadline_webservice_changed(self): """Refresh Deadline server dependent options.""" # get selected server - from maya import cmds webservice = self.deadline_servers[ self.server_aliases[ cmds.getAttr("{}.deadlineServers".format(self.instance)) @@ -213,12 +212,23 @@ class CreateRender(plugin.Creator): pools = self.deadline_module.get_deadline_pools(webservice, self.log) cmds.deleteAttr("{}.primaryPool".format(self.instance)) cmds.deleteAttr("{}.secondaryPool".format(self.instance)) + + pool_setting = (self._project_settings["deadline"] + ["publish"] + ["CollectDeadlinePools"]) + + primary_pool = pool_setting["primary_pool"] + sorted_pools = self._set_default_pool(list(pools), primary_pool) cmds.addAttr(self.instance, longName="primaryPool", attributeType="enum", - enumName=":".join(pools)) - cmds.addAttr(self.instance, longName="secondaryPool", + enumName=":".join(sorted_pools)) + + pools = ["-"] + pools + secondary_pool = pool_setting["secondary_pool"] + sorted_pools = self._set_default_pool(list(pools), secondary_pool) + 
cmds.addAttr("{}.secondaryPool".format(self.instance), attributeType="enum", - enumName=":".join(["-"] + pools)) + enumName=":".join(sorted_pools)) def _create_render_settings(self): """Create instance settings.""" @@ -299,12 +309,27 @@ class CreateRender(plugin.Creator): self.log.info(" - pool: {}".format(pool["name"])) pool_names.append(pool["name"]) - self.data["primaryPool"] = pool_names + pool_setting = (self._project_settings["deadline"] + ["publish"] + ["CollectDeadlinePools"]) + primary_pool = pool_setting["primary_pool"] + self.data["primaryPool"] = self._set_default_pool(pool_names, + primary_pool) # We add a string "-" to allow the user to not # set any secondary pools - self.data["secondaryPool"] = ["-"] + pool_names + pool_names = ["-"] + pool_names + secondary_pool = pool_setting["secondary_pool"] + self.data["secondaryPool"] = self._set_default_pool(pool_names, + secondary_pool) self.options = {"useSelection": False} # Force no content + def _set_default_pool(self, pool_names, pool_value): + """Reorder pool names, default should come first""" + if pool_value and pool_value in pool_names: + pool_names.remove(pool_value) + pool_names = [pool_value] + pool_names + return pool_names + def _load_credentials(self): """Load Muster credentials. From 9b1a739e18fa1229d1ceb3e257979eba8e132c40 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 17:56:18 +0200 Subject: [PATCH 204/207] OP-2005 - set default from Settings to dropdown --- openpype/hosts/maya/plugins/publish/collect_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 0e4e27ab51..c229ca226f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -388,7 +388,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): # get string values for pools primary_pool = overrides["renderGlobals"]["Pool"] - secondary_pool = overrides["renderGlobals"]["SecondaryPool"] + secondary_pool = overrides["renderGlobals"].get("SecondaryPool") data["primaryPool"] = primary_pool data["secondaryPool"] = secondary_pool From e14a5eb98aff1eb61920fc70d08291ad3e8af119 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 12 Apr 2022 20:02:43 +0200 Subject: [PATCH 205/207] OP-2005 - refactor - changed logger import --- openpype/modules/deadline/deadline_module.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index 0ac41ca874..c30db75188 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -3,8 +3,7 @@ import requests import six import sys - -from openpype.lib import requests_get +from openpype.lib import requests_get, PypeLogger from openpype.modules import OpenPypeModule from openpype_interfaces import IPluginPaths @@ -59,9 +58,7 @@ class DeadlineModule(OpenPypeModule, IPluginPaths): """ if not log: - from openpype.lib import PypeLogger - - log = PypeLogger().get_logger(__name__) + log = PypeLogger.get_logger(__name__) argument = "{}/api/pools?NamesOnly=true".format(webservice) try: From c5069096d6dbe369fe1bb25cffec0888172ae62a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 13 Apr 2022 13:38:05 +0200 Subject: [PATCH 206/207] fixing module file and list indexing --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 2 
+- openpype/pipeline/farm/__init__.py | 0 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 openpype/pipeline/farm/__init__.py diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 5befae0794..c444c3aa53 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -540,7 +540,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if instance.get("multipartExr", False): preview = True else: - render_file_name = list(collection[0]) + render_file_name = list(collection)[0] host_name = os.environ.get("AVALON_APP", "") # if filtered aov name is found in filename, toggle it for # preview video rendering diff --git a/openpype/pipeline/farm/__init__.py b/openpype/pipeline/farm/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From 7d521c15ea634476efca81f5e5ab16b3f764f8b5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 13 Apr 2022 22:21:56 +0200 Subject: [PATCH 207/207] fixed typos in imports from openpype.hosts.nuke.api.command --- openpype/hosts/nuke/plugins/inventory/select_containers.py | 2 +- openpype/hosts/nuke/plugins/load/load_backdrop.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/inventory/select_containers.py b/openpype/hosts/nuke/plugins/inventory/select_containers.py index d7d5f00b87..4e7a20fb26 100644 --- a/openpype/hosts/nuke/plugins/inventory/select_containers.py +++ b/openpype/hosts/nuke/plugins/inventory/select_containers.py @@ -1,5 +1,5 @@ from openpype.pipeline import InventoryAction -from openpype.hosts.nuke.api.commands import viewer_update_and_undo_stop +from openpype.hosts.nuke.api.command import viewer_update_and_undo_stop class SelectContainers(InventoryAction): diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 36cec6f4c5..d55dd4cf71 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -14,7 +14,7 @@ from openpype.hosts.nuke.api.lib import ( get_avalon_knob_data, set_avalon_knob_data ) -from openpype.hosts.nuke.api.commands import viewer_update_and_undo_stop +from openpype.hosts.nuke.api.command import viewer_update_and_undo_stop from openpype.hosts.nuke.api import containerise, update_container
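Taken together, the pool patches above collect "primaryPool"/"secondaryPool" onto each publish instance (CollectDeadlinePools), validate them against the pools reported by the Deadline Web Service (DeadlineModule.get_deadline_pools, ValidateDeadlinePools), and hand them to the submitter plugins as the JobInfo "Pool"/"SecondaryPool" keys. The snippet below is a minimal standalone sketch of that query-and-validate flow, not the plugin code itself: it uses plain requests instead of openpype.lib.requests_get, the webservice URL is a placeholder, and only the "/api/pools?NamesOnly=true" endpoint and the validation rule are taken from the patches.

# Sketch only: mirrors DeadlineModule.get_deadline_pools() and
# ValidateDeadlinePools from the patches above, using plain requests and a
# placeholder webservice URL.
import requests

DEADLINE_URL = "http://localhost:8082"  # placeholder Deadline Web Service URL


def get_deadline_pools(webservice):
    """Return pool names known to the Deadline Web Service."""
    response = requests.get(
        "{}/api/pools?NamesOnly=true".format(webservice), timeout=10)
    if not response.ok:
        # Same fallback as the module: warn-and-return-empty when no
        # pools could be retrieved.
        return []
    return response.json()


def validate_instance_pools(instance_data, pools):
    """Raise if a pool set on the instance is unknown to Deadline."""
    for key in ("primaryPool", "secondaryPool"):
        value = instance_data.get(key)
        if value and value not in pools:
            raise ValueError(
                "Configured {} '{}' not present on Deadline. "
                "Available pools: {}".format(key, value, ", ".join(pools)))


if __name__ == "__main__":
    pools = get_deadline_pools(DEADLINE_URL)
    # "none" is the fallback value CollectDeadlinePools writes when no pool
    # is set on the instance or in Settings.
    validate_instance_pools(
        {"primaryPool": "none", "secondaryPool": "none"}, pools)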