From 16b169205ef8816099d1d94ff263069298d406cc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 01:29:51 +0200 Subject: [PATCH 01/25] Allow camera path to not be set correctly in review instance until validation --- .../plugins/publish/collect_review_data.py | 10 +++--- .../plugins/publish/validate_scene_review.py | 33 ++++++++++++++----- 2 files changed, 30 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_review_data.py b/openpype/hosts/houdini/plugins/publish/collect_review_data.py index e321dcb2fa..3ab93dc491 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_review_data.py +++ b/openpype/hosts/houdini/plugins/publish/collect_review_data.py @@ -18,6 +18,9 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin): instance.data["handleStart"] = 0 instance.data["handleEnd"] = 0 + # Enable ftrack functionality + instance.data.setdefault("families", []).append('ftrack') + # Get the camera from the rop node to collect the focal length ropnode_path = instance.data["instance_node"] ropnode = hou.node(ropnode_path) @@ -25,8 +28,9 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin): camera_path = ropnode.parm("camera").eval() camera_node = hou.node(camera_path) if not camera_node: - raise RuntimeError("No valid camera node found on review node: " - "{}".format(camera_path)) + self.log.warning("No valid camera node found on review node: " + "{}".format(camera_path)) + return # Collect focal length. focal_length_parm = camera_node.parm("focal") @@ -48,5 +52,3 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin): # Store focal length in `burninDataMembers` burnin_members = instance.data.setdefault("burninDataMembers", {}) burnin_members["focalLength"] = focal_length - - instance.data.setdefault("families", []).append('ftrack') diff --git a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py index ade01d4b90..58d8a37240 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py +++ b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py @@ -16,13 +16,17 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): label = "Scene Setting for review" def process(self, instance): - invalid = self.get_invalid_scene_path(instance) report = [] + instance_node = hou.node(instance.data.get("instance_node")) + + invalid = self.get_invalid_scene_path(instance_node) if invalid: - report.append( - "Scene path does not exist: '%s'" % invalid[0], - ) + report.append(invalid) + + invalid = self.get_invalid_camera_path(instance_node) + if invalid: + report.append(invalid) invalid = self.get_invalid_resolution(instance) if invalid: @@ -33,13 +37,24 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): "\n\n".join(report), title=self.label) - def get_invalid_scene_path(self, instance): - - node = hou.node(instance.data.get("instance_node")) - scene_path_parm = node.parm("scenepath") + def get_invalid_scene_path(self, rop_node): + scene_path_parm = rop_node.parm("scenepath") scene_path_node = scene_path_parm.evalAsNode() if not scene_path_node: - return [scene_path_parm.evalAsString()] + path = scene_path_parm.evalAsString() + return "Scene path does not exist: '{}'".format(path) + + def get_invalid_camera_path(self, rop_node): + camera_path_parm = rop_node.parm("camera") + camera_node = camera_path_parm.evalAsNode() + path = camera_path_parm.evalAsString() + if not camera_node: + return "Camera path does not exist: 
'{}'".format(path) + type_name = camera_node.type().name() + if type_name != "cam": + return "Camera path is not a camera: '{}' (type: {})".format( + path, type_name + ) def get_invalid_resolution(self, instance): node = hou.node(instance.data.get("instance_node")) From 0424f66164717b5127f89612f0d83b7865bece63 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 01:34:01 +0200 Subject: [PATCH 02/25] Re-use instance node --- .../houdini/plugins/publish/validate_scene_review.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py index 58d8a37240..a44b7e1597 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py +++ b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py @@ -28,7 +28,7 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): if invalid: report.append(invalid) - invalid = self.get_invalid_resolution(instance) + invalid = self.get_invalid_resolution(instance_node) if invalid: report.extend(invalid) @@ -56,18 +56,17 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): path, type_name ) - def get_invalid_resolution(self, instance): - node = hou.node(instance.data.get("instance_node")) + def get_invalid_resolution(self, rop_node): # The resolution setting is only used when Override Camera Resolution # is enabled. So we skip validation if it is disabled. - override = node.parm("tres").eval() + override = rop_node.parm("tres").eval() if not override: return invalid = [] - res_width = node.parm("res1").eval() - res_height = node.parm("res2").eval() + res_width = rop_node.parm("res1").eval() + res_height = rop_node.parm("res2").eval() if res_width == 0: invalid.append("Override Resolution width is set to zero.") if res_height == 0: From 95c802047cff3dc211c7f0ad037497befbff0c14 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 16:40:49 +0200 Subject: [PATCH 03/25] Don't make ExtractOpenGL optional --- .../hosts/houdini/plugins/publish/extract_opengl.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_opengl.py b/openpype/hosts/houdini/plugins/publish/extract_opengl.py index c26d0813a6..6c36dec5f5 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_opengl.py +++ b/openpype/hosts/houdini/plugins/publish/extract_opengl.py @@ -2,27 +2,20 @@ import os import pyblish.api -from openpype.pipeline import ( - publish, - OptionalPyblishPluginMixin -) +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop import hou -class ExtractOpenGL(publish.Extractor, - OptionalPyblishPluginMixin): +class ExtractOpenGL(publish.Extractor): order = pyblish.api.ExtractorOrder - 0.01 label = "Extract OpenGL" families = ["review"] hosts = ["houdini"] - optional = True def process(self, instance): - if not self.is_active(instance.data): - return ropnode = hou.node(instance.data.get("instance_node")) output = ropnode.evalParm("picture") From 60d386b127badba113199c94111bd76de1dee041 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 24 Apr 2023 12:53:17 +0200 Subject: [PATCH 04/25] :bug: fix missing review flag on instance with pre-render --- openpype/hosts/nuke/plugins/publish/collect_writes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py 
index 536a0698f3..6697a1e59a 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -190,7 +190,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, # make sure rendered sequence on farm will # be used for extract review - if not instance.data["review"]: + if not instance.data.get("review"): instance.data["useSequenceForReview"] = False self.log.debug("instance.data: {}".format(pformat(instance.data))) From a1b264de9b2b910f1c7a5b7aadd0b931103fcb5d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 16:19:44 +0200 Subject: [PATCH 05/25] Fix houdini workfile icon --- openpype/hosts/houdini/plugins/create/create_workfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 0c6d840810..5f5aa306ee 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -14,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): identifier = "io.openpype.creators.houdini.workfile" label = "Workfile" family = "workfile" - icon = "document" + icon = "file-o" default_variant = "Main" @@ -90,4 +90,4 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): for created_inst, _changes in update_list: if created_inst["creator_identifier"] == self.identifier: workfile_data = {"workfile": created_inst.data_to_store()} - imprint(op_ctx, workfile_data, update=True) + imprint(op_ctx, workfile_data, update=True) \ No newline at end of file From e2fc8564e6e2fe64b47d3d8561f0f288dec35b98 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 16:23:41 +0200 Subject: [PATCH 06/25] Fix accidental newline at end of file removal --- openpype/hosts/houdini/plugins/create/create_workfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 5f5aa306ee..9884fca325 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -90,4 +90,4 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): for created_inst, _changes in update_list: if created_inst["creator_identifier"] == self.identifier: workfile_data = {"workfile": created_inst.data_to_store()} - imprint(op_ctx, workfile_data, update=True) \ No newline at end of file + imprint(op_ctx, workfile_data, update=True) From ebcd48d13875f472a4c5d1eddc9e4a834b37133d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 24 Apr 2023 17:36:26 +0200 Subject: [PATCH 07/25] Publisher: Keep track about current context and fix context selection widget (#4892) * keep track about last context so it can be updated on context change * don't use '_asset_name' attribute for validation of selected asset * use current context after publisher window close --- .../tools/publisher/widgets/create_widget.py | 39 ++++++++++++++++++- openpype/tools/publisher/window.py | 3 ++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index ef9c5b98fe..db20b21ed7 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -282,6 +282,9 @@ class 
CreateWidget(QtWidgets.QWidget): thumbnail_widget.thumbnail_created.connect(self._on_thumbnail_create) thumbnail_widget.thumbnail_cleared.connect(self._on_thumbnail_clear) + controller.event_system.add_callback( + "main.window.closed", self._on_main_window_close + ) controller.event_system.add_callback( "plugins.refresh.finished", self._on_plugins_refresh ) @@ -316,6 +319,10 @@ class CreateWidget(QtWidgets.QWidget): self._first_show = True self._last_thumbnail_path = None + self._last_current_context_asset = None + self._last_current_context_task = None + self._use_current_context = True + @property def current_asset_name(self): return self._controller.current_asset_name @@ -356,12 +363,39 @@ class CreateWidget(QtWidgets.QWidget): if check_prereq: self._invalidate_prereq() + def _on_main_window_close(self): + """Publisher window was closed.""" + + # Use current context on next refresh + self._use_current_context = True + def refresh(self): + current_asset_name = self._controller.current_asset_name + current_task_name = self._controller.current_task_name + # Get context before refresh to keep selection of asset and # task widgets asset_name = self._get_asset_name() task_name = self._get_task_name() + # Replace by current context if last loaded context was + # 'current context' before reset + if ( + self._use_current_context + or ( + self._last_current_context_asset + and asset_name == self._last_current_context_asset + and task_name == self._last_current_context_task + ) + ): + asset_name = current_asset_name + task_name = current_task_name + + # Store values for future refresh + self._last_current_context_asset = current_asset_name + self._last_current_context_task = current_task_name + self._use_current_context = False + self._prereq_available = False # Disable context widget so refresh of asset will use context asset @@ -398,7 +432,10 @@ class CreateWidget(QtWidgets.QWidget): prereq_available = False creator_btn_tooltips.append("Creator is not selected") - if self._context_change_is_enabled() and self._asset_name is None: + if ( + self._context_change_is_enabled() + and self._get_asset_name() is None + ): # QUESTION how to handle invalid asset? prereq_available = False creator_btn_tooltips.append("Context is not selected") diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 0615157e1b..e94979142a 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -406,6 +406,9 @@ class PublisherWindow(QtWidgets.QDialog): self._comment_input.setText("") # clear comment self._reset_on_show = True self._controller.clear_thumbnail_temp_dir_path() + # Trigger custom event that should be captured only in UI + # - backend (controller) must not be dependent on this event topic!!! 
+        self._controller.event_system.emit("main.window.closed", {}, "window")
         super(PublisherWindow, self).closeEvent(event)
 
     def leaveEvent(self, event):

From afa3f563e43be117af30bd2896983b7bd7027d9f Mon Sep 17 00:00:00 2001
From: Ynbot
Date: Mon, 24 Apr 2023 15:41:57 +0000
Subject: [PATCH 08/25] [Automated] Release

---
 CHANGELOG.md | 303 ++++++++++++++++++++++++++++++++++++++++++++
 openpype/version.py | 2 +-
 pyproject.toml | 2 +-
 3 files changed, 305 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5aeb546c14..16deaaa4fd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,309 @@
 # Changelog
+## [3.15.5](https://github.com/ynput/OpenPype/tree/3.15.5)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.15.4...3.15.5)
+
+### **🚀 Enhancements**
+
+
+Maya: Playblast profiles #4777

Support playblast profiles. This enables studios to customize what playblast settings should be used on a per-task and/or per-subset basis. For example, `modeling` should have `Wireframe On Shaded` enabled, while all other tasks should have it disabled.


___

+ + +
+Maya: Support .abc files directly for Arnold standin look assignment #4856

If an `.abc` file is loaded into an Arnold standin, support look assignment through the `cbId` attributes in the alembic file.


___

+ + +
+Maya: Hide animation instance in creator #4872 + +- Hide animation instance in creator +- Add inventory action to recreate animation publish instance for loaded rigs + + +___ + +
+ + +
+Unreal: Render Creator enhancements #4477

Improvements to the creator for the render family.

This PR introduces some enhancements to the creator for the render family in Unreal Engine:
- Added the option to create a new, empty sequence for the render.
- Added the option to not include the whole hierarchy for the selected sequence.
- Improved error messages.


___

+ + +
+Unreal: Added settings for rendering #4575

Added settings for rendering in Unreal Engine.

Two settings have been added:
- Pre roll frames, to set how many frames are used to load the scene before starting the actual rendering.
- Configuration path, to allow saving a preset of settings from Unreal and using it for rendering.


___

+ + +
+Global: Optimize anatomy formatting by only formatting used templates instead #4784

Optimization to not format the full anatomy when only a single template is used; instead, only that single template is formatted.


___

+ + +
+Patchelf version locked #4853

For the CentOS dockerfile it is necessary to lock the patchelf version to an older one, otherwise the build process fails.

___

+ + +
+Houdini: Implement `switch` method on loaders #4866 + +Implement `switch` method on loaders + + +___ + +
+ + +
+Code: Tweak docstrings and return type hints #4875 + +Tweak docstrings and return type hints for functions in `openpype.client.entities`. + + +___ + +
+ + +
+Publisher: Clear comment on successful publish and on window close #4885 + +Clear comment text field on successful publish and on window close. + + +___ + +
+ + +
+Publisher: Make sure to reset asset widget when hidden and reshown #4886

Make sure to reset the asset widget when it is hidden and reshown. Without this, the asset list in the set-asset widget would never refresh when changing context on an existing instance, and thus would not show assets created after the first time that widget was launched.


___

+ +### **πŸ› Bug fixes** + + +
+Maya: Fix nested model instances. #4852 + +Fix nested model instance under review instance, where data collection was not including "Display Lights" and "Focal Length". + + +___ + +
+ + +
+Maya: Make default namespace naming backwards compatible #4873 + +Namespaces of loaded references are now _by default_ back to what they were before #4511 + + +___ + +
+ + +
+Nuke: Legacy convertor skips deprecation warnings #4846

The Nuke legacy convertor was triggering a deprecated function, which produced a lot of logs and slowed down the whole process. Changed the convertor to skip all nodes without `AVALON_TAB` to avoid the warnings.


___

+ + +
+3dsmax: move startup script logic to hook #4849

The startup script for OpenPype was interfering with the Open Last Workfile feature. Moving this logic from a simple command line argument in the Settings to a pre-launch hook fixes the order of command line arguments and makes both features work.


___

+ + +
+Maya: Don't change time slider ranges in `get_frame_range` #4858 + +Don't change time slider ranges in `get_frame_range` + + +___ + +
+ + +
+Maya: Looks - calculate hash for tx texture #4878

A texture hash is calculated for textures used in a published look and is used as a key in a dictionary. After recent changes, this hash was not calculated for TX files, resulting in a `None` value as the key and crashing publishing. This PR adds the texture hash for TX files to solve that issue.


___

+ + +
+Houdini: Collect `currentFile` context data separate from workfile instance #4883

Fix publishing without an active workfile instance due to missing `currentFile` data. Now `currentFile` is collected into the context in Houdini through a context plugin regardless of the active instances.


___

+ + +
+Nuke: fixed broken slate workflow once published on deadline #4887

The slate workflow is now working as expected and Validate Sequence Frames no longer raises once the slate frame is included.


___

+ + +
+Add fps as instance.data in collect review in Houdini. #4888

Fix the bug of failing to publish extract review in Houdini.

Original error:
```python
  File "OpenPype\build\exe.win-amd64-3.9\openpype\plugins\publish\extract_review.py", line 516, in prepare_temp_data
    "fps": float(instance.data["fps"]),
KeyError: 'fps'
```


___

+ + +
+TrayPublisher: Fill missing data for instances with review #4891

Fill required data on the instance in TrayPublisher if the instance has the review family. The data are required by ExtractReview and it would be complicated to do a proper fix at this moment! The collector does for review instances what https://github.com/ynput/OpenPype/pull/4383 did.


___

+ + +
+Publisher: Keep track about current context and fix context selection widget #4892 + +Change selected context to current context on reset. Fix bug when context widget is re-enabled. + + +___ + +
+ + +
+Scene inventory: Model refresh fix with cherry picking #4895 + +Fix cherry pick issue in scene inventory. + + +___ + +
+ + +
+Nuke: Pre-render and missing review flag on instance causing crash #4897

If an instance created in Nuke was missing the `review` flag, the collector crashed.


___

+ +### **Merged pull requests** + + +
+After Effects: fix handles KeyError #4727

Sometimes when publishing with AE (we only saw this error on AE 2023), we got a KeyError for the handles in the "Collect Workfile" step. So I get the handles from the context if there are no handles in the asset entity.


___

+ + + + ## [3.15.4](https://github.com/ynput/OpenPype/tree/3.15.4) diff --git a/openpype/version.py b/openpype/version.py index b43cc436bb..02537af762 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.5-nightly.2" +__version__ = "3.15.5" diff --git a/pyproject.toml b/pyproject.toml index b97ad8923c..2f40d58f56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.15.4" # OpenPype +version = "3.15.5" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 55957621645e3ddb6e313916509cbcad275a76e8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 17:47:33 +0200 Subject: [PATCH 09/25] Fusion: Simplify creator icons code (#4899) * Simplify setting creator icons * Use font-awesome 5 explicitly --- openpype/hosts/fusion/plugins/create/create_saver.py | 6 +----- openpype/hosts/fusion/plugins/create/create_workfile.py | 6 +----- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/fusion/plugins/create/create_saver.py b/openpype/hosts/fusion/plugins/create/create_saver.py index 56085b0a06..cedc4029fa 100644 --- a/openpype/hosts/fusion/plugins/create/create_saver.py +++ b/openpype/hosts/fusion/plugins/create/create_saver.py @@ -1,7 +1,5 @@ import os -import qtawesome - from openpype.hosts.fusion.api import ( get_current_comp, comp_lock_and_undo_chunk, @@ -28,6 +26,7 @@ class CreateSaver(Creator): family = "render" default_variants = ["Main", "Mask"] description = "Fusion Saver to generate image sequence" + icon = "fa5.eye" instance_attributes = ["reviewable"] @@ -89,9 +88,6 @@ class CreateSaver(Creator): self._add_instance_to_context(created_instance) - def get_icon(self): - return qtawesome.icon("fa.eye", color="white") - def update_instances(self, update_list): for created_inst, _changes in update_list: new_data = created_inst.data_to_store() diff --git a/openpype/hosts/fusion/plugins/create/create_workfile.py b/openpype/hosts/fusion/plugins/create/create_workfile.py index 0bb3a0d3d4..40721ea88a 100644 --- a/openpype/hosts/fusion/plugins/create/create_workfile.py +++ b/openpype/hosts/fusion/plugins/create/create_workfile.py @@ -1,5 +1,3 @@ -import qtawesome - from openpype.hosts.fusion.api import ( get_current_comp ) @@ -15,6 +13,7 @@ class FusionWorkfileCreator(AutoCreator): identifier = "workfile" family = "workfile" label = "Workfile" + icon = "fa5.file" default_variant = "Main" @@ -104,6 +103,3 @@ class FusionWorkfileCreator(AutoCreator): existing_instance["asset"] = asset_name existing_instance["task"] = task_name existing_instance["subset"] = subset_name - - def get_icon(self): - return qtawesome.icon("fa.file-o", color="white") From 3a096bcf8bf4ff60ead25495a63ec2bcf6054d18 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 17:51:40 +0200 Subject: [PATCH 10/25] Use explicit font awesome 5 name --- openpype/hosts/houdini/plugins/create/create_workfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 9884fca325..1a8537adcd 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -14,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): identifier = 
"io.openpype.creators.houdini.workfile" label = "Workfile" family = "workfile" - icon = "file-o" + icon = "fa5.file" default_variant = "Main" From 0ef59fcb39a033a11b94e0d3884b1b48029e75eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 25 Apr 2023 08:18:46 +0200 Subject: [PATCH 11/25] adding ci user and email --- .github/workflows/update_bug_report.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/update_bug_report.yml b/.github/workflows/update_bug_report.yml index 9f44d7c7a6..7a1bfb7bfd 100644 --- a/.github/workflows/update_bug_report.yml +++ b/.github/workflows/update_bug_report.yml @@ -18,6 +18,8 @@ jobs: uses: ynput/gha-populate-form-version@main with: github_token: ${{ secrets.YNPUT_BOT_TOKEN }} + github_user: ${{ secrets.CI_USER }} + github_email: ${{ secrets.CI_EMAIL }} registry: github dropdown: _version limit_to: 100 From 0567701ddb827f9644e9f9631f56d4b3c73d01c5 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Tue, 25 Apr 2023 06:32:26 +0000 Subject: [PATCH 12/25] chore(): update bug report / version --- .github/ISSUE_TEMPLATE/bug_report.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index c4073ed1af..fe86a8400b 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -35,6 +35,10 @@ body: label: Version description: What version are you running? Look to OpenPype Tray options: + - 3.15.5 + - 3.15.5-nightly.2 + - 3.15.5-nightly.1 + - 3.15.4 - 3.15.4-nightly.3 - 3.15.4-nightly.2 - 3.15.4-nightly.1 @@ -131,10 +135,6 @@ body: - 3.13.1-nightly.2 - 3.13.1-nightly.1 - 3.13.0 - - 3.13.0-nightly.1 - - 3.12.3-nightly.3 - - 3.12.3-nightly.2 - - 3.12.3-nightly.1 validations: required: true - type: dropdown @@ -166,8 +166,8 @@ body: label: Are there any labels you wish to add? description: Please search labels and identify those related to your bug. options: - - label: I have added the relevant labels to the bug report. - required: true + - label: I have added the relevant labels to the bug report. 
+ required: true - type: textarea id: logs attributes: From 4ed1c1f65d6f99ece0f35c404e6ca40c3ee2c5fd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 25 Apr 2023 10:29:12 +0200 Subject: [PATCH 13/25] Enhancement: Fix PySide 6.5 support for loader (#4900) * Reverse inheritance order to avoid PySide6.5 bug `PYSIDE-2294` & `PYSIDE-2304` * Fix PySide6 support --- openpype/tools/loader/model.py | 2 +- openpype/tools/publisher/widgets/list_view_widgets.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 14671e341f..e5d8400031 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -123,7 +123,7 @@ class BaseRepresentationModel(object): self.remote_provider = remote_provider -class SubsetsModel(TreeModel, BaseRepresentationModel): +class SubsetsModel(BaseRepresentationModel, TreeModel): doc_fetched = QtCore.Signal() refreshed = QtCore.Signal(bool) diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 227ae7bda9..cb5a203130 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -1039,7 +1039,8 @@ class InstanceListView(AbstractInstanceView): proxy_index = proxy_model.mapFromSource(select_indexes[0]) selection_model.setCurrentIndex( proxy_index, - selection_model.ClearAndSelect | selection_model.Rows + QtCore.QItemSelectionModel.ClearAndSelect + | QtCore.QItemSelectionModel.Rows ) return From 38347ece5a7e60f23d643568e1268e3900f8fa21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 25 Apr 2023 10:37:49 +0200 Subject: [PATCH 14/25] Publisher: Small style changes (#4894) * border hover has color without alpha * changed border radius to 0.2em * removed border from scroll area * variant hint button has 0.5em width * inputs in attribute definitions have smaller padding * label is shown only to value inputs and added tooltips * change spacing for attribute befinitions * align labels to right * implemented 'ComboBox' which ignores wheel events and has styled delegate * PixmalLabel has minimum sizeHint * cards are smaller * renamed 'Options' to 'Context' * implemented active state changes in card view * set object name of main window to "PublishWindow" * plugin don't have to pass 'title' to an error * fix PySide6 support for custom keysequences * check for exact match for all bindings * added validation of exact match for save shortcut --- openpype/pipeline/publish/publish_plugins.py | 2 +- openpype/style/data.json | 2 +- openpype/style/style.css | 29 ++++++-- openpype/tools/attribute_defs/widgets.py | 10 ++- openpype/tools/publisher/constants.py | 5 +- openpype/tools/publisher/control.py | 11 ++- .../publisher/widgets/card_view_widgets.py | 72 ++++++++++++++++--- .../tools/publisher/widgets/create_widget.py | 4 ++ .../publisher/widgets/list_view_widgets.py | 5 +- .../publisher/widgets/precreate_widget.py | 14 +++- openpype/tools/publisher/widgets/widgets.py | 33 +++++++-- openpype/tools/publisher/window.py | 19 +++-- openpype/tools/utils/__init__.py | 2 + openpype/tools/utils/widgets.py | 34 ++++++++- 14 files changed, 208 insertions(+), 34 deletions(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 331235fadc..a38896ec8e 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ 
b/openpype/pipeline/publish/publish_plugins.py @@ -45,7 +45,7 @@ class PublishValidationError(Exception): def __init__(self, message, title=None, description=None, detail=None): self.message = message - self.title = title or "< Missing title >" + self.title = title self.description = description or message self.detail = detail super(PublishValidationError, self).__init__(message) diff --git a/openpype/style/data.json b/openpype/style/data.json index 404ca6944c..bea2a3d407 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -48,7 +48,7 @@ "bg-view-selection-hover": "rgba(92, 173, 214, .8)", "border": "#373D48", - "border-hover": "rgba(168, 175, 189, .3)", + "border-hover": "rgb(92, 99, 111)", "border-focus": "rgb(92, 173, 214)", "restart-btn-bg": "#458056", diff --git a/openpype/style/style.css b/openpype/style/style.css index da477eeefa..29abb1d351 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -35,6 +35,11 @@ QWidget:disabled { color: {color:font-disabled}; } +/* Some DCCs have set borders to solid color */ +QScrollArea { + border: none; +} + QLabel { background: transparent; } @@ -42,7 +47,7 @@ QLabel { /* Inputs */ QAbstractSpinBox, QLineEdit, QPlainTextEdit, QTextEdit { border: 1px solid {color:border}; - border-radius: 0.3em; + border-radius: 0.2em; background: {color:bg-inputs}; padding: 0.1em; } @@ -226,7 +231,7 @@ QMenu::separator { /* Combobox */ QComboBox { border: 1px solid {color:border}; - border-radius: 3px; + border-radius: 0.2em; padding: 1px 3px 1px 3px; background: {color:bg-inputs}; } @@ -474,7 +479,6 @@ QAbstractItemView:disabled{ } QAbstractItemView::item:hover { - /* color: {color:bg-view-hover}; */ background: {color:bg-view-hover}; } @@ -743,7 +747,7 @@ OverlayMessageWidget QWidget { #TypeEditor, #ToolEditor, #NameEditor, #NumberEditor { background: transparent; - border-radius: 0.3em; + border-radius: 0.2em; } #TypeEditor:focus, #ToolEditor:focus, #NameEditor:focus, #NumberEditor:focus { @@ -860,7 +864,13 @@ OverlayMessageWidget QWidget { background: {color:bg-view-hover}; } -/* New Create/Publish UI */ +/* Publisher UI (Create/Publish) */ +#PublishWindow QAbstractSpinBox, QLineEdit, QPlainTextEdit, QTextEdit { + padding: 1px; +} +#PublishWindow QComboBox { + padding: 1px 1px 1px 0.2em; +} PublisherTabsWidget { background: {color:publisher:tab-bg}; } @@ -944,6 +954,7 @@ PixmapButton:disabled { border-top-left-radius: 0px; padding-top: 0.5em; padding-bottom: 0.5em; + width: 0.5em; } #VariantInput[state="new"], #VariantInput[state="new"]:focus, #VariantInput[state="new"]:hover { border-color: {color:publisher:success}; @@ -1072,7 +1083,7 @@ ValidationArtistMessage QLabel { #AssetNameInputWidget { background: {color:bg-inputs}; border: 1px solid {color:border}; - border-radius: 0.3em; + border-radius: 0.2em; } #AssetNameInputWidget QWidget { @@ -1465,6 +1476,12 @@ CreateNextPageOverlay { } /* Attribute Definition widgets */ +AttributeDefinitionsWidget QAbstractSpinBox, QLineEdit, QPlainTextEdit, QTextEdit { + padding: 1px; +} +AttributeDefinitionsWidget QComboBox { + padding: 1px 1px 1px 0.2em; +} InViewButton, InViewButton:disabled { background: transparent; } diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index 0d4e1e88a9..d46c238da1 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -1,4 +1,3 @@ -import uuid import copy from qtpy import QtWidgets, QtCore @@ -126,7 +125,7 @@ class 
AttributeDefinitionsWidget(QtWidgets.QWidget): row = 0 for attr_def in attr_defs: - if not isinstance(attr_def, UIDef): + if attr_def.is_value_def: if attr_def.key in self._current_keys: raise KeyError( "Duplicated key \"{}\"".format(attr_def.key)) @@ -144,11 +143,16 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): col_num = 2 - expand_cols - if attr_def.label: + if attr_def.is_value_def and attr_def.label: label_widget = QtWidgets.QLabel(attr_def.label, self) tooltip = attr_def.tooltip if tooltip: label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) layout.addWidget( label_widget, row, 0, 1, expand_cols ) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index 5d23886aa8..660fccecf1 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -2,7 +2,7 @@ from qtpy import QtCore, QtGui # ID of context item in instance view CONTEXT_ID = "context" -CONTEXT_LABEL = "Options" +CONTEXT_LABEL = "Context" # Not showed anywhere - used as identifier CONTEXT_GROUP = "__ContextGroup__" @@ -15,6 +15,9 @@ VARIANT_TOOLTIP = ( "\nnumerical characters (0-9) dot (\".\") or underscore (\"_\")." ) +INPUTS_LAYOUT_HSPACING = 4 +INPUTS_LAYOUT_VSPACING = 2 + # Roles for instance views INSTANCE_ID_ROLE = QtCore.Qt.UserRole + 1 SORT_VALUE_ROLE = QtCore.Qt.UserRole + 2 diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 7754e4aa02..4b083d4bc8 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -163,7 +163,7 @@ class AssetDocsCache: return copy.deepcopy(self._full_asset_docs_by_name[asset_name]) -class PublishReport: +class PublishReportMaker: """Report for single publishing process. Report keeps current state of publishing and currently processed plugin. @@ -784,6 +784,13 @@ class PublishValidationErrors: # Make sure the cached report is cleared plugin_id = self._plugins_proxy.get_plugin_id(plugin) + if not error.title: + if hasattr(plugin, "label") and plugin.label: + plugin_label = plugin.label + else: + plugin_label = plugin.__name__ + error.title = plugin_label + self._error_items.append( ValidationErrorItem.from_result(plugin_id, error, instance) ) @@ -1674,7 +1681,7 @@ class PublisherController(BasePublisherController): # pyblish.api.Context self._publish_context = None # Pyblish report - self._publish_report = PublishReport(self) + self._publish_report = PublishReportMaker(self) # Store exceptions of validation error self._publish_validation_errors = PublishValidationErrors() diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 0734e1bc27..13715bc73c 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -9,7 +9,7 @@ Only one item can be selected at a time. ``` : Icon. 
Can have Warning icon when context is not right β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Options β”‚ +β”‚ Context β”‚ β”‚ ────────── β”‚ β”‚ [x]β”‚ β”‚ [x]β”‚ @@ -202,7 +202,7 @@ class ConvertorItemsGroupWidget(BaseGroupWidget): class InstanceGroupWidget(BaseGroupWidget): """Widget wrapping instances under group.""" - active_changed = QtCore.Signal() + active_changed = QtCore.Signal(str, str, bool) def __init__(self, group_icons, *args, **kwargs): super(InstanceGroupWidget, self).__init__(*args, **kwargs) @@ -253,13 +253,16 @@ class InstanceGroupWidget(BaseGroupWidget): instance, group_icon, self ) widget.selected.connect(self._on_widget_selection) - widget.active_changed.connect(self.active_changed) + widget.active_changed.connect(self._on_active_changed) self._widgets_by_id[instance.id] = widget self._content_layout.insertWidget(widget_idx, widget) widget_idx += 1 self._update_ordered_item_ids() + def _on_active_changed(self, instance_id, value): + self.active_changed.emit(self.group_name, instance_id, value) + class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" @@ -332,7 +335,7 @@ class ContextCardWidget(CardWidget): icon_layout.addWidget(icon_widget) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 5, 10, 5) + layout.setContentsMargins(0, 2, 10, 2) layout.addLayout(icon_layout, 0) layout.addWidget(label_widget, 1) @@ -363,7 +366,7 @@ class ConvertorItemCardWidget(CardWidget): icon_layout.addWidget(icon_widget) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 5, 10, 5) + layout.setContentsMargins(0, 2, 10, 2) layout.addLayout(icon_layout, 0) layout.addWidget(label_widget, 1) @@ -377,7 +380,7 @@ class ConvertorItemCardWidget(CardWidget): class InstanceCardWidget(CardWidget): """Card widget representing instance.""" - active_changed = QtCore.Signal() + active_changed = QtCore.Signal(str, bool) def __init__(self, instance, group_icon, parent): super(InstanceCardWidget, self).__init__(parent) @@ -424,7 +427,7 @@ class InstanceCardWidget(CardWidget): top_layout.addWidget(expand_btn, 0) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 5, 10, 5) + layout.setContentsMargins(0, 2, 10, 2) layout.addLayout(top_layout) layout.addWidget(detail_widget) @@ -445,6 +448,10 @@ class InstanceCardWidget(CardWidget): def set_active_toggle_enabled(self, enabled): self._active_checkbox.setEnabled(enabled) + @property + def is_active(self): + return self._active_checkbox.isChecked() + def set_active(self, new_value): """Set instance as active.""" checkbox_value = self._active_checkbox.isChecked() @@ -515,7 +522,7 @@ class InstanceCardWidget(CardWidget): return self.instance["active"] = new_value - self.active_changed.emit() + self.active_changed.emit(self._id, new_value) def _on_expend_clicked(self): self._set_expanded() @@ -584,6 +591,45 @@ class InstanceCardView(AbstractInstanceView): result.setWidth(width) return result + def _toggle_instances(self, value): + if not self._active_toggle_enabled: + return + + widgets = self._get_selected_widgets() + changed = False + for widget in widgets: + if not isinstance(widget, InstanceCardWidget): + continue + + is_active = widget.is_active + if value == -1: + widget.set_active(not is_active) + changed = True + continue + + _value = bool(value) + if is_active is not _value: + widget.set_active(_value) + changed = True + + if changed: + self.active_changed.emit() + + def keyPressEvent(self, event): + if event.key() == QtCore.Qt.Key_Space: + 
self._toggle_instances(-1) + return True + + elif event.key() == QtCore.Qt.Key_Backspace: + self._toggle_instances(0) + return True + + elif event.key() == QtCore.Qt.Key_Return: + self._toggle_instances(1) + return True + + return super(InstanceCardView, self).keyPressEvent(event) + def _get_selected_widgets(self): output = [] if ( @@ -742,7 +788,15 @@ class InstanceCardView(AbstractInstanceView): for widget in self._widgets_by_group.values(): widget.update_instance_values() - def _on_active_changed(self): + def _on_active_changed(self, group_name, instance_id, value): + group_widget = self._widgets_by_group[group_name] + instance_widget = group_widget.get_widget_by_item_id(instance_id) + if instance_widget.is_selected: + for widget in self._get_selected_widgets(): + if isinstance(widget, InstanceCardWidget): + widget.set_active(value) + else: + self._select_item_clear(instance_id, group_name, instance_widget) self.active_changed.emit() def _on_widget_selection(self, instance_id, group_name, selection_type): diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index db20b21ed7..30980af03d 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -22,6 +22,8 @@ from ..constants import ( CREATOR_IDENTIFIER_ROLE, CREATOR_THUMBNAIL_ENABLED_ROLE, CREATOR_SORT_ROLE, + INPUTS_LAYOUT_HSPACING, + INPUTS_LAYOUT_VSPACING, ) SEPARATORS = ("---separator---", "---") @@ -198,6 +200,8 @@ class CreateWidget(QtWidgets.QWidget): variant_subset_layout = QtWidgets.QFormLayout(variant_subset_widget) variant_subset_layout.setContentsMargins(0, 0, 0, 0) + variant_subset_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + variant_subset_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) variant_subset_layout.addRow("Variant", variant_widget) variant_subset_layout.addRow("Subset", subset_name_input) diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index cb5a203130..557e6559c8 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -11,7 +11,7 @@ selection can be enabled disabled using checkbox or keyboard key presses: - Backspace - disable selection ``` -|- Options +|- Context |- [x] | |- [x] | |- [x] @@ -486,6 +486,9 @@ class InstanceListView(AbstractInstanceView): group_widget.set_expanded(expanded) def _on_toggle_request(self, toggle): + if not self._active_toggle_enabled: + return + selected_instance_ids = self._instance_view.get_selected_instance_ids() if toggle == -1: active = None diff --git a/openpype/tools/publisher/widgets/precreate_widget.py b/openpype/tools/publisher/widgets/precreate_widget.py index 3037a0e12d..3bf0bc3657 100644 --- a/openpype/tools/publisher/widgets/precreate_widget.py +++ b/openpype/tools/publisher/widgets/precreate_widget.py @@ -2,6 +2,8 @@ from qtpy import QtWidgets, QtCore from openpype.tools.attribute_defs import create_widget_for_attr_def +from ..constants import INPUTS_LAYOUT_HSPACING, INPUTS_LAYOUT_VSPACING + class PreCreateWidget(QtWidgets.QWidget): def __init__(self, parent): @@ -81,6 +83,8 @@ class AttributesWidget(QtWidgets.QWidget): layout = QtWidgets.QGridLayout(self) layout.setContentsMargins(0, 0, 0, 0) + layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) self._layout = layout @@ -117,8 +121,16 @@ class AttributesWidget(QtWidgets.QWidget): col_num 
= 2 - expand_cols - if attr_def.label: + if attr_def.is_value_def and attr_def.label: label_widget = QtWidgets.QLabel(attr_def.label, self) + tooltip = attr_def.tooltip + if tooltip: + label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) self._layout.addWidget( label_widget, row, 0, 1, expand_cols ) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d2ce1fbcb2..cd1f1f5a96 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -9,7 +9,7 @@ import collections from qtpy import QtWidgets, QtCore, QtGui import qtawesome -from openpype.lib.attribute_definitions import UnknownDef, UIDef +from openpype.lib.attribute_definitions import UnknownDef from openpype.tools.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm @@ -36,6 +36,8 @@ from .icons import ( from ..constants import ( VARIANT_TOOLTIP, ResetKeySequence, + INPUTS_LAYOUT_HSPACING, + INPUTS_LAYOUT_VSPACING, ) @@ -1098,6 +1100,8 @@ class GlobalAttrsWidget(QtWidgets.QWidget): btns_layout.addWidget(cancel_btn) main_layout = QtWidgets.QFormLayout(self) + main_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + main_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) main_layout.addRow("Variant", variant_input) main_layout.addRow("Asset", asset_value_widget) main_layout.addRow("Task", task_value_widget) @@ -1346,6 +1350,8 @@ class CreatorAttrsWidget(QtWidgets.QWidget): content_layout.setColumnStretch(0, 0) content_layout.setColumnStretch(1, 1) content_layout.setAlignment(QtCore.Qt.AlignTop) + content_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + content_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) row = 0 for attr_def, attr_instances, values in result: @@ -1371,9 +1377,19 @@ class CreatorAttrsWidget(QtWidgets.QWidget): col_num = 2 - expand_cols - label = attr_def.label or attr_def.key + label = None + if attr_def.is_value_def: + label = attr_def.label or attr_def.key if label: label_widget = QtWidgets.QLabel(label, self) + tooltip = attr_def.tooltip + if tooltip: + label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) content_layout.addWidget( label_widget, row, 0, 1, expand_cols ) @@ -1474,6 +1490,8 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): attr_def_layout = QtWidgets.QGridLayout(attr_def_widget) attr_def_layout.setColumnStretch(0, 0) attr_def_layout.setColumnStretch(1, 1) + attr_def_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + attr_def_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) content_layout = QtWidgets.QVBoxLayout(content_widget) content_layout.addWidget(attr_def_widget, 0) @@ -1501,12 +1519,19 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): expand_cols = 1 col_num = 2 - expand_cols - label = attr_def.label or attr_def.key + label = None + if attr_def.is_value_def: + label = attr_def.label or attr_def.key if label: label_widget = QtWidgets.QLabel(label, content_widget) tooltip = attr_def.tooltip if tooltip: label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) attr_def_layout.addWidget( label_widget, row, 0, 1, expand_cols ) @@ -1517,7 +1542,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): ) row += 1 - if 
isinstance(attr_def, UIDef): + if not attr_def.is_value_def: continue widget.value_changed.connect(self._input_value_changed) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index e94979142a..b3471163ae 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -46,6 +46,8 @@ class PublisherWindow(QtWidgets.QDialog): def __init__(self, parent=None, controller=None, reset_on_show=None): super(PublisherWindow, self).__init__(parent) + self.setObjectName("PublishWindow") + self.setWindowTitle("OpenPype publisher") icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) @@ -440,15 +442,24 @@ class PublisherWindow(QtWidgets.QDialog): event.accept() return - if event.matches(QtGui.QKeySequence.Save): + save_match = event.matches(QtGui.QKeySequence.Save) + if save_match == QtGui.QKeySequence.ExactMatch: if not self._controller.publish_has_started: self._save_changes(True) event.accept() return - if ResetKeySequence.matches( - QtGui.QKeySequence(event.key() | event.modifiers()) - ): + # PySide6 Support + if hasattr(event, "keyCombination"): + reset_match_result = ResetKeySequence.matches( + QtGui.QKeySequence(event.keyCombination()) + ) + else: + reset_match_result = ResetKeySequence.matches( + QtGui.QKeySequence(event.modifiers() | event.key()) + ) + + if reset_match_result == QtGui.QKeySequence.ExactMatch: if not self.controller.publish_is_running: self.reset() event.accept() diff --git a/openpype/tools/utils/__init__.py b/openpype/tools/utils/__init__.py index 4292e2d726..4149763f80 100644 --- a/openpype/tools/utils/__init__.py +++ b/openpype/tools/utils/__init__.py @@ -1,6 +1,7 @@ from .widgets import ( FocusSpinBox, FocusDoubleSpinBox, + ComboBox, CustomTextComboBox, PlaceholderLineEdit, BaseClickableFrame, @@ -38,6 +39,7 @@ from .overlay_messages import ( __all__ = ( "FocusSpinBox", "FocusDoubleSpinBox", + "ComboBox", "CustomTextComboBox", "PlaceholderLineEdit", "BaseClickableFrame", diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index b416c56797..bae89aeb09 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -41,7 +41,28 @@ class FocusDoubleSpinBox(QtWidgets.QDoubleSpinBox): super(FocusDoubleSpinBox, self).wheelEvent(event) -class CustomTextComboBox(QtWidgets.QComboBox): +class ComboBox(QtWidgets.QComboBox): + """Base of combobox with pre-implement changes used in tools. + + Combobox is using styled delegate by default so stylesheets are propagated. + + Items are not changed on scroll until the combobox is in focus. 
+ """ + + def __init__(self, *args, **kwargs): + super(ComboBox, self).__init__(*args, **kwargs) + delegate = QtWidgets.QStyledItemDelegate() + self.setItemDelegate(delegate) + self.setFocusPolicy(QtCore.Qt.StrongFocus) + + self._delegate = delegate + + def wheelEvent(self, event): + if self.hasFocus(): + return super(ComboBox, self).wheelEvent(event) + + +class CustomTextComboBox(ComboBox): """Combobox which can have different text showed.""" def __init__(self, *args, **kwargs): @@ -253,6 +274,9 @@ class PixmapLabel(QtWidgets.QLabel): self._empty_pixmap = QtGui.QPixmap(0, 0) self._source_pixmap = pixmap + self._last_width = 0 + self._last_height = 0 + def set_source_pixmap(self, pixmap): """Change source image.""" self._source_pixmap = pixmap @@ -263,6 +287,12 @@ class PixmapLabel(QtWidgets.QLabel): size += size % 2 return size, size + def minimumSizeHint(self): + width, height = self._get_pix_size() + if width != self._last_width or height != self._last_height: + self._set_resized_pix() + return QtCore.QSize(width, height) + def _set_resized_pix(self): if self._source_pixmap is None: self.setPixmap(self._empty_pixmap) @@ -276,6 +306,8 @@ class PixmapLabel(QtWidgets.QLabel): QtCore.Qt.SmoothTransformation ) ) + self._last_width = width + self._last_height = height def resizeEvent(self, event): self._set_resized_pix() From a724bd1c77ca9ded191967648e96c2adda8619ea Mon Sep 17 00:00:00 2001 From: Ynbot Date: Wed, 26 Apr 2023 03:25:35 +0000 Subject: [PATCH 15/25] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 02537af762..080fd6eece 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.5" +__version__ = "3.15.6-nightly.1" From 61c37ebb2263af58666b314186652636186f3896 Mon Sep 17 00:00:00 2001 From: Seyedmohammadreza Hashemizadeh Date: Tue, 25 Apr 2023 15:54:25 +0200 Subject: [PATCH 16/25] add display handle setting for maya load references --- openpype/hosts/maya/plugins/load/load_reference.py | 9 ++++++--- openpype/settings/defaults/project_settings/maya.json | 3 ++- .../projects_schema/schemas/schema_maya_load.json | 8 ++++++++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index 0dbdb03bb7..3309d7c207 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -162,9 +162,12 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with parent_nodes(roots, parent=None): cmds.xform(group_name, zeroTransformPivots=True) - cmds.setAttr("{}.displayHandle".format(group_name), 1) - settings = get_project_settings(os.environ['AVALON_PROJECT']) + + display_handle = settings['maya']['load'].get('reference_loader', {}).get( + 'display_handle', True) + cmds.setAttr("{}.displayHandle".format(group_name), display_handle) + colors = settings['maya']['load']['colors'] c = colors.get(family) if c is not None: @@ -174,7 +177,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): (float(c[1]) / 255), (float(c[2]) / 255)) - cmds.setAttr("{}.displayHandle".format(group_name), 1) + cmds.setAttr("{}.displayHandle".format(group_name), display_handle) # get bounding box bbox = cmds.exactWorldBoundingBox(group_name) # get pivot position on world space diff --git 
a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 12223216cd..72b330ce7a 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1460,7 +1460,8 @@ }, "reference_loader": { "namespace": "{asset_name}_{subset}_##_", - "group_name": "_GRP" + "group_name": "_GRP", + "display_handle": true } }, "workfile_build": { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json index c1895c4824..4b6b97ab4e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json @@ -111,6 +111,14 @@ { "type": "label", "label": "Here's a link to the doc where you can find explanations about customing the naming of referenced assets: https://openpype.io/docs/admin_hosts_maya#load-plugins" + }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "display_handle", + "label": "Display Handle On Load References" } ] } From 0d4fb1d8162f5647f53abc6d66419bd5f7cce5ba Mon Sep 17 00:00:00 2001 From: Seyedmohammadreza Hashemizadeh Date: Wed, 26 Apr 2023 10:06:00 +0200 Subject: [PATCH 17/25] linting clean up --- openpype/hosts/maya/plugins/load/load_reference.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index 3309d7c207..86c2a92a07 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -164,9 +164,10 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): settings = get_project_settings(os.environ['AVALON_PROJECT']) - display_handle = settings['maya']['load'].get('reference_loader', {}).get( - 'display_handle', True) - cmds.setAttr("{}.displayHandle".format(group_name), display_handle) + display_handle = settings['maya']['load'].get( + 'reference_loader', {}).get('display_handle', True) + cmds.setAttr( + "{}.displayHandle".format(group_name), display_handle) colors = settings['maya']['load']['colors'] c = colors.get(family) @@ -177,7 +178,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): (float(c[1]) / 255), (float(c[2]) / 255)) - cmds.setAttr("{}.displayHandle".format(group_name), display_handle) + cmds.setAttr( + "{}.displayHandle".format(group_name), display_handle) # get bounding box bbox = cmds.exactWorldBoundingBox(group_name) # get pivot position on world space From 37ea36b811d427f1c31563967789837c26b96cd6 Mon Sep 17 00:00:00 2001 From: Seyedmohammadreza Hashemizadeh Date: Wed, 26 Apr 2023 10:38:00 +0200 Subject: [PATCH 18/25] cosmetiques --- openpype/hosts/maya/plugins/load/load_reference.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index 86c2a92a07..7d717dcd44 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -165,9 +165,11 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): settings = get_project_settings(os.environ['AVALON_PROJECT']) display_handle = settings['maya']['load'].get( - 'reference_loader', {}).get('display_handle', True) + 'reference_loader', 
{} + ).get('display_handle', True) cmds.setAttr( - "{}.displayHandle".format(group_name), display_handle) + "{}.displayHandle".format(group_name), display_handle + ) colors = settings['maya']['load']['colors'] c = colors.get(family) @@ -179,7 +181,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): (float(c[2]) / 255)) cmds.setAttr( - "{}.displayHandle".format(group_name), display_handle) + "{}.displayHandle".format(group_name), display_handle + ) # get bounding box bbox = cmds.exactWorldBoundingBox(group_name) # get pivot position on world space From 4107874eb999a6a5dfb7bf00c209b365b71bd796 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 27 Apr 2023 23:41:00 +0200 Subject: [PATCH 19/25] Project packager: Backup and restore can store only database (#4879) * added helper functions to client mongo api * pack and unpack project functions can work without project files * added flag argument to pack project command to zip only project files * unpack project has also only project argument * Fix extractions --- openpype/cli.py | 15 +- openpype/client/mongo.py | 223 +++++++++++++++++++++++++- openpype/lib/project_backpack.py | 267 +++++++++++++++++++------------ openpype/pype_commands.py | 8 +- 4 files changed, 394 insertions(+), 119 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index a650a9fdcc..54af42920d 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -415,11 +415,12 @@ def repack_version(directory): @main.command() @click.option("--project", help="Project name") @click.option( - "--dirpath", help="Directory where package is stored", default=None -) -def pack_project(project, dirpath): + "--dirpath", help="Directory where package is stored", default=None) +@click.option( + "--dbonly", help="Store only Database data", default=False, is_flag=True) +def pack_project(project, dirpath, dbonly): """Create a package of project with all files and database dump.""" - PypeCommands().pack_project(project, dirpath) + PypeCommands().pack_project(project, dirpath, dbonly) @main.command() @@ -427,9 +428,11 @@ def pack_project(project, dirpath): @click.option( "--root", help="Replace root which was stored in project", default=None ) -def unpack_project(zipfile, root): +@click.option( + "--dbonly", help="Store only Database data", default=False, is_flag=True) +def unpack_project(zipfile, root, dbonly): """Create a package of project with all files and database dump.""" - PypeCommands().unpack_project(zipfile, root) + PypeCommands().unpack_project(zipfile, root, dbonly) @main.command() diff --git a/openpype/client/mongo.py b/openpype/client/mongo.py index 72acbc5476..251041c028 100644 --- a/openpype/client/mongo.py +++ b/openpype/client/mongo.py @@ -5,6 +5,12 @@ import logging import pymongo import certifi +from bson.json_util import ( + loads, + dumps, + CANONICAL_JSON_OPTIONS +) + if sys.version_info[0] == 2: from urlparse import urlparse, parse_qs else: @@ -15,6 +21,49 @@ class MongoEnvNotSet(Exception): pass +def documents_to_json(docs): + """Convert documents to json string. + + Args: + Union[list[dict[str, Any]], dict[str, Any]]: Document/s to convert to + json string. + + Returns: + str: Json string with mongo documents. + """ + + return dumps(docs, json_options=CANONICAL_JSON_OPTIONS) + + +def load_json_file(filepath): + """Load mongo documents from a json file. + + Args: + filepath (str): Path to a json file. + + Returns: + Union[dict[str, Any], list[dict[str, Any]]]: Loaded content from a + json file. 
+ """ + + if not os.path.exists(filepath): + raise ValueError("Path {} was not found".format(filepath)) + + with open(filepath, "r") as stream: + content = stream.read() + return loads("".join(content)) + + +def get_project_database_name(): + """Name of database name where projects are available. + + Returns: + str: Name of database name where projects are. + """ + + return os.environ.get("AVALON_DB") or "avalon" + + def _decompose_url(url): """Decompose mongo url to basic components. @@ -210,12 +259,102 @@ class OpenPypeMongoConnection: return mongo_client -def get_project_database(): - db_name = os.environ.get("AVALON_DB") or "avalon" - return OpenPypeMongoConnection.get_mongo_client()[db_name] +# ------ Helper Mongo functions ------ +# Functions can be helpful with custom tools to backup/restore mongo state. +# Not meant as API functionality that should be used in production codebase! +def get_collection_documents(database_name, collection_name, as_json=False): + """Query all documents from a collection. + + Args: + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection where to look for collection. + as_json (Optional[bool]): Output should be a json string. + Default: 'False' + + Returns: + Union[list[dict[str, Any]], str]: Queried documents. + """ + + client = OpenPypeMongoConnection.get_mongo_client() + output = list(client[database_name][collection_name].find({})) + if as_json: + output = documents_to_json(output) + return output -def get_project_connection(project_name): +def store_collection(filepath, database_name, collection_name): + """Store collection documents to a json file. + + Args: + filepath (str): Path to a json file where documents will be stored. + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection to store. + """ + + # Make sure directory for output file exists + dirpath = os.path.dirname(filepath) + if not os.path.isdir(dirpath): + os.makedirs(dirpath) + + content = get_collection_documents(database_name, collection_name, True) + with open(filepath, "w") as stream: + stream.write(content) + + +def replace_collection_documents(docs, database_name, collection_name): + """Replace all documents in a collection with passed documents. + + Warnings: + All existing documents in collection will be removed if there are any. + + Args: + docs (list[dict[str, Any]]): New documents. + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection where new documents are + uploaded. + """ + + client = OpenPypeMongoConnection.get_mongo_client() + database = client[database_name] + if collection_name in database.list_collection_names(): + database.drop_collection(collection_name) + col = database[collection_name] + col.insert_many(docs) + + +def restore_collection(filepath, database_name, collection_name): + """Restore/replace collection from a json filepath. + + Warnings: + All existing documents in collection will be removed if there are any. + + Args: + filepath (str): Path to a json with documents. + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection where new documents are + uploaded. + """ + + docs = load_json_file(filepath) + replace_collection_documents(docs, database_name, collection_name) + + +def get_project_database(database_name=None): + """Database object where project collections are. 
+ + Args: + database_name (Optional[str]): Custom name of database. + + Returns: + pymongo.database.Database: Collection related to passed project. + """ + + if not database_name: + database_name = get_project_database_name() + return OpenPypeMongoConnection.get_mongo_client()[database_name] + + +def get_project_connection(project_name, database_name=None): """Direct access to mongo collection. We're trying to avoid using direct access to mongo. This should be used @@ -223,13 +362,83 @@ def get_project_connection(project_name): api calls for that. Args: - project_name(str): Project name for which collection should be + project_name (str): Project name for which collection should be returned. + database_name (Optional[str]): Custom name of database. Returns: - pymongo.Collection: Collection realated to passed project. + pymongo.collection.Collection: Collection related to passed project. """ if not project_name: raise ValueError("Invalid project name {}".format(str(project_name))) - return get_project_database()[project_name] + return get_project_database(database_name)[project_name] + + +def get_project_documents(project_name, database_name=None): + """Query all documents from project collection. + + Args: + project_name (str): Name of project. + database_name (Optional[str]): Name of mongo database where to look for + project. + + Returns: + list[dict[str, Any]]: Documents in project collection. + """ + + if not database_name: + database_name = get_project_database_name() + return get_collection_documents(database_name, project_name) + + +def store_project_documents(project_name, filepath, database_name=None): + """Store project documents to a file as json string. + + Args: + project_name (str): Name of project to store. + filepath (str): Path to a json file where output will be stored. + database_name (Optional[str]): Name of mongo database where to look for + project. + """ + + if not database_name: + database_name = get_project_database_name() + + store_collection(filepath, database_name, project_name) + + +def replace_project_documents(project_name, docs, database_name=None): + """Replace documents in mongo with passed documents. + + Warnings: + Existing project collection is removed if exists in mongo. + + Args: + project_name (str): Name of project. + docs (list[dict[str, Any]]): Documents to restore. + database_name (Optional[str]): Name of mongo database where project + collection will be created. + """ + + if not database_name: + database_name = get_project_database_name() + replace_collection_documents(docs, database_name, project_name) + + +def restore_project_documents(project_name, filepath, database_name=None): + """Replace documents in mongo with passed documents. + + Warnings: + Existing project collection is removed if exists in mongo. + + Args: + project_name (str): Name of project. + filepath (str): File to json file with project documents. + database_name (Optional[str]): Name of mongo database where project + collection will be created. + """ + + if not database_name: + database_name = get_project_database_name() + restore_collection(filepath, database_name, project_name) diff --git a/openpype/lib/project_backpack.py b/openpype/lib/project_backpack.py index ff2f1d4b88..07107ec011 100644 --- a/openpype/lib/project_backpack.py +++ b/openpype/lib/project_backpack.py @@ -1,16 +1,19 @@ -"""These lib functions are primarily for development purposes. +"""These lib functions are for development purposes. -WARNING: This is not meant for production data. 
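
A minimal usage sketch for the project document helpers added in the hunk above, assuming a configured OpenPype Mongo connection; the project name and file path are placeholders.

    from openpype.client.mongo import (
        store_project_documents,
        restore_project_documents,
    )

    # Dump every document of the "demo" project collection to a json file.
    store_project_documents("demo", "/tmp/demo_documents.json")

    # Later, drop the existing collection and recreate it from that dump.
    restore_project_documents("demo", "/tmp/demo_documents.json")
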
+WARNING: + This is not meant for production data. Please don't write code which is + dependent on functionality here. -Goal is to be able create package of current state of project with related -documents from mongo and files from disk to zip file and then be able recreate -the project based on the zip. +Goal is to be able to create package of current state of project with related +documents from mongo and files from disk to zip file and then be able +to recreate the project based on the zip. This gives ability to create project where a changes and tests can be done. -Keep in mind that to be able create a package of project has few requirements. -Possible requirement should be listed in 'pack_project' function. +Keep in mind that to be able to create a package of project has few +requirements. Possible requirement should be listed in 'pack_project' function. """ + import os import json import platform @@ -19,16 +22,12 @@ import shutil import datetime import zipfile -from bson.json_util import ( - loads, - dumps, - CANONICAL_JSON_OPTIONS +from openpype.client.mongo import ( + load_json_file, + get_project_connection, + replace_project_documents, + store_project_documents, ) -from openpype.client import ( - get_project, - get_whole_project, -) -from openpype.pipeline import AvalonMongoDB DOCUMENTS_FILE_NAME = "database" METADATA_FILE_NAME = "metadata" @@ -43,7 +42,52 @@ def add_timestamp(filepath): return new_base + ext -def pack_project(project_name, destination_dir=None): +def get_project_document(project_name, database_name=None): + """Query project document. + + Function 'get_project' from client api cannot be used as it does not allow + to change which 'database_name' is used. + + Args: + project_name (str): Name of project. + database_name (Optional[str]): Name of mongo database where to look for + project. + + Returns: + Union[dict[str, Any], None]: Project document or None. + """ + + col = get_project_connection(project_name, database_name) + return col.find_one({"type": "project"}) + + +def _pack_files_to_zip(zip_stream, source_path, root_path): + """Pack files to a zip stream. + + Args: + zip_stream (zipfile.ZipFile): Stream to a zipfile. + source_path (str): Path to a directory where files are. + root_path (str): Path to a directory which is used for calculation + of relative path. + """ + + for root, _, filenames in os.walk(source_path): + for filename in filenames: + filepath = os.path.join(root, filename) + # TODO add one more folder + archive_name = os.path.join( + PROJECT_FILES_DIR, + os.path.relpath(filepath, root_path) + ) + zip_stream.write(filepath, archive_name) + + +def pack_project( + project_name, + destination_dir=None, + only_documents=False, + database_name=None +): """Make a package of a project with mongo documents and files. This function has few restrictions: @@ -52,13 +96,18 @@ def pack_project(project_name, destination_dir=None): "{root[...]}/{project[name]}" Args: - project_name(str): Project that should be packaged. - destination_dir(str): Optional path where zip will be stored. Project's - root is used if not passed. + project_name (str): Project that should be packaged. + destination_dir (Optional[str]): Optional path where zip will be + stored. Project's root is used if not passed. + only_documents (Optional[bool]): Pack only Mongo documents and skip + files. + database_name (Optional[str]): Custom database name from which is + project queried. 
""" + print("Creating package of project \"{}\"".format(project_name)) # Validate existence of project - project_doc = get_project(project_name) + project_doc = get_project_document(project_name, database_name) if not project_doc: raise ValueError("Project \"{}\" was not found in database".format( project_name @@ -119,12 +168,7 @@ def pack_project(project_name, destination_dir=None): temp_docs_json = s.name # Query all project documents and store them to temp json - docs = list(get_whole_project(project_name)) - data = dumps( - docs, json_options=CANONICAL_JSON_OPTIONS - ) - with open(temp_docs_json, "w") as stream: - stream.write(data) + store_project_documents(project_name, temp_docs_json, database_name) print("Packing files into zip") # Write all to zip file @@ -133,16 +177,10 @@ def pack_project(project_name, destination_dir=None): zip_stream.write(temp_metadata_json, METADATA_FILE_NAME + ".json") # Add database documents zip_stream.write(temp_docs_json, DOCUMENTS_FILE_NAME + ".json") + # Add project files to zip - for root, _, filenames in os.walk(project_source_path): - for filename in filenames: - filepath = os.path.join(root, filename) - # TODO add one more folder - archive_name = os.path.join( - PROJECT_FILES_DIR, - os.path.relpath(filepath, root_path) - ) - zip_stream.write(filepath, archive_name) + if not only_documents: + _pack_files_to_zip(zip_stream, project_source_path, root_path) print("Cleaning up") # Cleanup @@ -152,80 +190,30 @@ def pack_project(project_name, destination_dir=None): print("*** Packing finished ***") -def unpack_project(path_to_zip, new_root=None): - """Unpack project zip file to recreate project. +def _unpack_project_files(unzip_dir, root_path, project_name): + """Move project files from unarchived temp folder to new root. + + Unpack is skipped if source files are not available in the zip. That can + happen if nothing was published yet or only documents were stored to + package. Args: - path_to_zip(str): Path to zip which was created using 'pack_project' - function. - new_root(str): Optional way how to set different root path for unpacked - project. + unzip_dir (str): Location where zip was unzipped. + root_path (str): Path to new root. + project_name (str): Name of project. 
""" - print("Unpacking project from zip {}".format(path_to_zip)) - if not os.path.exists(path_to_zip): - print("Zip file does not exists: {}".format(path_to_zip)) + + src_project_files_dir = os.path.join( + unzip_dir, PROJECT_FILES_DIR, project_name + ) + # Skip if files are not in the zip + if not os.path.exists(src_project_files_dir): return - tmp_dir = tempfile.mkdtemp(prefix="unpack_") - print("Zip is extracted to temp: {}".format(tmp_dir)) - with zipfile.ZipFile(path_to_zip, "r") as zip_stream: - zip_stream.extractall(tmp_dir) - - metadata_json_path = os.path.join(tmp_dir, METADATA_FILE_NAME + ".json") - with open(metadata_json_path, "r") as stream: - metadata = json.load(stream) - - docs_json_path = os.path.join(tmp_dir, DOCUMENTS_FILE_NAME + ".json") - with open(docs_json_path, "r") as stream: - content = stream.readlines() - docs = loads("".join(content)) - - low_platform = platform.system().lower() - project_name = metadata["project_name"] - source_root = metadata["root"] - root_path = source_root[low_platform] - - # Drop existing collection - dbcon = AvalonMongoDB() - database = dbcon.database - if project_name in database.list_collection_names(): - database.drop_collection(project_name) - print("Removed existing project collection") - - print("Creating project documents ({})".format(len(docs))) - # Create new collection with loaded docs - collection = database[project_name] - collection.insert_many(docs) - - # Skip change of root if is the same as the one stored in metadata - if ( - new_root - and (os.path.normpath(new_root) == os.path.normpath(root_path)) - ): - new_root = None - - if new_root: - print("Using different root path {}".format(new_root)) - root_path = new_root - - project_doc = get_project(project_name) - roots = project_doc["config"]["roots"] - key = tuple(roots.keys())[0] - update_key = "config.roots.{}.{}".format(key, low_platform) - collection.update_one( - {"_id": project_doc["_id"]}, - {"$set": { - update_key: new_root - }} - ) - # Make sure root path exists if not os.path.exists(root_path): os.makedirs(root_path) - src_project_files_dir = os.path.join( - tmp_dir, PROJECT_FILES_DIR, project_name - ) dst_project_files_dir = os.path.normpath( os.path.join(root_path, project_name) ) @@ -241,8 +229,83 @@ def unpack_project(path_to_zip, new_root=None): )) shutil.move(src_project_files_dir, dst_project_files_dir) + +def unpack_project( + path_to_zip, new_root=None, database_only=None, database_name=None +): + """Unpack project zip file to recreate project. + + Args: + path_to_zip (str): Path to zip which was created using 'pack_project' + function. + new_root (str): Optional way how to set different root path for + unpacked project. + database_only (Optional[bool]): Unpack only database from zip. + database_name (str): Name of database where project will be recreated. 
+ """ + + if database_only is None: + database_only = False + + print("Unpacking project from zip {}".format(path_to_zip)) + if not os.path.exists(path_to_zip): + print("Zip file does not exists: {}".format(path_to_zip)) + return + + tmp_dir = tempfile.mkdtemp(prefix="unpack_") + print("Zip is extracted to temp: {}".format(tmp_dir)) + with zipfile.ZipFile(path_to_zip, "r") as zip_stream: + if database_only: + for filename in ( + "{}.json".format(METADATA_FILE_NAME), + "{}.json".format(DOCUMENTS_FILE_NAME), + ): + zip_stream.extract(filename, tmp_dir) + else: + zip_stream.extractall(tmp_dir) + + metadata_json_path = os.path.join(tmp_dir, METADATA_FILE_NAME + ".json") + with open(metadata_json_path, "r") as stream: + metadata = json.load(stream) + + docs_json_path = os.path.join(tmp_dir, DOCUMENTS_FILE_NAME + ".json") + docs = load_json_file(docs_json_path) + + low_platform = platform.system().lower() + project_name = metadata["project_name"] + source_root = metadata["root"] + root_path = source_root[low_platform] + + # Drop existing collection + replace_project_documents(project_name, docs, database_name) + print("Creating project documents ({})".format(len(docs))) + + # Skip change of root if is the same as the one stored in metadata + if ( + new_root + and (os.path.normpath(new_root) == os.path.normpath(root_path)) + ): + new_root = None + + if new_root: + print("Using different root path {}".format(new_root)) + root_path = new_root + + project_doc = get_project_document(project_name) + roots = project_doc["config"]["roots"] + key = tuple(roots.keys())[0] + update_key = "config.roots.{}.{}".format(key, low_platform) + collection = get_project_connection(project_name, database_name) + collection.update_one( + {"_id": project_doc["_id"]}, + {"$set": { + update_key: new_root + }} + ) + + _unpack_project_files(tmp_dir, root_path, project_name) + # CLeanup print("Cleaning up") shutil.rmtree(tmp_dir) - dbcon.uninstall() print("*** Unpack finished ***") diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index dc5b3d63c3..6a24cb0ebc 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -353,12 +353,12 @@ class PypeCommands: version_packer = VersionRepacker(directory) version_packer.process() - def pack_project(self, project_name, dirpath): + def pack_project(self, project_name, dirpath, database_only): from openpype.lib.project_backpack import pack_project - pack_project(project_name, dirpath) + pack_project(project_name, dirpath, database_only) - def unpack_project(self, zip_filepath, new_root): + def unpack_project(self, zip_filepath, new_root, database_only): from openpype.lib.project_backpack import unpack_project - unpack_project(zip_filepath, new_root) + unpack_project(zip_filepath, new_root, database_only) From 37d7a87fd116b2f3351df6ac42500ea696b427e6 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Sat, 29 Apr 2023 03:25:06 +0000 Subject: [PATCH 20/25] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 080fd6eece..72297a4430 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.6-nightly.1" +__version__ = "3.15.6-nightly.2" From 3e2559c0c2797c8c3dba717ac9594cd22499b80b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 29 Apr 2023 16:32:52 +0100 Subject: [PATCH 21/25] Fix repair and validation --- 
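
A short sketch of how the reworked packaging functions from the patch above might be called, assuming default database settings; the project name and paths are placeholders. The same behaviour is exposed on the CLI through the new --dbonly flag of the pack_project and unpack_project commands.

    from openpype.lib.project_backpack import pack_project, unpack_project

    # Package only the Mongo documents of the project, skipping files on disk.
    pack_project("demo", destination_dir="/tmp/backups", only_documents=True)

    # Restore only the database part of a previously created package.
    unpack_project("/tmp/backups/demo.zip", database_only=True)
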
openpype/hosts/maya/plugins/publish/validate_attributes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_attributes.py b/openpype/hosts/maya/plugins/publish/validate_attributes.py index 6ca9afb9a4..7ebd9d7d03 100644 --- a/openpype/hosts/maya/plugins/publish/validate_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_attributes.py @@ -6,7 +6,7 @@ import pyblish.api from openpype.hosts.maya.api.lib import set_attribute from openpype.pipeline.publish import ( - RepairContextAction, + RepairAction, ValidateContentsOrder, ) @@ -26,7 +26,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin): order = ValidateContentsOrder label = "Attributes" hosts = ["maya"] - actions = [RepairContextAction] + actions = [RepairAction] optional = True attributes = None @@ -81,7 +81,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin): if node_name not in attributes: continue - for attr_name, expected in attributes.items(): + for attr_name, expected in attributes[node_name].items(): # Skip if attribute does not exist if not cmds.attributeQuery(attr_name, node=node, exists=True): From b8ce6e9e9c10383c7e7e0c36fba7bb603a5d9ee7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 May 2023 11:19:50 +0200 Subject: [PATCH 22/25] Photoshop: add autocreators for review and flat image (#4871) * OP-5656 - added auto creator for review in PS Review instance should be togglable. Review instance needs to be created for non publisher based workflows. * OP-5656 - refactored names * OP-5656 - refactored names * OP-5656 - new auto creator for flat image In old version flat image was created if no instances were created. Explicit auto creator added for clarity. Standardization of state of plugins * OP-5656 - updated according to auto image creator Subset template should be used from autocreator and not be separate. * OP-5656 - fix proper creator name * OP-5656 - fix log message * OP-5656 - fix use enable state * OP-5656 - fix formatting * OP-5656 - add review toggle to image instance For special cases where each image should have separate review. * OP-5656 - fix description * OP-5656 - fix not present asset and task in instance context * OP-5656 - refactor - both auto creators should use same class Provided separate description. * OP-5656 - fix - propagate review to families Image and auto image could have now review flag. Bottom logic is only for Webpublisher. * OP-5656 - fix - rename review files to avaid collision Image family produces jpg and png, jpg review would clash with name. It should be replaced by 'jpg_jpg'. * OP-5656 - fix - limit additional auto created only on WP In artist based publishing auto image would be created by auto creator (if enabled). Artist might want to disable image creation. * OP-5656 - added mark_for_review flag to Publish tab * OP-5656 - fixes for auto creator * OP-5656 - fixe - outputDef not needed outputDef should contain dict of output definition. In PS it doesn't make sense as it has separate extract_review without output definitions. * OP-5656 - added persistency of changes to auto creators Changes as enabling/disabling, changing review flag should persist. * OP-5656 - added documentation for admins * OP-5656 - added link to new documentation for admins * OP-5656 - Hound * OP-5656 - Hound * OP-5656 - fix shared families list * OP-5656 - added default variant for review and workfile creator For workfile Main was default variant, "" was for review. 
* OP-5656 - fix - use values from Settings * OP-5656 - fix - use original name of review for main review family outputName cannot be in repre or file would have ..._jpg.jpg * OP-5656 - refactor - standardized settings Active by default denotes if created instance is active (eg. publishable) when created. * OP-5656 - fixes for skipping collecting auto_image data["ids"] are necessary for extracting. Members are physical layers in image, ids are "virtual" items, won't get grouped into real image instance. * OP-5656 - reworked auto collectors This allows to use automatic test for proper testing. * OP-5656 - added automatic tests * OP-5656 - fixes for auto collectors * OP-5656 - removed unnecessary collector Logic moved to auto collectors. * OP-5656 - Hound --- .../create/workfile_creator.py => lib.py} | 23 +-- .../plugins/create/create_flatten_image.py | 120 ++++++++++++++ .../photoshop/plugins/create/create_image.py | 47 +++++- .../photoshop/plugins/create/create_review.py | 28 ++++ .../plugins/create/create_workfile.py | 28 ++++ .../plugins/publish/collect_auto_image.py | 101 ++++++++++++ .../plugins/publish/collect_auto_review.py | 92 +++++++++++ .../plugins/publish/collect_auto_workfile.py | 99 ++++++++++++ .../plugins/publish/collect_instances.py | 116 -------------- .../plugins/publish/collect_review.py | 32 +--- .../plugins/publish/collect_workfile.py | 57 ++----- .../plugins/publish/extract_review.py | 34 ++-- .../defaults/project_settings/photoshop.json | 29 +++- .../schema_project_photoshop.json | 151 +++++++++++++++--- .../test_publish_in_photoshop_auto_image.py | 93 +++++++++++ .../test_publish_in_photoshop_review.py | 111 +++++++++++++ website/docs/admin_hosts_photoshop.md | 127 +++++++++++++++ .../assets/admin_hosts_photoshop_settings.png | Bin 0 -> 14364 bytes website/sidebars.js | 1 + 19 files changed, 1044 insertions(+), 245 deletions(-) rename openpype/hosts/photoshop/{plugins/create/workfile_creator.py => lib.py} (83%) create mode 100644 openpype/hosts/photoshop/plugins/create/create_flatten_image.py create mode 100644 openpype/hosts/photoshop/plugins/create/create_review.py create mode 100644 openpype/hosts/photoshop/plugins/create/create_workfile.py create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_auto_image.py create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_auto_review.py create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py delete mode 100644 openpype/hosts/photoshop/plugins/publish/collect_instances.py create mode 100644 tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py create mode 100644 tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py create mode 100644 website/docs/admin_hosts_photoshop.md create mode 100644 website/docs/assets/admin_hosts_photoshop_settings.png diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/lib.py similarity index 83% rename from openpype/hosts/photoshop/plugins/create/workfile_creator.py rename to openpype/hosts/photoshop/lib.py index f5d56adcbc..ae7a33b7b6 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/lib.py @@ -7,28 +7,26 @@ from openpype.pipeline import ( from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances -class PSWorkfileCreator(AutoCreator): - identifier = "workfile" - family = "workfile" - - default_variant = "Main" - +class PSAutoCreator(AutoCreator): + """Generic autocreator to extend.""" def 
get_instance_attr_defs(self): return [] def collect_instances(self): for instance_data in cache_and_get_instances(self): creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: - subset_name = instance_data["subset"] - instance = CreatedInstance( - self.family, subset_name, instance_data, self + instance = CreatedInstance.from_existing( + instance_data, self ) self._add_instance_to_context(instance) def update_instances(self, update_list): - # nothing to change on workfiles - pass + self.log.debug("update_list:: {}".format(update_list)) + for created_inst, _changes in update_list: + api.stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) def create(self, options=None): existing_instance = None @@ -58,6 +56,9 @@ class PSWorkfileCreator(AutoCreator): project_name, host_name, None )) + if not self.active_on_create: + data["active"] = False + new_instance = CreatedInstance( self.family, subset_name, data, self ) diff --git a/openpype/hosts/photoshop/plugins/create/create_flatten_image.py b/openpype/hosts/photoshop/plugins/create/create_flatten_image.py new file mode 100644 index 0000000000..3bc61c8184 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_flatten_image.py @@ -0,0 +1,120 @@ +from openpype.pipeline import CreatedInstance + +from openpype.lib import BoolDef +import openpype.hosts.photoshop.api as api +from openpype.hosts.photoshop.lib import PSAutoCreator +from openpype.pipeline.create import get_subset_name +from openpype.client import get_asset_by_name + + +class AutoImageCreator(PSAutoCreator): + """Creates flatten image from all visible layers. + + Used in simplified publishing as auto created instance. + Must be enabled in Setting and template for subset name provided + """ + identifier = "auto_image" + family = "image" + + # Settings + default_variant = "" + # - Mark by default instance for review + mark_for_review = True + active_on_create = True + + def create(self, options=None): + existing_instance = None + for instance in self.create_context.instances: + if instance.creator_identifier == self.identifier: + existing_instance = instance + break + + context = self.create_context + project_name = context.get_current_project_name() + asset_name = context.get_current_asset_name() + task_name = context.get_current_task_name() + host_name = context.host_name + asset_doc = get_asset_by_name(project_name, asset_name) + + if existing_instance is None: + subset_name = get_subset_name( + self.family, self.default_variant, task_name, asset_doc, + project_name, host_name + ) + + publishable_ids = [layer.id for layer in api.stub().get_layers() + if layer.visible] + data = { + "asset": asset_name, + "task": task_name, + # ids are "virtual" layers, won't get grouped as 'members' do + # same difference in color coded layers in WP + "ids": publishable_ids + } + + if not self.active_on_create: + data["active"] = False + + creator_attributes = {"mark_for_review": self.mark_for_review} + data.update({"creator_attributes": creator_attributes}) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + api.stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + + elif ( # existing instance from different context + existing_instance["asset"] != asset_name + or existing_instance["task"] != task_name + ): + subset_name = get_subset_name( + self.family, self.default_variant, task_name, asset_doc, + project_name, host_name + ) + + 
existing_instance["asset"] = asset_name + existing_instance["task"] = task_name + existing_instance["subset"] = subset_name + + api.stub().imprint(existing_instance.get("instance_id"), + existing_instance.data_to_store()) + + def get_pre_create_attr_defs(self): + return [ + BoolDef( + "mark_for_review", + label="Review", + default=self.mark_for_review + ) + ] + + def get_instance_attr_defs(self): + return [ + BoolDef( + "mark_for_review", + label="Review" + ) + ] + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["AutoImageCreator"] + ) + + self.active_on_create = plugin_settings["active_on_create"] + self.default_variant = plugin_settings["default_variant"] + self.mark_for_review = plugin_settings["mark_for_review"] + self.enabled = plugin_settings["enabled"] + + def get_detail_description(self): + return """Creator for flatten image. + + Studio might configure simple publishing workflow. In that case + `image` instance is automatically created which will publish flat + image from all visible layers. + + Artist might disable this instance from publishing or from creating + review for it though. + """ diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 3d82d6b6f0..f3165fca57 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -23,6 +23,11 @@ class ImageCreator(Creator): family = "image" description = "Image creator" + # Settings + default_variants = "" + mark_for_review = False + active_on_create = True + def create(self, subset_name_from_ui, data, pre_create_data): groups_to_create = [] top_layers_to_wrap = [] @@ -94,6 +99,12 @@ class ImageCreator(Creator): data.update({"layer_name": layer_name}) data.update({"long_name": "_".join(layer_names_in_hierarchy)}) + creator_attributes = {"mark_for_review": self.mark_for_review} + data.update({"creator_attributes": creator_attributes}) + + if not self.active_on_create: + data["active"] = False + new_instance = CreatedInstance(self.family, subset_name, data, self) @@ -134,11 +145,6 @@ class ImageCreator(Creator): self.host.remove_instance(instance) self._remove_instance_from_context(instance) - def get_default_variants(self): - return [ - "Main" - ] - def get_pre_create_attr_defs(self): output = [ BoolDef("use_selection", default=True, @@ -148,10 +154,34 @@ class ImageCreator(Creator): label="Create separate instance for each selected"), BoolDef("use_layer_name", default=False, - label="Use layer name in subset") + label="Use layer name in subset"), + BoolDef( + "mark_for_review", + label="Create separate review", + default=False + ) ] return output + def get_instance_attr_defs(self): + return [ + BoolDef( + "mark_for_review", + label="Review" + ) + ] + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["ImageCreator"] + ) + + self.active_on_create = plugin_settings["active_on_create"] + self.default_variants = plugin_settings["default_variants"] + self.mark_for_review = plugin_settings["mark_for_review"] + self.enabled = plugin_settings["enabled"] + + def get_detail_description(self): return """Creator for Image instances @@ -180,6 +210,11 @@ class ImageCreator(Creator): but layer name should be used (set explicitly in UI or implicitly if multiple images should be created), it is added in capitalized form as a suffix to subset name. 
+ + Each image could have its separate review created if necessary via + `Create separate review` toggle. + But more use case is to use separate `review` instance to create review + from all published items. """ def _handle_legacy(self, instance_data): diff --git a/openpype/hosts/photoshop/plugins/create/create_review.py b/openpype/hosts/photoshop/plugins/create/create_review.py new file mode 100644 index 0000000000..064485d465 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_review.py @@ -0,0 +1,28 @@ +from openpype.hosts.photoshop.lib import PSAutoCreator + + +class ReviewCreator(PSAutoCreator): + """Creates review instance which might be disabled from publishing.""" + identifier = "review" + family = "review" + + default_variant = "Main" + + def get_detail_description(self): + return """Auto creator for review. + + Photoshop review is created from all published images or from all + visible layers if no `image` instances got created. + + Review might be disabled by an artist (instance shouldn't be deleted as + it will get recreated in next publish either way). + """ + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["ReviewCreator"] + ) + + self.default_variant = plugin_settings["default_variant"] + self.active_on_create = plugin_settings["active_on_create"] + self.enabled = plugin_settings["enabled"] diff --git a/openpype/hosts/photoshop/plugins/create/create_workfile.py b/openpype/hosts/photoshop/plugins/create/create_workfile.py new file mode 100644 index 0000000000..d498f0549c --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_workfile.py @@ -0,0 +1,28 @@ +from openpype.hosts.photoshop.lib import PSAutoCreator + + +class WorkfileCreator(PSAutoCreator): + identifier = "workfile" + family = "workfile" + + default_variant = "Main" + + def get_detail_description(self): + return """Auto creator for workfile. + + It is expected that each publish will also publish its source workfile + for safekeeping. This creator triggers automatically without need for + an artist to remember and trigger it explicitly. + + Workfile instance could be disabled if it is not required to publish + workfile. (Instance shouldn't be deleted though as it will be recreated + in next publish automatically). + """ + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["WorkfileCreator"] + ) + + self.active_on_create = plugin_settings["active_on_create"] + self.enabled = plugin_settings["enabled"] diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py new file mode 100644 index 0000000000..ce408f8d01 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py @@ -0,0 +1,101 @@ +import pyblish.api + +from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import get_subset_name + + +class CollectAutoImage(pyblish.api.ContextPlugin): + """Creates auto image in non artist based publishes (Webpublisher). 
+ + 'remotepublish' should be renamed to 'autopublish' or similar in the future + """ + + label = "Collect Auto Image" + order = pyblish.api.CollectorOrder + hosts = ["photoshop"] + order = pyblish.api.CollectorOrder + 0.2 + + targets = ["remotepublish"] + + def process(self, context): + family = "image" + for instance in context: + creator_identifier = instance.data.get("creator_identifier") + if creator_identifier and creator_identifier == "auto_image": + self.log.debug("Auto image instance found, won't create new") + return + + project_name = context.data["anatomyData"]["project"]["name"] + proj_settings = context.data["project_settings"] + task_name = context.data["anatomyData"]["task"]["name"] + host_name = context.data["hostName"] + asset_doc = context.data["assetEntity"] + asset_name = asset_doc["name"] + + auto_creator = proj_settings.get( + "photoshop", {}).get( + "create", {}).get( + "AutoImageCreator", {}) + + if not auto_creator or not auto_creator["enabled"]: + self.log.debug("Auto image creator disabled, won't create new") + return + + stub = photoshop.stub() + stored_items = stub.get_layers_metadata() + for item in stored_items: + if item.get("creator_identifier") == "auto_image": + if not item.get("active"): + self.log.debug("Auto_image instance disabled") + return + + layer_items = stub.get_layers() + + publishable_ids = [layer.id for layer in layer_items + if layer.visible] + + # collect stored image instances + instance_names = [] + for layer_item in layer_items: + layer_meta_data = stub.read(layer_item, stored_items) + + # Skip layers without metadata. + if layer_meta_data is None: + continue + + # Skip containers. + if "container" in layer_meta_data["id"]: + continue + + # active might not be in legacy meta + if layer_meta_data.get("active", True) and layer_item.visible: + instance_names.append(layer_meta_data["subset"]) + + if len(instance_names) == 0: + variants = proj_settings.get( + "photoshop", {}).get( + "create", {}).get( + "CreateImage", {}).get( + "default_variants", ['']) + family = "image" + + variant = context.data.get("variant") or variants[0] + + subset_name = get_subset_name( + family, variant, task_name, asset_doc, + project_name, host_name + ) + + instance = context.create_instance(subset_name) + instance.data["family"] = family + instance.data["asset"] = asset_name + instance.data["subset"] = subset_name + instance.data["ids"] = publishable_ids + instance.data["publish"] = True + instance.data["creator_identifier"] = "auto_image" + + if auto_creator["mark_for_review"]: + instance.data["creator_attributes"] = {"mark_for_review": True} + instance.data["families"] = ["review"] + + self.log.info("auto image instance: {} ".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py new file mode 100644 index 0000000000..7de4adcaf4 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py @@ -0,0 +1,92 @@ +""" +Requires: + None + +Provides: + instance -> family ("review") +""" +import pyblish.api + +from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import get_subset_name + + +class CollectAutoReview(pyblish.api.ContextPlugin): + """Create review instance in non artist based workflow. + + Called only if PS is triggered in Webpublisher or in tests. 
+ """ + + label = "Collect Auto Review" + hosts = ["photoshop"] + order = pyblish.api.CollectorOrder + 0.2 + targets = ["remotepublish"] + + publish = True + + def process(self, context): + family = "review" + has_review = False + for instance in context: + if instance.data["family"] == family: + self.log.debug("Review instance found, won't create new") + has_review = True + + creator_attributes = instance.data.get("creator_attributes", {}) + if (creator_attributes.get("mark_for_review") and + "review" not in instance.data["families"]): + instance.data["families"].append("review") + + if has_review: + return + + stub = photoshop.stub() + stored_items = stub.get_layers_metadata() + for item in stored_items: + if item.get("creator_identifier") == family: + if not item.get("active"): + self.log.debug("Review instance disabled") + return + + auto_creator = context.data["project_settings"].get( + "photoshop", {}).get( + "create", {}).get( + "ReviewCreator", {}) + + if not auto_creator or not auto_creator["enabled"]: + self.log.debug("Review creator disabled, won't create new") + return + + variant = (context.data.get("variant") or + auto_creator["default_variant"]) + + project_name = context.data["anatomyData"]["project"]["name"] + proj_settings = context.data["project_settings"] + task_name = context.data["anatomyData"]["task"]["name"] + host_name = context.data["hostName"] + asset_doc = context.data["assetEntity"] + asset_name = asset_doc["name"] + + subset_name = get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name, + host_name=host_name, + project_settings=proj_settings + ) + + instance = context.create_instance(subset_name) + instance.data.update({ + "subset": subset_name, + "label": subset_name, + "name": subset_name, + "family": family, + "families": [], + "representations": [], + "asset": asset_name, + "publish": self.publish + }) + + self.log.debug("auto review created::{}".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py new file mode 100644 index 0000000000..d10cf62c67 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py @@ -0,0 +1,99 @@ +import os +import pyblish.api + +from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import get_subset_name + + +class CollectAutoWorkfile(pyblish.api.ContextPlugin): + """Collect current script for publish.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Workfile" + hosts = ["photoshop"] + + targets = ["remotepublish"] + + def process(self, context): + family = "workfile" + file_path = context.data["currentFile"] + _, ext = os.path.splitext(file_path) + staging_dir = os.path.dirname(file_path) + base_name = os.path.basename(file_path) + workfile_representation = { + "name": ext[1:], + "ext": ext[1:], + "files": base_name, + "stagingDir": staging_dir, + } + + for instance in context: + if instance.data["family"] == family: + self.log.debug("Workfile instance found, won't create new") + instance.data.update({ + "label": base_name, + "name": base_name, + "representations": [], + }) + + # creating representation + _, ext = os.path.splitext(file_path) + instance.data["representations"].append( + workfile_representation) + + return + + stub = photoshop.stub() + stored_items = stub.get_layers_metadata() + for item in stored_items: + if item.get("creator_identifier") == family: + if not item.get("active"): + 
self.log.debug("Workfile instance disabled") + return + + project_name = context.data["anatomyData"]["project"]["name"] + proj_settings = context.data["project_settings"] + auto_creator = proj_settings.get( + "photoshop", {}).get( + "create", {}).get( + "WorkfileCreator", {}) + + if not auto_creator or not auto_creator["enabled"]: + self.log.debug("Workfile creator disabled, won't create new") + return + + # context.data["variant"] might come only from collect_batch_data + variant = (context.data.get("variant") or + auto_creator["default_variant"]) + + task_name = context.data["anatomyData"]["task"]["name"] + host_name = context.data["hostName"] + asset_doc = context.data["assetEntity"] + asset_name = asset_doc["name"] + + subset_name = get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name, + host_name=host_name, + project_settings=proj_settings + ) + + # Create instance + instance = context.create_instance(subset_name) + instance.data.update({ + "subset": subset_name, + "label": base_name, + "name": base_name, + "family": family, + "families": [], + "representations": [], + "asset": asset_name + }) + + # creating representation + instance.data["representations"].append(workfile_representation) + + self.log.debug("auto workfile review created:{}".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py deleted file mode 100644 index 5bf12379b1..0000000000 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ /dev/null @@ -1,116 +0,0 @@ -import pprint - -import pyblish.api - -from openpype.settings import get_project_settings -from openpype.hosts.photoshop import api as photoshop -from openpype.lib import prepare_template_data -from openpype.pipeline import legacy_io - - -class CollectInstances(pyblish.api.ContextPlugin): - """Gather instances by LayerSet and file metadata - - Collects publishable instances from file metadata or enhance - already collected by creator (family == "image"). - - If no image instances are explicitly created, it looks if there is value - in `flatten_subset_template` (configurable in Settings), in that case it - produces flatten image with all visible layers. - - Identifier: - id (str): "pyblish.avalon.instance" - """ - - label = "Collect Instances" - order = pyblish.api.CollectorOrder - hosts = ["photoshop"] - families_mapping = { - "image": [] - } - # configurable in Settings - flatten_subset_template = "" - - def process(self, context): - instance_by_layer_id = {} - for instance in context: - if ( - instance.data["family"] == "image" and - instance.data.get("members")): - layer_id = str(instance.data["members"][0]) - instance_by_layer_id[layer_id] = instance - - stub = photoshop.stub() - layer_items = stub.get_layers() - layers_meta = stub.get_layers_metadata() - instance_names = [] - - all_layer_ids = [] - for layer_item in layer_items: - layer_meta_data = stub.read(layer_item, layers_meta) - all_layer_ids.append(layer_item.id) - - # Skip layers without metadata. - if layer_meta_data is None: - continue - - # Skip containers. 
- if "container" in layer_meta_data["id"]: - continue - - # active might not be in legacy meta - if not layer_meta_data.get("active", True): - continue - - instance = instance_by_layer_id.get(str(layer_item.id)) - if instance is None: - instance = context.create_instance(layer_meta_data["subset"]) - - instance.data["layer"] = layer_item - instance.data.update(layer_meta_data) - instance.data["families"] = self.families_mapping[ - layer_meta_data["family"] - ] - instance.data["publish"] = layer_item.visible - instance_names.append(layer_meta_data["subset"]) - - # Produce diagnostic message for any graphical - # user interface interested in visualising it. - self.log.info("Found: \"%s\" " % instance.data["name"]) - self.log.info("instance: {} ".format( - pprint.pformat(instance.data, indent=4))) - - if len(instance_names) != len(set(instance_names)): - self.log.warning("Duplicate instances found. " + - "Remove unwanted via Publisher") - - if len(instance_names) == 0 and self.flatten_subset_template: - project_name = context.data["projectEntity"]["name"] - variants = get_project_settings(project_name).get( - "photoshop", {}).get( - "create", {}).get( - "CreateImage", {}).get( - "defaults", ['']) - family = "image" - task_name = legacy_io.Session["AVALON_TASK"] - asset_name = context.data["assetEntity"]["name"] - - variant = context.data.get("variant") or variants[0] - fill_pairs = { - "variant": variant, - "family": family, - "task": task_name - } - - subset = self.flatten_subset_template.format( - **prepare_template_data(fill_pairs)) - - instance = context.create_instance(subset) - instance.data["family"] = family - instance.data["asset"] = asset_name - instance.data["subset"] = subset - instance.data["ids"] = all_layer_ids - instance.data["families"] = self.families_mapping[family] - instance.data["publish"] = True - - self.log.info("flatten instance: {} ".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 7e598a8250..87ec4ee3f1 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -14,10 +14,7 @@ from openpype.pipeline.create import get_subset_name class CollectReview(pyblish.api.ContextPlugin): - """Gather the active document as review instance. - - Triggers once even if no 'image' is published as by defaults it creates - flatten image from a workfile. + """Adds review to families for instances marked to be reviewable. 
""" label = "Collect Review" @@ -28,25 +25,8 @@ class CollectReview(pyblish.api.ContextPlugin): publish = True def process(self, context): - family = "review" - subset = get_subset_name( - family, - context.data.get("variant", ''), - context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"], - context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"], - project_settings=context.data["project_settings"] - ) - - instance = context.create_instance(subset) - instance.data.update({ - "subset": subset, - "label": subset, - "name": subset, - "family": family, - "families": [], - "representations": [], - "asset": os.environ["AVALON_ASSET"], - "publish": self.publish - }) + for instance in context: + creator_attributes = instance.data["creator_attributes"] + if (creator_attributes.get("mark_for_review") and + "review" not in instance.data["families"]): + instance.data["families"].append("review") diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 9a5aad5569..9625464499 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -14,50 +14,19 @@ class CollectWorkfile(pyblish.api.ContextPlugin): default_variant = "Main" def process(self, context): - existing_instance = None for instance in context: if instance.data["family"] == "workfile": - self.log.debug("Workfile instance found, won't create new") - existing_instance = instance - break + file_path = context.data["currentFile"] + _, ext = os.path.splitext(file_path) + staging_dir = os.path.dirname(file_path) + base_name = os.path.basename(file_path) - family = "workfile" - # context.data["variant"] might come only from collect_batch_data - variant = context.data.get("variant") or self.default_variant - subset = get_subset_name( - family, - variant, - context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"], - context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"], - project_settings=context.data["project_settings"] - ) - - file_path = context.data["currentFile"] - staging_dir = os.path.dirname(file_path) - base_name = os.path.basename(file_path) - - # Create instance - if existing_instance is None: - instance = context.create_instance(subset) - instance.data.update({ - "subset": subset, - "label": base_name, - "name": base_name, - "family": family, - "families": [], - "representations": [], - "asset": os.environ["AVALON_ASSET"] - }) - else: - instance = existing_instance - - # creating representation - _, ext = os.path.splitext(file_path) - instance.data["representations"].append({ - "name": ext[1:], - "ext": ext[1:], - "files": base_name, - "stagingDir": staging_dir, - }) + # creating representation + _, ext = os.path.splitext(file_path) + instance.data["representations"].append({ + "name": ext[1:], + "ext": ext[1:], + "files": base_name, + "stagingDir": staging_dir, + }) + return diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 9d7eff0211..d5416a389d 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -47,32 +47,42 @@ class ExtractReview(publish.Extractor): layers = self._get_layers_from_image_instances(instance) self.log.info("Layers image instance found: {}".format(layers)) + repre_name = "jpg" + 
repre_skeleton = { + "name": repre_name, + "ext": "jpg", + "stagingDir": staging_dir, + "tags": self.jpg_options['tags'], + } + + if instance.data["family"] != "review": + # enable creation of review, without this jpg review would clash + # with jpg of the image family + output_name = repre_name + repre_name = "{}_{}".format(repre_name, output_name) + repre_skeleton.update({"name": repre_name, + "outputName": output_name}) + if self.make_image_sequence and len(layers) > 1: self.log.info("Extract layers to image sequence.") img_list = self._save_sequence_images(staging_dir, layers) - instance.data["representations"].append({ - "name": "jpg", - "ext": "jpg", - "files": img_list, + repre_skeleton.update({ "frameStart": 0, "frameEnd": len(img_list), "fps": fps, - "stagingDir": staging_dir, - "tags": self.jpg_options['tags'], + "files": img_list, }) + instance.data["representations"].append(repre_skeleton) processed_img_names = img_list else: self.log.info("Extract layers to flatten image.") img_list = self._save_flatten_image(staging_dir, layers) - instance.data["representations"].append({ - "name": "jpg", - "ext": "jpg", - "files": img_list, # cannot be [] for single frame - "stagingDir": staging_dir, - "tags": self.jpg_options['tags'] + repre_skeleton.update({ + "files": img_list, }) + instance.data["representations"].append(repre_skeleton) processed_img_names = [img_list] ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index bcf21f55dd..2454691958 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -10,23 +10,40 @@ } }, "create": { - "CreateImage": { - "defaults": [ + "ImageCreator": { + "enabled": true, + "active_on_create": true, + "mark_for_review": false, + "default_variants": [ "Main" ] + }, + "AutoImageCreator": { + "enabled": false, + "active_on_create": true, + "mark_for_review": false, + "default_variant": "" + }, + "ReviewCreator": { + "enabled": true, + "active_on_create": true, + "default_variant": "" + }, + "WorkfileCreator": { + "enabled": true, + "active_on_create": true, + "default_variant": "Main" } }, "publish": { "CollectColorCodedInstances": { + "enabled": true, "create_flatten_image": "no", "flatten_subset_template": "", "color_code_mapping": [] }, - "CollectInstances": { - "flatten_subset_template": "" - }, "CollectReview": { - "publish": true + "enabled": true }, "CollectVersion": { "enabled": false diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 0071e632af..f6c46aba8b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -31,16 +31,126 @@ { "type": "dict", "collapsible": true, - "key": "CreateImage", + "key": "ImageCreator", "label": "Create Image", + "checkbox_key": "enabled", "children": [ + { + "type": "label", + "label": "Manually create instance from layer or group of layers. \n Separate review could be created for this image to be sent to Asset Management System." 
+ }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "boolean", + "key": "mark_for_review", + "label": "Review by default" + }, { "type": "list", - "key": "defaults", - "label": "Default Subsets", + "key": "default_variants", + "label": "Default Variants", "object_type": "text" } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "AutoImageCreator", + "label": "Create Flatten Image", + "checkbox_key": "enabled", + "children": [ + { + "type": "label", + "label": "Auto create image for all visible layers, used for simplified processing. \n Separate review could be created for this image to be sent to Asset Management System." + }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "boolean", + "key": "mark_for_review", + "label": "Review by default" + }, + { + "type": "text", + "key": "default_variant", + "label": "Default variant" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "ReviewCreator", + "label": "Create Review", + "checkbox_key": "enabled", + "children": [ + { + "type": "label", + "label": "Auto create review instance containing all published image instances or visible layers if no image instance." + }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled", + "default": true + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "text", + "key": "default_variant", + "label": "Default variant" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "WorkfileCreator", + "label": "Create Workfile", + "checkbox_key": "enabled", + "children": [ + { + "type": "label", + "label": "Auto create workfile instance" + }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "text", + "key": "default_variant", + "label": "Default variant" + } + ] } ] }, @@ -56,11 +166,18 @@ "is_group": true, "key": "CollectColorCodedInstances", "label": "Collect Color Coded Instances", + "checkbox_key": "enabled", "children": [ { "type": "label", "label": "Set color for publishable layers, set its resulting family and template for subset name. 
\nCan create flatten image from published instances.(Applicable only for remote publishing!)" }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled", + "default": true + }, { "key": "create_flatten_image", "label": "Create flatten image", @@ -131,40 +248,26 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CollectInstances", - "label": "Collect Instances", - "children": [ - { - "type": "label", - "label": "Name for flatten image created if no image instance present" - }, - { - "type": "text", - "key": "flatten_subset_template", - "label": "Subset template for flatten image" - } - ] - }, { "type": "dict", "collapsible": true, "key": "CollectReview", "label": "Collect Review", + "checkbox_key": "enabled", "children": [ { "type": "boolean", - "key": "publish", - "label": "Active" - } - ] + "key": "enabled", + "label": "Enabled", + "default": true + } + ] }, { "type": "dict", "key": "CollectVersion", "label": "Collect Version", + "checkbox_key": "enabled", "children": [ { "type": "label", diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py new file mode 100644 index 0000000000..1594b36dec --- /dev/null +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py @@ -0,0 +1,93 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.photoshop.lib import PhotoshopTestClass + +log = logging.getLogger("test_publish_in_photoshop") + + +class TestPublishInPhotoshopAutoImage(PhotoshopTestClass): + """Test for publish in Phohoshop with different review configuration. + + Workfile contains 3 layers, auto image and review instances created. + + Test contains updates to Settings!!! 
+ + """ + PERSIST = True + + TEST_FILES = [ + ("1iLF6aNI31qlUCD1rGg9X9eMieZzxL-rc", + "test_photoshop_publish_auto_image.zip", "") + ] + + APP_GROUP = "photoshop" + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 3)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 0, + name="imageMainForeground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 0, + name="imageMainBackground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 5)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + # review from image + additional_args = {"context.subset": "imageBeautyMain", + "context.ext": "jpg", + "name": "jpg_jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageBeautyMain", + "context.ext": "jpg", + "name": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "review"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInPhotoshopAutoImage() diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py new file mode 100644 index 0000000000..64b6868d7c --- /dev/null +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py @@ -0,0 +1,111 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.photoshop.lib import PhotoshopTestClass + +log = logging.getLogger("test_publish_in_photoshop") + + +class TestPublishInPhotoshopImageReviews(PhotoshopTestClass): + """Test for publish in Phohoshop with different review configuration. + + Workfile contains 2 image instance, one has review flag, second doesn't. + + Regular `review` family is disabled. + + Expected result is to `imageMainForeground` to have additional file with + review, `imageMainBackground` without. No separate `review` family. + + `test_project_test_asset_imageMainForeground_v001_jpg.jpg` is expected name + of imageForeground review, `_jpg` suffix is needed to differentiate between + image and review file. 
+ + """ + PERSIST = True + + TEST_FILES = [ + ("12WGbNy9RJ3m9jlnk0Ib9-IZmONoxIz_p", + "test_photoshop_publish_review.zip", "") + ] + + APP_GROUP = "photoshop" + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 3)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainForeground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainBackground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 6)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "jpg", + "context.representation": "jpg_jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "jpg", + "context.representation": "jpg_jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + additional_args = {"context.subset": "review"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInPhotoshopImageReviews() diff --git a/website/docs/admin_hosts_photoshop.md b/website/docs/admin_hosts_photoshop.md new file mode 100644 index 0000000000..de684f01d2 --- /dev/null +++ b/website/docs/admin_hosts_photoshop.md @@ -0,0 +1,127 @@ +--- +id: admin_hosts_photoshop +title: Photoshop Settings +sidebar_label: Photoshop +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Photoshop settings + +There is a couple of settings that could configure publishing process for **Photoshop**. +All of them are Project based, eg. each project could have different configuration. + +Location: Settings > Project > Photoshop + +![AfterEffects Project Settings](assets/admin_hosts_photoshop_settings.png) + +## Color Management (ImageIO) + +Placeholder for Color Management. Currently not implemented yet. + +## Creator plugins + +Contains configurable items for creators used during publishing from Photoshop. 
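+
+The creators described below map to the `ImageCreator`, `AutoImageCreator`,
+`ReviewCreator` and `WorkfileCreator` keys under `photoshop -> create` in the
+project settings. As a quick orientation, the sketch below shows how those
+values can be read in Python. It is only an illustrative sketch: the project
+name is a placeholder and the nesting assumes the standard project-settings
+layout returned by `get_project_settings`.
+
+```python
+# Illustrative sketch: inspect the effective Photoshop creator settings.
+# "my_project" is a placeholder project name; the key names mirror the
+# defaults in openpype/settings/defaults/project_settings/photoshop.json.
+from openpype.settings.lib import get_project_settings
+
+settings = get_project_settings("my_project")
+create_settings = settings["photoshop"]["create"]
+
+print(create_settings["ImageCreator"]["default_variants"])
+print(create_settings["AutoImageCreator"]["mark_for_review"])
+print(create_settings["ReviewCreator"]["active_on_create"])
+```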
+ +### Create Image + +Provides list of [variants](artist_concepts.md#variant) that will be shown to an artist in Publisher. Default value `Main`. + +### Create Flatten Image + +Provides simplified publishing process. It will create single `image` instance for artist automatically. This instance will +produce flatten image from all visible layers in a workfile. + +- Subset template for flatten image - provide template for subset name for this instance (example `imageBeauty`) +- Review - should be separate review created for this instance + +### Create Review + +Creates single `review` instance automatically. This allows artists to disable it if needed. + +### Create Workfile + +Creates single `workfile` instance automatically. This allows artists to disable it if needed. + +## Publish plugins + +Contains configurable items for publish plugins used during publishing from Photoshop. + +### Collect Color Coded Instances + +Used only in remote publishing! + +Allows to create automatically `image` instances for configurable highlight color set on layer or group in the workfile. + +#### Create flatten image + - Flatten with images - produce additional `image` with all published `image` instances merged + - Flatten only - produce only merged `image` instance + - No - produce only separate `image` instances + +#### Subset template for flatten image + +Template used to create subset name automatically (example `image{layer}Main` - uses layer name in subset name) + +### Collect Review + +Disable if no review should be created + +### Collect Version + +If enabled it will push version from workfile name to all published items. Eg. if artist is publishing `test_asset_workfile_v005.psd` +produced `image` and `review` files will contain `v005` (even if some previous version were skipped for particular family). + +### Validate Containers + +Checks if all imported assets to the workfile through `Loader` are in latest version. Limits cases that older version of asset would be used. + +If enabled, artist might still decide to disable validation for each publish (for special use cases). +Limit this optionality by toggling `Optional`. +`Active` toggle denotes that by default artists sees that optional validation as enabled. + +### Validate naming of subsets and layers + +Subset cannot contain invalid characters or extract to file would fail + +#### Regex pattern of invalid characters + +Contains weird characters like `/`, `/`, these might cause an issue when file (which contains subset name) is created on OS disk. + +#### Replacement character + +Replace all offending characters with this one. `_` is default. + +### Extract Image + +Controls extension formats of published instances of `image` family. `png` and `jpg` are by default. + +### Extract Review + +Controls output definitions of extracted reviews to upload on Asset Management (AM). + +#### Makes an image sequence instead of flatten image + +If multiple `image` instances are produced, glue created images into image sequence (`mov`) to review all of them separetely. +Without it only flatten image would be produced. + +#### Maximum size of sources for review + +Set Byte limit for review file. Applicable if gigantic `image` instances are produced, full image size is unnecessary to upload to AM. + +#### Extract jpg Options + +Handles tags for produced `.jpg` representation. `Create review` and `Add review to Ftrack` are defaults. + +#### Extract mov Options + +Handles tags for produced `.mov` representation. 
`Create review` and `Add review to Ftrack` are defaults. + + +### Workfile Builder + +Allows to open prepared workfile for an artist when no workfile exists. Useful to share standards, additional helpful content in the workfile. + +Could be configured per `Task type`, eg. `composition` task type could use different `.psd` template file than `art` task. +Workfile template must be accessible for all artists. +(Currently not handled by [SiteSync](module_site_sync.md)) \ No newline at end of file diff --git a/website/docs/assets/admin_hosts_photoshop_settings.png b/website/docs/assets/admin_hosts_photoshop_settings.png new file mode 100644 index 0000000000000000000000000000000000000000..aaa6ecbed7b353733f8424abe3c2df0cb903935a GIT binary patch literal 14364 zcmc(Gd03KZ+xOix*{-cZxwTBI=2DYeDDAfDIF(B(gr-)mq_~1qR+b{vG^XWNIa5t8 zxa5jRxj<&Zm?M*q5VGtz;|>2nDud<0r-i_$KjRWpII?KyX^(an5M(vKi`M$ zIlcz~Do6{bQFFlm=SLs%jR63|7X8mGM%2@@0N_w@c>kW02?!}S;6}(~exLfxvNX&u zndrT1zTbD%E2`*rT!?ae*U`Ykp3-w$N_(F=>@@sU6zKOD`n2c1f#*p0hkgUuuP#4c zK=w)B+owK%ue@S|U3wX}mOn4|xV5pB0eNntxoy2&{r1*dSN-#*@uE+OOedBsu{i0O z<_{6pz%`Z~!<;PCIraRE*YHJ&Q}6`&CmSySu-$1f^=i}Ht9Afz=Q$dRmvCs#ihDNL z#Q-q+TL~uB)&My3b7k-1W+-s!aQkV8c|icMaYGn)4Ph3L^XtELKs<~f^M-ivOABB) zpZwoAg=x6FsMU0-dVQQR)xNdx64@NnT#>C}?bMAv~_J#UN zuJ2k-_s%vd&gfu7tchPr$(`roOhR9?1755+TWS)wj4q? zUQAcabJ>OW3Fv{_m}fp3uNz?_6Nyr8!V{|CbZ1{NDWg9B&0d#S%YKK#?r=iKTx!g? z_v=|eT^Z<@d$Z$+YkJFLlqt%$+0HAA(%0C?C))8t>%@as#h&xujhaG4>f$@yk6hVe zf$NDY4Kq`oH`(2DKQ#supR@?dc0GBiTK1-bI)5af^ulc5n0IdjDimcJXVQim-+W+x z=f*XZ-6P*sLI_Gl^QY=%C2n#Iq`Ydg_B7^VKOxaGEnKbK?94V}N00;tmi3gJ6T1%9 z;_aqq?@_D<08Ynghmju%Eu)Vd&qg&R_bu$R-m4*PSyZ%r;cI!AFElF7UxA6RCkMPa z44n-O-US!LY)Pm!w)h=-+TvktP2ixEjqk1F#Y19Fn-+1b!Dk-1F^n5Mfr6!&27O56 zCC1X9MZuO2!d6#I)%m>4WV43ueT-;Hkmfbmy^dpnMn_J93;GV_|1PRrGPI*6OREu> zF>*x%5pCh8;@Z#xkE*(c5oT?M6GoFgR&b!o<97h)yY}sopA1GX23}Z$PwuK5u?!5t z+OG#s{R{ZC>$dqy3{Z=J7r*Jh)}P*Asw0ajsb>>G>-Py#ERl#9v{OwDVh-{CfsCCAf)5#l>z$#Z>~V z=A}~;G61%fjZLxP8v@I-l^2%Kk7m^+Ohqa`4!4MZ;4-;?rdZg>HgOEu?Y|=_K>4xW zVpLS~1XT?`grxmZ)IGPSt?0&8uM*P&p~jGV5XvuRhQzEQaX6~adoVxt9Ta~JEcxrK z_deTF-g2JIrx&;^7M@bToN+z@i&Kun%dFkqid&S_MD{Z~;D<`Y;^dX|T_nM>B>}Kj zRxC}XQxC-9`4|= zQ|CpJx#SIi&n@MJK8h_bz?8K<_WV3HE6`r^?#Q{kH+#3t7aUj78j(YvP|NEV7T*pi zfgyZ4CF{tD`6F$|TzU;eu$!WD-MHRN&(;NJDbi>AeU_i4zPDoIdS@O6$|rcHQ@4%P z4_jwOQnG>!W>JQ7j+PZ8c1jUZ+scSc#xRZ7_6e(#1hhzb?3D_uFYuFD0t$Np=>)Lb zTGCUVjcUB(I2wo=PVa|pFSFKIMg<&-csh#ie;W$r%;YYg!LD$C?1lswJ}kS2XO&aKS{nTtEHOa-yXonaX*>brKObH2gPyj z#FUxDv}4SAJo_U_cJ3J|zg{0@zxb>v-|FZ(mgV5`$>=v-t|p{2z=LhX*Z=R@kIH^Kbfw_sH#xGH&Ee zn$#HwotJ1Kgur0b?_{$e;~l{|-=7ZbK5%J+hP>z%CTImU`6NDRui06xU0TpPO(z}M zII2W74(AlNIF9m1j-%1jYZjSw`U?eb0vk?{`w*hlk?JO)BAmz(#t^(q56ymYGMKvB z(3Q~db+g*JVQOh|TJ`0e1BxnTxJ)u(^gZyxzqfb~)PtuLubVkC)2Nk9N6x9DZBJ&n z6oy6=if|F$wWY?hcW>2W-6k8a?QZyde4?Z0pp$b-uO*$OZgSN~=3Jtz|My_w*P`P; z!!7KN9SqakG^s(DH|wcbBF@@<&BTRHazo4y6q=}pT0I) zW03_VGsN}IGzXLT#v~Vu_%>`v1J3;9rflxCdA1~sft)9hvky@32OuP5KrO_cqPtSU z=@HE-?K#1bn?4xbDJM zUrWSTN+m4`aqzpDtAc#C7yzno>CEW;7f)K96Xz{Bwz+0QDk-yzL%q zH-%-`;+V^hp6Q^EAo)X$2I|dTYz_Vp03;B?* z(lxZ|L)BDgoHV7XoIt`!T!QjiFN`c_y|*VVA7?}ex5Fa)u~QAW*H9CeAQ(KgP%k`T znj|nag6~RHcrX-~w-V?;0=)OwfA`9GyPhu0=mm~~U#=fApmwI<+=#8u)>vZte z0?f4uV=f_ie05-b81qk3puXgN+3_D!MlV54^E4K}k@@eshEv5uU6h_XPfGQ*VmFqb z@09XWfD6t)-)s}dKKf#RZd6@k;pkkz_LOhT_0;)-e1?3u#n&H^wWcXyyl&?2P%?#@ zz9&fT@r5w5jh+=rh@i6%KxgN!q*F2k%d(|M(B{qR;tXe)Ec+H%6IR2?wPt)9hW*k2 zOHbU}jP4{|*Vfot?bFr-7cW-NBsG&R4I65R507fvl&!EU3!Gt4Psx3j@}tYDd06t1 zs}6g38B^PYi-56`vz@C9tqJkV7)rsSr5C~3!;>NRcaS8nMrbJ*#JcAh!Y3Q%QG(WQ zT)(?RRb~A%M`7Btwx1Bclu>AmITUFd$$D#R_-PQ5t$0wXwnVZ7b@82+Lg$! 
zU;qwA7Eb15#H@m{FerEK*5qVjglrD*V#hB#Lmi*mGHU`l=qOnFD{F)0#~1NTV6Sa! zQP52_3k8#jy|SvLvJ3`E*z_q6jOeTqre;f8r`q&5DX=IzkP42O}Og&+Skd7|60w1Q>j@142J zVWGsG8kn4E%R5^})tta@J>Vjnxu6cwo4Ttvm1DMLsec+;l{N>sq`kc}bP3(R8>|}4 z)s9gXcRqVvFP%3VNVX0G!>InT&suYrjp!}uvo2|~^&`KgTxpURGl>GlP zK;;V71n+(S=?j`|Q1cAO0^w`9Ry;JUM+u|iJF&Zf7b&mq?>Od65Xf%^TebyN@CO)M zPj?2boOIgd_QdE}hTGC#{w17TBs)u3^%zfpPq}q{%m)^emmeh@|6uj1?8p)%EXiNjIR6bNCe~cke;kSoxSLpeUtVJ* z2v*cj!W^Rg%YM%gbjN-3pfT}yUvVoIwGO*FSSR|h29TYGzF|0=zI|6w} zp@bEa!goL7!=n2nZ40=hrpriZCphaA)`pmDIUW3FMoDg9o|fG&`wCQ)G!s5}>w$ph zCEl5^&$zrl<=({j&5?=VK;4g5m-kOzPzHUs6s|`ujg{@L4PV5z1t4^p%h_#SH=mC7 z%`3lAQ>iUkNKwJuFxZ`ffL8gF17=7Q!mc}|P&hO*rfSk3U!c`^5i{#C6$9FrfoIP{ z)3(c?MGFU#Xfrul89!ZO`iA1#wD7$(fgJz=%ER<~V?lptLGD)t7j&e{zoOb4KhL3J z!)J((7*uu{Ec-<`{oY(Jg}?M%b$l^egn~I3Hg4EcJB+ou3=z7V#_&~|1;C(BZ{CMB&5rx)LdfJV{sneCM}pgjo;fEU&3~GlN}DP4TmC4F zA>{}?)5Dmd{gNvETL=Ilw);!Um@KsF>B!;M~aZ>x11TMzff*)V@ zkDS2Y->-d2zmJ|jtgT-6XQgDD?+H8`-gWpM5diO^=WZgm6}=x&kFot9+{+@btwQ0#Cd`N zZwX`TjH;Vo%D)9EbX^+tJR>xn?dmG;f=EKsa~Yc+PuTD(@<)vz)Mx{s;Exip9J%g? zp*7ra+souIerU)-SvCV>gJ9;kYB%`o??C%L@!wL(`~$MRH<&4cPa;oR=*A&$BVMF@ zvtM_4o}f_MjrTHbF$42spT^eU5>90?(+)q;hSv?VW*mCb;Hrhgt}pZ)@_T~@TcWPj zZZHA_-^d3Rba|oUb;?ob>=%m(EsitG8PN{&gMhlrRs;bYy(pdp8Z+@R{x<4>epJZ< zflj2;>P@WGn>cd{%E0SJ`C8-sVMb|Z+rb8kfnsQ;<4L_cu+<|`$nx1oKz95R`J0$L8cSl%KRlVEh z_8a;q6LVV!6&R_E(>vvv$lbh>{=&axh1&`S9xsMZlDl!2wA(XniKiIxyk|{nrmAZR zsz!#wLWBtctkHof&q=HAPC#28Y9^kzBFU?D_jF(O^{{Y5JHV%ya&JEr|0z?(E+)za zww1r!da4sQ`SuihTXJZ0a;KNIppGYzVQZ(3#7|%O(9}68sJ=)XwY%IeZ^3E0P9pbJ zMszy~XBlf^*%lAIQf3CGyRL0g;`&nD6%~{SXg!NQ&vMXyWN3Qshy1BVqyhnMd@sa zEGnfGh*lq4L*M{6eV%#~8Ebn;q_{$Rf~)W8 zas+QHrr3Pb9fc<}7~M>3Vz(r`wv%bsG>RQLUWQ4C5TLWa=T>OpYVQduE2b2P!6$~*GYaZa@}_a&uTr+oH5^@<8g`V$r(OTp zRQ8TW5}<#Pi4vw?Vv`u}$WeG&HWMY#^uS#?y@irrBtD5{UFa%pRXy;68#4y>DtdHb zyn*-VRL?_%<}+)ZyH{3j*i#w1b6Y=LWyY%d%nWc7pW)nhGE_w@QvIP6Rp6jD`6uGI z=y3G_U11X7mju_a%Rg%VteSR>w0GA^{-AEs7TTCEM>>ddkly%0t4PMufzjkrnMjP> zp_NJBJ9`y`VKehbPS1Kj>NI9cQhnCO0=+1kYLRrF%vuL1ER{Lb@&$;`5CCh^gW+i_CAL<WacQL*})n#TQ$aFB8GPL3gp%|nj;p4PtS@1!#odLI7pF7F zk1WvzDk~sxKiq_%u*`M*w#Rob^2fU%h%(oiKUiLcx-Z2&Ec_@)j>|At?QF-|vzvTy zP5b4HqbovnbHUB;aziBiF*OJwz2$41o1xHz$t-fOWm~`Q58y?Bu_Z&U@mFb(OXx3l zml!8wzq_<=JjOy3t9^RruTL92*J|L)0mtwEHt;B}YKpl_=EYCm#)*6S70Wd}@x;oA zU9tZJ=Jo~xsyw_56^4%DKWoa5BwK3^|1;UqNKLvSCLd(2ccvq-xJF$jA{uThiFCwMe{p{qlK#u@!71^GfFlPUpkGE$lKv2Iert+{xdD_E8q z(VxJkUFQz!X0#&`Rv1Ud`69_7aU;SQ0d;8|#DE|44scUt1le5h}Jxl(__MEqe%9df}>^U?GYC}ob8yeB|bPV zt|iEgu`*RNcpxB&Y|@T=*L&3pT zX86ts<72tkc*W`I$(9SLOgYQ>YvTGW3lgg_zjWyb)Ol8@64%wo|QdGE{4wzQ9r^$~|-`1@A zZtl|Cb6O{=p+z@GeUJ5sKJUeraSGae_|2*;L4=8qqzv3pafN{>e{OG_B3ylbF;VI^ zcnojeu0DU(G26s{pK9Q)8lU*5N>nXXN3>zis%?l;1ZPO|b}2Z|b`-4p`@_rU9FFc; z+SPW*_;$y-Qin+Rk!8LF<+gC76R+lS_2~U1xW!;bBxhP86*~Rs7-vgk{^GP)bgZH3h9 zLX5u?)~9MPmf=@Ft2VhVe=G5c*Lih*dnDjdNcz2W|6W#L>v#>-j`-?};w6;2+l0hj zn7#2z5@(8`WI3f*e&S?j)&`&H(PRf@r@W;HHexhXEnU*7g%UWC$X%?5s8`oN)Emo! 
zUEM>^Ly=%3BSDv@rbM9;92{RLLe>$xro~7`?o84#ut+uRk8R1h{liB`SkX#h1Xx2F z-m`tYfnbEBGz=nfJg|lQ{?`1U)6El0dRQye7Fl&KFQ7q81NVGU_%Z10=vPu7DudI6evY#*H4O z`B2B+TK2fP&M1a;V73kb$v^B+ccCSXOg&9~9$4)-FX+r4tm$}Uv8(!%x5Qy9IA#TF z1#NngT;jiFR9~Hc>r#V&@r~j5-vDy?I~$w?|7=FZ*K)<1@MC{55;OAlZVNA@j$Zo~ z9~U#)CT_9NO^SnTQtZtjfa*5ruC_V*8GlMzEPjF?XY?e?@a1a|ctt7y57+4mj}4x% z@t6&_kO{*JCli4)B+%oRQCbz}{1TE@h@31wzle0eG*mR}5^f#g12cg-#p|wp7WIS^ zx5>QFr{-svv8EJX^5{dw0zj1y-fYIC4thn1RHLtmvHVV#Tw7vjdeuCHFiM+m@!MLM zF$=Ud`x0&~;e+vhC{0Uu5Rs`WubTo6X&~T+LtpT$#$zLe*Tx=!Ji)9NO-ty}3yquW z5K<|5HG{2K`AJ4KCVxe9fdI@JU?uDA@fGqi={~S{YEpm&-yCuq(KK2^YJ1{w_N%e9 zu}33Vpxg$$U|m7yF$MNb=5xln;)`o3Js<5)hc4xea`s2T3E3piLP;l^BghTnrG%Ql z%jj}F+XlV#$1>0b@u>mch0!%}nzjBW3>j7#Ul(OV@XoX|lrC$xD-3zlN2ZAzU+tg+ z$Fjgp_!F6BR%Q%QD(FSg)(u+8(@1Zu25xax{@PBc>AX_6_+#4x#K#j`fSj$UMYZ5H zs%7PebwjaZb*-CR85;R=FSK^;F09of9dtiFxb7U-R5}=5bEWYtA*mUHLQJ&a)gR!*$ zrbTHDC?IJvVOOvPp_^#UQ0{0mF9hCOgQ)k9Qj7odAqMF^no^3)pL_#pG5}>(;Eglk zFMi$D45USD@LJYFFen~_>^aC3A&Joz%pO3B5K?dp-~s__2Wm&vkIaBVi; zUpJAg`{>pqp-q}79s4Ih-i`$FBOhBPvdFa4+Akdb;q`Sgo9Xv9-gpHAPn#On=o*q_ zS|j6D30`0^Q$E#fGxKwjImi$0>&-*GMVVSQ@(0$W@7s?yZVe(GHT839D%14z7yYgO z7zbR+1$zgk`=M^PKq_ZB$>Z!&m@&!2%5RH#hIXhTeFTg9O8Pl}=SMR43Vkx!v@2%% zK||71o%1gDnuw1GlQJAAMd{G=csA&20;jk0istTGZ3LY01R0_;&5ma|UN^zuTw)G# z78*H)$=Aq9Zxx++u6fr!oti(Cu`E@t$nY~16e%28k3` z;PUreL%*uAsT$3U%TGuR(3whFXZj~sE9&0tn@GWQT^Dvuwrqq^KEsG^i|>Eq_YOq< zo!`S6SCkejl1(yqu{)^{wsLE%X}8~}0B1xzR_1y0rCeHVRAW=fX%B9+FFdNi*in$u zzDH)j=XP*c{Gn;#g~yCv>ghbWlh;jcm7=Hemg(oA@^09(Kk-`ebPDF}ff)F@^(jXk z3$mo4*%8NTL{86USFMt*Bf__d9}J!uyw_$M>GiN9tZKsF1yQRTF`6S4CoUfr*z&?l zW-7cf`)e6S^hnlR;1~kjviHr7oTfK1zIMEI1RM9{J#4TK;ivV#@uq26bIU7#e^hD2 z&QWZ2@&j6PV`O93*2o=C(DRI1;)*K7h1p@dqVQIV!(<4M)A()QY9g{kUkGxr8DTVV z8gldI#xt_4CE<_I=UQytL84@8l|Us*)!IX&j9UU4Sp_>}Q3S91c&W$l;6P8-O zQTiN}?f83pXhexAU_JRcR$d#vA!bEd{#j-0gIenRmu+bK06RqF^yUK>x8)MpqcNUC z4Krxt-XfJ zZa~3&N^w8I*JHhC{gEqf%L(Z4+){H=OI`n_N8=?aCxbfcW(#bsLqt-q5QkM{3jG$9 zN3D73!<`H&RQV3gXw6DOW&O}>Dk!q+{~Jxxgsz)tO-hO$;L1j}dIc7ctL|JioN zKQ}~BvKBDW7l3650Q{>=KZ1j5>g)j^Zi~L}@>Mz1ef|pzf}twp^l4=e z(o!@b!T&MU^Mr=TpMLLfcWM+qkuO&3`r&0W@2To%g)<%SGW;|Z|D{N*+Vd0RHCO5+S;66a`TU?Al z4IYT%w!^l>mAt5kC2eaTIW8QkU~C|kn#!90vCEOXSp!SQt@ajyPW2$;8R6RvK;bKR zXj111=Dh`v`H=jk)fEQ#`)dmX3G=$0PnLZif4N3n088@8&lhS&-^f$N;$eY!#Ie7o zmul~v#whf!QP3LU{90EJ|L#=r)8vRSNs;RFw0wjY;D1+33?hP1*`Ej|mh#PD!_gLH zS{YH?x}(i^9;t0dn|2~VB-2i~4Nr!+TiEaM_CN40D<`86Id~E3ur`rvTi`Z^EeNo3 z%TukZqco*hMMdD=Su;by+7!qDS#YoyyB36w28RL6(Be34Ov!Rhs3`1B&~~E+d*P9j z=K1h&F9hWUtO-fJ(c87^$jQR-b+tVF^99@p_|}Tkq@EZ|N>mX#l>U-ax`-`&NnKJB zJS`VbV5fuGyNClHaJi+%tkx7k5FH80MOXsS<@!k>;G~=G9EjfNiVMUNPX+2mu+`P1 zRk3#LME`)rd+n~M^bmO#KD938=;!>e5IJ29LS$F$TyL5rW)%XZ+uR< zWX3q(pR$Ssi}h@%quMp``eZz-8cI$f(qosbJ2}{08#EQPDaJRs6nOCvq?kdN4&`Jg zT3gcnuF4PGr1neN1@pPj&NqB9Eq<n zS$}m)LU-64!0_jPft!AIi@7niOYar$L(d4SiaX{icQl78bEOBnAS@`O=2~c?XzeOZ zDAw*Fe;K&+H) z3veQ`*QYyZvt`Dv8-_d~_d534{aS9~Vr{2;!~0N^FxyWN9vPLljNF1>Qk487s;4eP zc$v!&HP2sYS!oHMAJ_r{HU7al2HzDl$zA|_?E-#maco@>1@l_UG9@SPL%_V`8r`@> zPgHq^N8t?%!M&jbY%?ukE>Nxm4;WbRfhzO=nGWn%A^U$!0V3mU;6;@l6#rS_t1mmh zO2#h!|B#H4V`&Y4Q}doJlB$cV+=^AQ5|em8in&dA(l!8yKl+b~-lpV;J|);p4anmi zRRx4_Y>6?hWo70rPl?7VDP(m7TE?o{IkgYAeAfS|>5T2+n?3gN6 zxCuSCVIi1AqI15fIYFI=H9@o%8_R2z_av6A-ei{u)`98lkmBE@uKLoDT3$eQCNZFR z9NGDv858;psu|;E}41kU?R}wpEHRDVU!} z1!tI5@N-raUL@FnR)8Ajz7AX7aZ{NsL6EcTQbsZJ=um~G;c6%qze@HIRLIGnn#Mbu z)}}PB_j*oYxE@I`BnQ>u9Y`CGIpn$dU?P;04%C{E!dT`}uo^mp*GopX6RM;f6PF_u z_S5OVThbM$dhaBi!ua}=nnw59F`ayzq$-K<=d+d#0&OzK%WY;t5(rCisP(-;(ygfo ze-mGvVI7y9SszUWvkWL+ERP5EB9wweD64h-(tsP0%Sc>Thk0*b7R{+rY!bOMcg8Cc 
z3FL?bY?a4yt)#gQqG)gYBAEm9S$y005v9Zr`)lCln+#qNaQySO(W;(w`7v2PC+V6! zhm4Cj1?8VL7lct%>2k5{D19{|3U2~#O%}K{xwbqi1~usCSysrA(&k5+(l_kpgeFU* zVSd{+(X!e^L*SCgMlaBkUFFOZY!88jpv^5xZ?1E$2UVz1?( z*emE-U9wfthpxH6nFZhh`xfT^UX}beWoXL|km>>Y;{U6gW5Pz%h+qI50nw|?zP>*2 zKO_A7XKkt6eUNNj(B$WmI92X^xHhelRn+$cjhT`3_-ZlnZ#D)E)@b~g!w8B_Ut)mG zigLU@5|``WSlKf*g$aHccX1VYWwu(RF^4}b8>)5#6$Yc#rKO~SA_b$f-omBC+PWi! zu{7&z(Gj(b9`L|0AS)(6d}RrTseipA9#ov7$ju4a=)nVHZRgdeZOk=Gu~X%Vr4}82 zeiqdkid7s?hLQD|I95>V&c84QUX$OCwkPcz34qG1h>UNA!rIy;C5#OQl&6bQPqFP+ zbmX{+jB@2-Ib)WSQ4ch}<}_@%CX;k{-SamayrM%H+X7}Wzr);-Gm1Z%n4TC7i?;y5 z=ehb>JTEIIlHpf}=Yn@q&ffu`c4_ZHYF|Dp4&6my`rOXU|x3qo&{;#12;L7 z^#kk<$Izn9;M)OaRaSfN)W=$~GDtyhS`t2^F%=!Qh@9?ozb8raC~~^m2zcNBZL2~# zWnt#Q5x1&e+Gl*U{}^IPk!n~;SnVG}6dS3jvQ z^Mr1?jvCAbQ__yRV9XcWG7+@f_R~XTt;AyPtm!)3$w!bo?dhRSfiOeLsIrnjnW_>| z(GdEu#T~bE_Sv6)u|*_oyO$hK^z*kfKOzKxF#;&SRBa?gxj_P{2^nlk*4t}`Bzws> zhd;5Pq?X9TyO~!Mt5L9=j#Tl-@f3_PYlNQLKVT=KD+=I2d6{h7*})O>@YVDxGDH1@ zQY#U{%=OFcNZM(d0A-~ Date: Tue, 2 May 2023 16:25:03 +0200 Subject: [PATCH 23/25] :art: soft-fail when pan/zoom locked on camera --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 825a8d38c7..3ceef6f3d3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -217,7 +217,11 @@ class ExtractPlayblast(publish.Extractor): instance.data["panel"], edit=True, **viewport_defaults ) - cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + try: + cmds.setAttr( + "{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + except RuntimeError: + self.log.warning("Cannot restore Pan/Zoom settings.") collected_files = os.listdir(stagingdir) patterns = [clique.PATTERNS["frames"]] From fec104de8e085d0ce0d70e9679c98924338ab3ce Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 May 2023 18:49:02 +0200 Subject: [PATCH 24/25] Fix: Locally copied version of last published workfile is not incremented (#4722) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix: Locally copied version of last published workfile is not incremented * fix subset first match * correct anatomy name * Fix typo and linting * keep source filepath for further path conformation * fetch also input dependencies of workfile * required changes * lint * fix case only one subset * Enhancement: copy last workfile as reusable methods (#6) * Enhancement: copy last published workfile as reusable methods (WiP) * Added get_host_extensions method, added subset_id and las_version_doc access, added optional arguments to get_last_published_workfile * Plugged in the new methods + minor changes * Added docstrings, last workfile optional argument, and removed unused code * Using new implementation to get local workfile path. Warning: It adds an extra dot to the extension which I need to fix * Refactoring and fixed double dots * Added match subset_id and get representation method, plus clan up * Removed unused vars * Fixed some rebasing errors * delinted unchanged code and renamed get_representation into get_representation_with_task * This time it's really delinted, I hope... 
* Update openpype/modules/sync_server/sync_server.py reprenation isn't the right spelling (: Co-authored-by: FΓ©lix David * Changes based on reviews * Fixed non imperative docstring and missing space * Fixed another non imperative docstring * Update openpype/modules/sync_server/sync_server.py Fixed typo Co-authored-by: FΓ©lix David Co-authored-by: Hayley GUILLOT Co-authored-by: FΓ©lix David * Fix: syntax error * fix single subset case * Restore sync server enabled test in hook * Python2 syntax * renaming and missing key case handling * Fix local workfile overwritten on update in some cases (#7) * Fix: Local workfile overwrite when local version number is higher than published workfile version number (WiP) * Changed regex search, clean up * Readded mistakenly removed newline * lint * remove anticipated functions for cleaner PR * remove funcs from entities.py * change to get_last_workfile_with_version * clean * Update openpype/modules/sync_server/sync_server.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> * removed get_last_published_workfile_path * moved hook to sync server module * fix lint * Refactor - download only if not present * Refactor - change to list instead of set * Refactor - removing unnecessary code last_published_workfile_path must exists or we wouldn't get there. Use version only from that. * Refactor - removing unnecessary imports * Added check for max fail tries * Refactor - cleaned up how to get last workfile * Updated docstrings * Remove unused imports Co-authored-by: FΓ©lix David * OP-5466 - run this on more DCC * Updated documentation * Fix - handle hero versions Skip hero versions, look only for versioned published to get max version id. * Hound * Refactor - simplified download_last_published_workfile Logic should be in pre hook * Skip if no profile found * Removed unwanted import * Use collected project_doc Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> * Use cached project_settings Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --------- Co-authored-by: FΓ©lix David Co-authored-by: Sharkitty <81646000+Sharkitty@users.noreply.github.com> Co-authored-by: Hayley GUILLOT Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Co-authored-by: Jakub JeΕΎek --- .../pre_copy_last_published_workfile.py | 151 +++++------ openpype/modules/sync_server/sync_server.py | 104 +++++++- .../modules/sync_server/sync_server_module.py | 35 ++- website/docs/module_site_sync.md | 237 ++++++++++++------ 4 files changed, 379 insertions(+), 148 deletions(-) rename openpype/{hooks => modules/sync_server/launch_hooks}/pre_copy_last_published_workfile.py (56%) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/modules/sync_server/launch_hooks/pre_copy_last_published_workfile.py similarity index 56% rename from openpype/hooks/pre_copy_last_published_workfile.py rename to openpype/modules/sync_server/launch_hooks/pre_copy_last_published_workfile.py index 26b43c39cb..bbc220945c 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/modules/sync_server/launch_hooks/pre_copy_last_published_workfile.py @@ -1,15 +1,20 @@ import os import shutil -from time import sleep + from openpype.client.entities import ( - get_last_version_by_subset_id, get_representations, - get_subsets, + get_project ) + from openpype.lib import PreLaunchHook -from openpype.lib.local_settings import get_local_site_id from openpype.lib.profiles_filtering import filter_profiles -from 
openpype.pipeline.load.utils import get_representation_path +from openpype.modules.sync_server.sync_server import ( + download_last_published_workfile, +) +from openpype.pipeline.template_data import get_template_data +from openpype.pipeline.workfile.path_resolving import ( + get_workfile_template_key, +) from openpype.settings.lib import get_project_settings @@ -22,7 +27,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): # Before `AddLastWorkfileToLaunchArgs` order = -1 - app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"] + # any DCC could be used but TrayPublisher and other specials + app_groups = ["blender", "photoshop", "tvpaint", "aftereffects", + "nuke", "nukeassist", "nukex", "hiero", "nukestudio", + "maya", "harmony", "celaction", "flame", "fusion", + "houdini", "tvpaint"] def execute(self): """Check if local workfile doesn't exist, else copy it. @@ -31,11 +40,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): 2- Check if workfile in work area doesn't exist 3- Check if published workfile exists and is copied locally in publish 4- Substitute copied published workfile as first workfile + with incremented version by +1 Returns: None: This is a void method. """ - sync_server = self.modules_manager.get("sync_server") if not sync_server or not sync_server.enabled: self.log.debug("Sync server module is not enabled or available") @@ -53,6 +62,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): # Get data project_name = self.data["project_name"] + asset_name = self.data["asset_name"] task_name = self.data["task_name"] task_type = self.data["task_type"] host_name = self.application.host_name @@ -68,6 +78,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): "hosts": host_name, } last_workfile_settings = filter_profiles(profiles, filter_data) + if not last_workfile_settings: + return use_last_published_workfile = last_workfile_settings.get( "use_last_published_workfile" ) @@ -92,57 +104,27 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return + max_retries = int((sync_server.sync_project_settings[project_name] + ["config"] + ["retry_cnt"])) + self.log.info("Trying to fetch last published workfile...") - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") - # Check it can proceed - if not project_doc and not asset_doc: - return + context_filters = { + "asset": asset_name, + "family": "workfile", + "task": {"name": task_name, "type": task_type} + } - # Get subset id - subset_id = next( - ( - subset["_id"] - for subset in get_subsets( - project_name, - asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family", "data.families"], - ) - if subset["data"].get("family") == "workfile" - # Legacy compatibility - or "workfile" in subset["data"].get("families", {}) - ), - None, - ) - if not subset_id: - self.log.debug( - 'No any workfile for asset "{}".'.format(asset_doc["name"]) - ) - return + workfile_representations = list(get_representations( + project_name, + context_filters=context_filters + )) - # Get workfile representation - last_version_doc = get_last_version_by_subset_id( - project_name, subset_id, fields=["_id"] - ) - if not last_version_doc: - self.log.debug("Subset does not have any versions") - return - - workfile_representation = next( - ( - representation - for representation in get_representations( - project_name, version_ids=[last_version_doc["_id"]] - ) - if representation["context"]["task"]["name"] == task_name - ), - None, - ) - - if not workfile_representation: + if not 
workfile_representations: self.log.debug( 'No published workfile for task "{}" and host "{}".'.format( task_name, host_name @@ -150,28 +132,55 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return - local_site_id = get_local_site_id() - sync_server.add_site( - project_name, - workfile_representation["_id"], - local_site_id, - force=True, - priority=99, - reset_timer=True, + filtered_repres = filter( + lambda r: r["context"].get("version") is not None, + workfile_representations ) - - while not sync_server.is_representation_on_site( - project_name, workfile_representation["_id"], local_site_id - ): - sleep(5) - - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots + workfile_representation = max( + filtered_repres, key=lambda r: r["context"]["version"] ) - local_workfile_dir = os.path.dirname(last_workfile) # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir + last_published_workfile_path = download_last_published_workfile( + host_name, + project_name, + task_name, + workfile_representation, + max_retries, + anatomy=anatomy ) + if not last_published_workfile_path: + self.log.debug( + "Couldn't download {}".format(last_published_workfile_path) + ) + return + + project_doc = self.data["project_doc"] + + project_settings = self.data["project_settings"] + template_key = get_workfile_template_key( + task_name, host_name, project_name, project_settings + ) + + # Get workfile data + workfile_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + + extension = last_published_workfile_path.split(".")[-1] + workfile_data["version"] = ( + workfile_representation["context"]["version"] + 1) + workfile_data["ext"] = extension + + anatomy_result = anatomy.format(workfile_data) + local_workfile_path = anatomy_result[template_key]["path"] + + # Copy last published workfile to local workfile directory + shutil.copy( + last_published_workfile_path, + local_workfile_path, + ) + + self.data["last_workfile_path"] = local_workfile_path + # Keep source filepath for further path conformation + self.data["source_filepath"] = last_published_workfile_path diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 5b873a37cf..d1d5c2863d 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -3,10 +3,15 @@ import os import asyncio import threading import concurrent.futures -from concurrent.futures._base import CancelledError +from time import sleep from .providers import lib +from openpype.client.entity_links import get_linked_representation_id from openpype.lib import Logger +from openpype.lib.local_settings import get_local_site_id +from openpype.modules.base import ModulesManager +from openpype.pipeline import Anatomy +from openpype.pipeline.load.utils import get_representation_path_with_anatomy from .utils import SyncStatus, ResumableError @@ -189,6 +194,98 @@ def _site_is_working(module, project_name, site_name, site_config): return handler.is_active() +def download_last_published_workfile( + host_name: str, + project_name: str, + task_name: str, + workfile_representation: dict, + max_retries: int, + anatomy: Anatomy = None, +) -> str: + """Download the last published workfile + + Args: + host_name (str): Host name. + project_name (str): Project name. + task_name (str): Task name. + workfile_representation (dict): Workfile representation. 
+ max_retries (int): complete file failure only after so many attempts + anatomy (Anatomy, optional): Anatomy (Used for optimization). + Defaults to None. + + Returns: + str: last published workfile path localized + """ + + if not anatomy: + anatomy = Anatomy(project_name) + + # Get sync server module + sync_server = ModulesManager().modules_by_name.get("sync_server") + if not sync_server or not sync_server.enabled: + print("Sync server module is disabled or unavailable.") + return + + if not workfile_representation: + print( + "Not published workfile for task '{}' and host '{}'.".format( + task_name, host_name + ) + ) + return + + last_published_workfile_path = get_representation_path_with_anatomy( + workfile_representation, anatomy + ) + if (not last_published_workfile_path or + not os.path.exists(last_published_workfile_path)): + return + + # If representation isn't available on remote site, then return. + if not sync_server.is_representation_on_site( + project_name, + workfile_representation["_id"], + sync_server.get_remote_site(project_name), + ): + print( + "Representation for task '{}' and host '{}'".format( + task_name, host_name + ) + ) + return + + # Get local site + local_site_id = get_local_site_id() + + # Add workfile representation to local site + representation_ids = {workfile_representation["_id"]} + representation_ids.update( + get_linked_representation_id( + project_name, repre_id=workfile_representation["_id"] + ) + ) + for repre_id in representation_ids: + if not sync_server.is_representation_on_site(project_name, repre_id, + local_site_id): + sync_server.add_site( + project_name, + repre_id, + local_site_id, + force=True, + priority=99 + ) + sync_server.reset_timer() + print("Starting to download:{}".format(last_published_workfile_path)) + # While representation unavailable locally, wait. + while not sync_server.is_representation_on_site( + project_name, workfile_representation["_id"], local_site_id, + max_retries=max_retries + ): + sleep(5) + + return last_published_workfile_path + + class SyncServerThread(threading.Thread): """ Separate thread running synchronization server with asyncio loop. @@ -358,7 +455,6 @@ class SyncServerThread(threading.Thread): duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) - delay = self.module.get_loop_delay(project_name) self.log.debug( "Waiting for {} seconds to new loop".format(delay) @@ -370,8 +466,8 @@ class SyncServerThread(threading.Thread): self.log.warning( "ConnectionResetError in sync loop, trying next loop", exc_info=True) - except CancelledError: - # just stopping server + except asyncio.exceptions.CancelledError: + # cancelling timer pass except ResumableError: self.log.warning( diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 5a4fa07e98..b85b045bd9 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -838,6 +838,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return ret_dict + def get_launch_hook_paths(self): + """Implementation for applications launch hooks. 
+ + Returns: + (str): full absolut path to directory with hooks for the module + """ + + return os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "launch_hooks" + ) + # Needs to be refactored after Settings are updated # # Methods for Settings to get appriate values to fill forms # def get_configurable_items(self, scope=None): @@ -1045,9 +1057,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.sync_server_thread.reset_timer() def is_representation_on_site( - self, project_name, representation_id, site_name + self, project_name, representation_id, site_name, max_retries=None ): - """Checks if 'representation_id' has all files avail. on 'site_name'""" + """Checks if 'representation_id' has all files avail. on 'site_name' + + Args: + project_name (str) + representation_id (str) + site_name (str) + max_retries (int) (optional) - provide only if method used in while + loop to bail out + Returns: + (bool): True if 'representation_id' has all files correctly on the + 'site_name' + Raises: + (ValueError) Only If 'max_retries' provided if upload/download + failed too many times to limit infinite loop check. + """ representation = get_representation_by_id(project_name, representation_id, fields=["_id", "files"]) @@ -1060,6 +1086,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if site["name"] != site_name: continue + if max_retries: + tries = self._get_tries_count_from_rec(site) + if tries >= max_retries: + raise ValueError("Failed too many times") + if (site.get("progress") or site.get("error") or not site.get("created_dt")): return False diff --git a/website/docs/module_site_sync.md b/website/docs/module_site_sync.md index 3e5794579c..68f56cb548 100644 --- a/website/docs/module_site_sync.md +++ b/website/docs/module_site_sync.md @@ -7,80 +7,112 @@ sidebar_label: Site Sync import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; +Site Sync allows users and studios to synchronize published assets between +multiple 'sites'. Site denotes a storage location, +which could be a physical disk, server, cloud storage. To be able to use site +sync, it first needs to be configured. -:::warning -**This feature is** currently **in a beta stage** and it is not recommended to rely on it fully for production. -::: - -Site Sync allows users and studios to synchronize published assets between multiple 'sites'. Site denotes a storage location, -which could be a physical disk, server, cloud storage. To be able to use site sync, it first needs to be configured. - -The general idea is that each user acts as an individual site and can download and upload any published project files when they are needed. that way, artist can have access to the whole project, but only every store files that are relevant to them on their home workstation. +The general idea is that each user acts as an individual site and can download +and upload any published project files when they are needed. that way, artist +can have access to the whole project, but only every store files that are +relevant to them on their home workstation. :::note -At the moment site sync is only able to deal with publishes files. No workfiles will be synchronized unless they are published. We are working on making workfile synchronization possible as well. +At the moment site sync is only able to deal with publishes files. No workfiles +will be synchronized unless they are published. We are working on making +workfile synchronization possible as well. 
::: ## System Settings -To use synchronization, *Site Sync* needs to be enabled globally in **OpenPype Settings/System/Modules/Site Sync**. +To use synchronization, *Site Sync* needs to be enabled globally in **OpenPype +Settings/System/Modules/Site Sync**. ![Configure module](assets/site_sync_system.png) -### Sites +### Sites By default there are two sites created for each OpenPype installation: -- **studio** - default site - usually a centralized mounted disk accessible to all artists. Studio site is used if Site Sync is disabled. -- **local** - each workstation or server running OpenPype Tray receives its own with unique site name. Workstation refers to itself as "local"however all other sites will see it under it's unique ID. -Artists can explore their site ID by opening OpenPype Info tool by clicking on a version number in the tray app. +- **studio** - default site - usually a centralized mounted disk accessible to + all artists. Studio site is used if Site Sync is disabled. +- **local** - each workstation or server running OpenPype Tray receives its own + with unique site name. Workstation refers to itself as "local"however all + other sites will see it under it's unique ID. -Many different sites can be created and configured on the system level, and some or all can be assigned to each project. +Artists can explore their site ID by opening OpenPype Info tool by clicking on +a version number in the tray app. -Each OpenPype Tray app works with two sites at one time. (Sites can be the same, and no syncing is done in this setup). +Many different sites can be created and configured on the system level, and +some or all can be assigned to each project. -Sites could be configured differently per project basis. +Each OpenPype Tray app works with two sites at one time. (Sites can be the +same, and no syncing is done in this setup). -Each new site needs to be created first in `System Settings`. Most important feature of site is its Provider, select one from already prepared Providers. +Sites could be configured differently per project basis. -#### Alternative sites +Each new site needs to be created first in `System Settings`. Most important +feature of site is its Provider, select one from already prepared Providers. + +#### Alternative sites This attribute is meant for special use cases only. -One of the use cases is sftp site vendoring (exposing) same data as regular site (studio). Each site is accessible for different audience. 'studio' for artists in a studio via shared disk, 'sftp' for externals via sftp server with mounted 'studio' drive. +One of the use cases is sftp site vendoring (exposing) same data as regular +site (studio). Each site is accessible for different audience. 'studio' for +artists in a studio via shared disk, 'sftp' for externals via sftp server with +mounted 'studio' drive. -Change of file status on one site actually means same change on 'alternate' site occurred too. (eg. artists publish to 'studio', 'sftp' is using -same location >> file is accessible on 'sftp' site right away, no need to sync it anyhow.) +Change of file status on one site actually means same change on 'alternate' +site occurred too. (eg. artists publish to 'studio', 'sftp' is using +same location >> file is accessible on 'sftp' site right away, no need to sync +it anyhow.) ##### Example + ![Configure module](assets/site_sync_system_sites.png) -Admin created new `sftp` site which is handled by `SFTP` provider. 
Somewhere in the studio SFTP server is deployed on a machine that has access to `studio` drive. +Admin created new `sftp` site which is handled by `SFTP` provider. Somewhere in +the studio SFTP server is deployed on a machine that has access to `studio` +drive. Alternative sites work both way: + - everything published to `studio` is accessible on a `sftp` site too -- everything published to `sftp` (most probably via artist's local disk - artists publishes locally, representation is marked to be synced to `sftp`. Immediately after it is synced, it is marked to be available on `studio` too for artists in the studio to use.) +- everything published to `sftp` (most probably via artist's local disk - + artists publishes locally, representation is marked to be synced to `sftp`. + Immediately after it is synced, it is marked to be available on `studio` too + for artists in the studio to use.) ## Project Settings -Sites need to be made available for each project. Of course this is possible to do on the default project as well, in which case all other projects will inherit these settings until overridden explicitly. +Sites need to be made available for each project. Of course this is possible to +do on the default project as well, in which case all other projects will +inherit these settings until overridden explicitly. You'll find the setting in **Settings/Project/Global/Site Sync** -The attributes that can be configured will vary between sites and their providers. +The attributes that can be configured will vary between sites and their +providers. ## Local settings -Each user should configure root folder for their 'local' site via **Local Settings** in OpenPype Tray. This folder will be used for all files that the user publishes or downloads while working on a project. Artist has the option to set the folder as "default"in which case it is used for all the projects, or it can be set on a project level individually. +Each user should configure root folder for their 'local' site via **Local +Settings** in OpenPype Tray. This folder will be used for all files that the +user publishes or downloads while working on a project. Artist has the option +to set the folder as "default"in which case it is used for all the projects, or +it can be set on a project level individually. -Artists can also override which site they use as active and remote if need be. +Artists can also override which site they use as active and remote if need be. ![Local overrides](assets/site_sync_local_setting.png) - ## Providers -Each site implements a so called `provider` which handles most common operations (list files, copy files etc.) and provides interface with a particular type of storage. (disk, gdrive, aws, etc.) -Multiple configured sites could share the same provider with different settings (multiple mounted disks - each disk can be a separate site, while +Each site implements a so called `provider` which handles most common +operations (list files, copy files etc.) and provides interface with a +particular type of storage. (disk, gdrive, aws, etc.) +Multiple configured sites could share the same provider with different +settings (multiple mounted disks - each disk can be a separate site, while all share the same provider). **Currently implemented providers:** @@ -89,21 +121,30 @@ all share the same provider). Handles files stored on disk storage. -Local drive provider is the most basic one that is used for accessing all standard hard disk storage scenarios. 
It will work with any storage that can be mounted on your system in a standard way. This could correspond to a physical external hard drive, network mounted storage, internal drive or even VPN connected network drive. It doesn't care about how the drive is mounted, but you must be able to point to it with a simple directory path. +Local drive provider is the most basic one that is used for accessing all +standard hard disk storage scenarios. It will work with any storage that can be +mounted on your system in a standard way. This could correspond to a physical +external hard drive, network mounted storage, internal drive or even VPN +connected network drive. It doesn't care about how the drive is mounted, but +you must be able to point to it with a simple directory path. Default sites `local` and `studio` both use local drive provider. - ### Google Drive -Handles files on Google Drive (this). GDrive is provided as a production example for implementing other cloud providers +Handles files on Google Drive (this). GDrive is provided as a production +example for implementing other cloud providers -Let's imagine a small globally distributed studio which wants all published work for all their freelancers uploaded to Google Drive folder. +Let's imagine a small globally distributed studio which wants all published +work for all their freelancers uploaded to Google Drive folder. For this use case admin needs to configure: -- how many times it tries to synchronize file in case of some issue (network, permissions) + +- how many times it tries to synchronize file in case of some issue (network, + permissions) - how often should synchronization check for new assets -- sites for synchronization - 'local' and 'gdrive' (this can be overridden in local settings) +- sites for synchronization - 'local' and 'gdrive' (this can be overridden in + local settings) - user credentials - root folder location on Google Drive side @@ -111,30 +152,43 @@ Configuration would look like this: ![Configure project](assets/site_sync_project_settings.png) -*Site Sync* for Google Drive works using its API: https://developers.google.com/drive/api/v3/about-sdk +*Site Sync* for Google Drive works using its +API: https://developers.google.com/drive/api/v3/about-sdk -To configure Google Drive side you would need to have access to Google Cloud Platform project: https://console.cloud.google.com/ +To configure Google Drive side you would need to have access to Google Cloud +Platform project: https://console.cloud.google.com/ To get working connection to Google Drive there are some necessary steps: -- first you need to enable GDrive API: https://developers.google.com/drive/api/v3/enable-drive-api -- next you need to create user, choose **Service Account** (for basic configuration no roles for account are necessary) + +- first you need to enable GDrive + API: https://developers.google.com/drive/api/v3/enable-drive-api +- next you need to create user, choose **Service Account** (for basic + configuration no roles for account are necessary) - add new key for created account and download .json file with credentials -- share destination folder on the Google Drive with created account (directly in GDrive web application) -- add new site back in OpenPype Settings, name as you want, provider needs to be 'gdrive' +- share destination folder on the Google Drive with created account (directly + in GDrive web application) +- add new site back in OpenPype Settings, name as you want, provider needs to + be 'gdrive' - distribute credentials file via 
shared mounted disk location :::note -If you are using regular personal GDrive for testing don't forget adding `/My Drive` as the prefix in root configuration. Business accounts and share drives don't need this. +If you are using regular personal GDrive for testing don't forget +adding `/My Drive` as the prefix in root configuration. Business accounts and +share drives don't need this. ::: ### SFTP -SFTP provider is used to connect to SFTP server. Currently authentication with `user:password` or `user:ssh key` is implemented. -Please provide only one combination, don't forget to provide password for ssh key if ssh key was created with a passphrase. +SFTP provider is used to connect to SFTP server. Currently authentication +with `user:password` or `user:ssh key` is implemented. +Please provide only one combination, don't forget to provide password for ssh +key if ssh key was created with a passphrase. -(SFTP connection could be a bit finicky, use FileZilla or WinSCP for testing connection, it will be mush faster.) +(SFTP connection could be a bit finicky, use FileZilla or WinSCP for testing +connection, it will be mush faster.) -Beware that ssh key expects OpenSSH format (`.pem`) not a Putty format (`.ppk`)! +Beware that ssh key expects OpenSSH format (`.pem`) not a Putty +format (`.ppk`)! #### How to set SFTP site @@ -143,60 +197,101 @@ Beware that ssh key expects OpenSSH format (`.pem`) not a Putty format (`.ppk`)! ![Enable syncing and create site](assets/site_sync_sftp_system.png) -- In Projects setting enable Site Sync (on default project - all project will be synched, or on specific project) -- Configure SFTP connection and destination folder on a SFTP server (in screenshot `/upload`) +- In Projects setting enable Site Sync (on default project - all project will + be synched, or on specific project) +- Configure SFTP connection and destination folder on a SFTP server (in + screenshot `/upload`) ![SFTP connection](assets/site_sync_project_sftp_settings.png) - -- if you want to force syncing between local and sftp site for all users, use combination `active site: local`, `remote site: NAME_OF_SFTP_SITE` -- if you want to allow only specific users to use SFTP syncing (external users, not located in the office), use `active site: studio`, `remote site: studio`. + +- if you want to force syncing between local and sftp site for all users, use + combination `active site: local`, `remote site: NAME_OF_SFTP_SITE` +- if you want to allow only specific users to use SFTP syncing (external users, + not located in the office), use `active site: studio`, `remote site: studio`. ![Select active and remote site on a project](assets/site_sync_sftp_project_setting_not_forced.png) -- Each artist can decide and configure syncing from his/her local to SFTP via `Local Settings` +- Each artist can decide and configure syncing from his/her local to SFTP + via `Local Settings` ![Select active and remote site on a project](assets/site_sync_sftp_settings_local.png) - + ### Custom providers -If a studio needs to use other services for cloud storage, or want to implement totally different storage providers, they can do so by writing their own provider plugin. We're working on a developer documentation, however, for now we recommend looking at `abstract_provider.py`and `gdrive.py` inside `openpype/modules/sync_server/providers` and using it as a template. 
+If a studio needs to use other services for cloud storage, or want to implement +totally different storage providers, they can do so by writing their own +provider plugin. We're working on a developer documentation, however, for now +we recommend looking at `abstract_provider.py`and `gdrive.py` +inside `openpype/modules/sync_server/providers` and using it as a template. ### Running Site Sync in background -Site Sync server synchronizes new published files from artist machine into configured remote location by default. +Site Sync server synchronizes new published files from artist machine into +configured remote location by default. -There might be a use case where you need to synchronize between "non-artist" sites, for example between studio site and cloud. In this case -you need to run Site Sync as a background process from a command line (via service etc) 24/7. +There might be a use case where you need to synchronize between "non-artist" +sites, for example between studio site and cloud. In this case +you need to run Site Sync as a background process from a command line (via +service etc) 24/7. -To configure all sites where all published files should be synced eventually you need to configure `project_settings/global/sync_server/config/always_accessible_on` property in Settings (per project) first. +To configure all sites where all published files should be synced eventually +you need to +configure `project_settings/global/sync_server/config/always_accessible_on` +property in Settings (per project) first. ![Set another non artist remote site](assets/site_sync_always_on.png) This is an example of: + - Site Sync is enabled for a project -- default active and remote sites are set to `studio` - eg. standard process: everyone is working in a studio, publishing to shared location etc. -- (but this also allows any of the artists to work remotely, they would change their active site in their own Local Settings to `local` and configure local root. - This would result in everything artist publishes is saved first onto his local folder AND synchronized to `studio` site eventually.) +- default active and remote sites are set to `studio` - eg. standard process: + everyone is working in a studio, publishing to shared location etc. +- (but this also allows any of the artists to work remotely, they would change + their active site in their own Local Settings to `local` and configure local + root. + This would result in everything artist publishes is saved first onto his + local folder AND synchronized to `studio` site eventually.) - everything exported must also be eventually uploaded to `sftp` site -This eventual synchronization between `studio` and `sftp` sites must be physically handled by background process. +This eventual synchronization between `studio` and `sftp` sites must be +physically handled by background process. -As current implementation relies heavily on Settings and Local Settings, background process for a specific site ('studio' for example) must be configured via Tray first to `syncserver` command to work. +As current implementation relies heavily on Settings and Local Settings, +background process for a specific site ('studio' for example) must be +configured via Tray first to `syncserver` command to work. To do this: -- run OP `Tray` with environment variable OPENPYPE_LOCAL_ID set to name of active (source) site. In most use cases it would be studio (for cases of backups of everything published to studio site to different cloud site etc.) 
+- run OP `Tray` with environment variable OPENPYPE_LOCAL_ID set to name of + active (source) site. In most use cases it would be studio (for cases of + backups of everything published to studio site to different cloud site etc.) - start `Tray` -- check `Local ID` in information dialog after clicking on version number in the Tray +- check `Local ID` in information dialog after clicking on version number in + the Tray - open `Local Settings` in the `Tray` - configure for each project necessary active site and remote site - close `Tray` - run OP from a command line with `syncserver` and `--active_site` arguments - -This is an example how to trigger background syncing process where active (source) site is `studio`. -(It is expected that OP is installed on a machine, `openpype_console` is on PATH. If not, add full path to executable. +This is an example how to trigger background syncing process where active ( +source) site is `studio`. +(It is expected that OP is installed on a machine, `openpype_console` is on +PATH. If not, add full path to executable. ) + ```shell openpype_console syncserver --active_site studio -``` \ No newline at end of file +``` + +### Syncing of last published workfile + +Some DCC might have enabled +in `project_setting/global/tools/Workfiles/last_workfile_on_startup`, eg. open +DCC with last opened workfile. + +Flag `use_last_published_workfile` tells that last published workfile should be +used if no workfile is present locally. +This use case could happen if artists starts working on new task locally, +doesn't have any workfile present. In that case last published will be +synchronized locally and its version bumped by 1 (as workfile's version is +always +1 from published version). \ No newline at end of file From c542934da45f6dc50bb8ceabfb23f4ff822f016b Mon Sep 17 00:00:00 2001 From: Ynbot Date: Wed, 3 May 2023 03:25:25 +0000 Subject: [PATCH 25/25] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 72297a4430..9832ff4747 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.6-nightly.2" +__version__ = "3.15.6-nightly.3"