From 7babd66ee01e86de4263a4ca28d496e77287e8ea Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Wed, 18 Jan 2023 00:11:02 +0100
Subject: [PATCH 1/4] Add global unique subsets validator

---
 .../publish/help/validate_unique_subsets.xml | 17 +++
 .../publish/validate_unique_subsets.py       | 76 +++++++++++++++++++
 2 files changed, 93 insertions(+)
 create mode 100644 openpype/plugins/publish/help/validate_unique_subsets.xml
 create mode 100644 openpype/plugins/publish/validate_unique_subsets.py

diff --git a/openpype/plugins/publish/help/validate_unique_subsets.xml b/openpype/plugins/publish/help/validate_unique_subsets.xml
new file mode 100644
index 0000000000..b18f046f84
--- /dev/null
+++ b/openpype/plugins/publish/help/validate_unique_subsets.xml
@@ -0,0 +1,17 @@
+
+
+Subset not unique
+
+## Clashing subset names found
+
+Multiple instances from your scene are set to publish into the same asset > subset.
+
+    Non unique subset names: '{non_unique}'
+
+### How to repair?
+
+Remove the offending instances or rename them to have a unique name.
+
+
+
\ No newline at end of file
diff --git a/openpype/plugins/publish/validate_unique_subsets.py b/openpype/plugins/publish/validate_unique_subsets.py
new file mode 100644
index 0000000000..11fb827770
--- /dev/null
+++ b/openpype/plugins/publish/validate_unique_subsets.py
@@ -0,0 +1,76 @@
+from collections import defaultdict
+import pyblish.api
+from openpype.pipeline.publish import (
+    PublishXmlValidationError,
+)
+
+
+class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
+    """Validate all subset names are unique.
+
+    This only validates whether the instances currently set to publish from
+    the workfile overlap one another for the asset + subset they are
+    publishing to.
+
+    This does not check against existing publishes in the database, since
+    publishing into an existing subset is allowed and results in a new
+    version.
+
+    The same subset name may appear more than once in the workfile as long
+    as those instances are set to publish to different assets.
+
+    """
+
+    label = "Validate Subset Uniqueness"
+    order = pyblish.api.ValidatorOrder
+    families = ["*"]
+
+    def process(self, context):
+
+        # Group instances per (asset, subset)
+        instance_per_asset_subset = defaultdict(list)
+        for instance in context:
+
+            # Ignore disabled instances
+            if not instance.data.get('publish', True):
+                continue
+
+            # Ignore instance without asset data
+            asset = instance.data.get("asset")
+            if asset is None:
+                self.log.warning("Instance found without `asset` data: "
+                                 "{}".format(instance.name))
+                continue
+
+            # Ignore instance without subset data
+            subset = instance.data.get("subset")
+            if subset is None:
+                self.log.warning("Instance found without `subset` data: "
+                                 "{}".format(instance.name))
+                continue
+
+            instance_per_asset_subset[(asset, subset)].append(instance)
+
+        non_unique = []
+        for (asset, subset), instances in instance_per_asset_subset.items():
+
+            # A single instance per asset, subset is fine
+            if len(instances) < 2:
+                continue
+
+            non_unique.append("{asset} > {subset}".format(asset=asset,
+                                                          subset=subset))
+
+        if not non_unique:
+            # All is ok
+            return
+
+        msg = ("Instance subset names {} are not unique. ".format(non_unique) +
+               "Please remove or rename duplicates.")
+        formatting_data = {
+            "non_unique": ",".join(non_unique)
+        }
+
+        if non_unique:
+            raise PublishXmlValidationError(self, msg,
+                                            formatting_data=formatting_data)

From 9df7d43cda49eeb83a4b77d50032cb0105fb46b6 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Wed, 18 Jan 2023 00:12:07 +0100
Subject: [PATCH 2/4] Remove fusion validate unique subsets in favor of global one

---
 .../publish/validate_unique_subsets.py | 29 -------------------
 1 file changed, 29 deletions(-)
 delete mode 100644 openpype/hosts/fusion/plugins/publish/validate_unique_subsets.py

diff --git a/openpype/hosts/fusion/plugins/publish/validate_unique_subsets.py b/openpype/hosts/fusion/plugins/publish/validate_unique_subsets.py
deleted file mode 100644
index b218a311ba..0000000000
--- a/openpype/hosts/fusion/plugins/publish/validate_unique_subsets.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import pyblish.api
-
-
-class ValidateUniqueSubsets(pyblish.api.InstancePlugin):
-    """Ensure all instances have a unique subset name"""
-
-    order = pyblish.api.ValidatorOrder
-    label = "Validate Unique Subsets"
-    families = ["render"]
-    hosts = ["fusion"]
-
-    @classmethod
-    def get_invalid(cls, instance):
-
-        context = instance.context
-        subset = instance.data["subset"]
-        for other_instance in context:
-            if other_instance == instance:
-                continue
-
-            if other_instance.data["subset"] == subset:
-                return [instance]  # current instance is invalid
-
-        return []
-
-    def process(self, instance):
-        invalid = self.get_invalid(instance)
-        if invalid:
-            raise RuntimeError("Animation content is invalid. See log.")

From be715734d47feae003e430996cdd2cdc63179240 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Wed, 18 Jan 2023 00:12:22 +0100
Subject: [PATCH 3/4] Remove photoshop validate unique subsets in favor of global one

---
 .../publish/help/validate_unique_subsets.xml | 14 -------
 .../publish/validate_unique_subsets.py       | 39 -------------------
 2 files changed, 53 deletions(-)
 delete mode 100644 openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml
 delete mode 100644 openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py

diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml b/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml
deleted file mode 100644
index 4b47973193..0000000000
--- a/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-Subset not unique
-
-## Non unique subset name found
-
-    Non unique subset names: '{non_unique}'
-### How to repair?
-
-Remove offending instance, rename it to have unique name. Maybe layer name wasn't used for multiple instances?
-
-
-
\ No newline at end of file
diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py
deleted file mode 100644
index 78e84729ce..0000000000
--- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import collections
-import pyblish.api
-from openpype.pipeline.publish import (
-    ValidateContentsOrder,
-    PublishXmlValidationError,
-)
-
-
-class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
-    """
-    Validate that all subset's names are unique.
-    """
-
-    label = "Validate Subset Uniqueness"
-    hosts = ["photoshop"]
-    order = ValidateContentsOrder
-    families = ["image"]
-
-    def process(self, context):
-        subset_names = []
-
-        for instance in context:
-            self.log.info("instance:: {}".format(instance.data))
-            if instance.data.get('publish'):
-                subset_names.append(instance.data.get('subset'))
-
-        non_unique = \
-            [item
-             for item, count in collections.Counter(subset_names).items()
-             if count > 1]
-        msg = ("Instance subset names {} are not unique. ".format(non_unique) +
-               "Remove duplicates via SubsetManager.")
-        formatting_data = {
-            "non_unique": ",".join(non_unique)
-        }
-
-        if non_unique:
-            raise PublishXmlValidationError(self, msg,
-                                            formatting_data=formatting_data)

From 40620431a2a0ee4935e079249e63e67fb741ad91 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Wed, 18 Jan 2023 00:15:34 +0100
Subject: [PATCH 4/4] Remove maya validate unique subsets in favor of global one

---
 .../validate_review_subset_uniqueness.xml | 28 --------------
 .../validate_review_subset_uniqueness.py  | 38 -------------------
 2 files changed, 66 deletions(-)
 delete mode 100644 openpype/hosts/maya/plugins/publish/help/validate_review_subset_uniqueness.xml
 delete mode 100644 openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py

diff --git a/openpype/hosts/maya/plugins/publish/help/validate_review_subset_uniqueness.xml b/openpype/hosts/maya/plugins/publish/help/validate_review_subset_uniqueness.xml
deleted file mode 100644
index fd1bf4cbaa..0000000000
--- a/openpype/hosts/maya/plugins/publish/help/validate_review_subset_uniqueness.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-
-
-
-    Review subsets not unique
-
-    ## Non unique subset name found
-
-    Non unique subset names: '{non_unique}'
-
-    ### __Detailed Info__ (optional)
-
-    This might happen if you already published for this asset
-    review subset with legacy name {task}Review.
-    This legacy name limits possibility of publishing of multiple
-    reviews from a single workfile. Proper review subset name should
-    now
-    contain variant also (as 'Main', 'Default' etc.). That would
-    result in completely new subset though, so this situation must
-    be handled manually.
-
-    ### How to repair?
-
-    Legacy subsets must be removed from Openpype DB, please ask admin
-    to do that. Please provide them asset and subset names.
-
-
-
-
\ No newline at end of file
diff --git a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py
deleted file mode 100644
index 361c594013..0000000000
--- a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-import collections
-import pyblish.api
-from openpype.pipeline.publish import (
-    ValidateContentsOrder,
-    PublishXmlValidationError,
-)
-
-
-class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin):
-    """Validates that review subset has unique name."""
-
-    order = ValidateContentsOrder
-    hosts = ["maya"]
-    families = ["review"]
-    label = "Validate Review Subset Unique"
-
-    def process(self, context):
-        subset_names = []
-
-        for instance in context:
-            self.log.debug("Instance: {}".format(instance.data))
-            if instance.data.get('publish'):
-                subset_names.append(instance.data.get('subset'))
-
-        non_unique = \
-            [item
-             for item, count in collections.Counter(subset_names).items()
-             if count > 1]
-        msg = ("Instance subset names {} are not unique. ".format(non_unique) +
-               "Ask admin to remove subset from DB for multiple reviews.")
-        formatting_data = {
-            "non_unique": ",".join(non_unique)
-        }
-
-        if non_unique:
-            raise PublishXmlValidationError(self, msg,
-                                            formatting_data=formatting_data)