diff --git a/openpype/hosts/fusion/plugins/publish/validate_unique_subsets.py b/openpype/hosts/fusion/plugins/publish/validate_unique_subsets.py
deleted file mode 100644
index b218a311ba..0000000000
--- a/openpype/hosts/fusion/plugins/publish/validate_unique_subsets.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import pyblish.api
-
-
-class ValidateUniqueSubsets(pyblish.api.InstancePlugin):
- """Ensure all instances have a unique subset name"""
-
- order = pyblish.api.ValidatorOrder
- label = "Validate Unique Subsets"
- families = ["render"]
- hosts = ["fusion"]
-
- @classmethod
- def get_invalid(cls, instance):
-
- context = instance.context
- subset = instance.data["subset"]
- for other_instance in context:
- if other_instance == instance:
- continue
-
- if other_instance.data["subset"] == subset:
- return [instance] # current instance is invalid
-
- return []
-
- def process(self, instance):
- invalid = self.get_invalid(instance)
- if invalid:
- raise RuntimeError("Animation content is invalid. See log.")
diff --git a/openpype/hosts/maya/plugins/publish/help/validate_review_subset_uniqueness.xml b/openpype/hosts/maya/plugins/publish/help/validate_review_subset_uniqueness.xml
deleted file mode 100644
index fd1bf4cbaa..0000000000
--- a/openpype/hosts/maya/plugins/publish/help/validate_review_subset_uniqueness.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-
-
-
- Review subsets not unique
-
- ## Non unique subset name found
-
- Non unique subset names: '{non_unique}'
-
- ### __Detailed Info__ (optional)
-
- This might happen if you already published for this asset
- review subset with legacy name {task}Review.
- This legacy name limits possibility of publishing of multiple
- reviews from a single workfile. Proper review subset name should
- now
- contain variant also (as 'Main', 'Default' etc.). That would
- result in completely new subset though, so this situation must
- be handled manually.
-
- ### How to repair?
-
- Legacy subsets must be removed from Openpype DB, please ask admin
- to do that. Please provide them asset and subset names.
-
-
-
-
\ No newline at end of file
diff --git a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py
deleted file mode 100644
index 361c594013..0000000000
--- a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-import collections
-import pyblish.api
-from openpype.pipeline.publish import (
- ValidateContentsOrder,
- PublishXmlValidationError,
-)
-
-
-class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin):
- """Validates that review subset has unique name."""
-
- order = ValidateContentsOrder
- hosts = ["maya"]
- families = ["review"]
- label = "Validate Review Subset Unique"
-
- def process(self, context):
- subset_names = []
-
- for instance in context:
- self.log.debug("Instance: {}".format(instance.data))
- if instance.data.get('publish'):
- subset_names.append(instance.data.get('subset'))
-
- non_unique = \
- [item
- for item, count in collections.Counter(subset_names).items()
- if count > 1]
- msg = ("Instance subset names {} are not unique. ".format(non_unique) +
- "Ask admin to remove subset from DB for multiple reviews.")
- formatting_data = {
- "non_unique": ",".join(non_unique)
- }
-
- if non_unique:
- raise PublishXmlValidationError(self, msg,
- formatting_data=formatting_data)
diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py
deleted file mode 100644
index 78e84729ce..0000000000
--- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import collections
-import pyblish.api
-from openpype.pipeline.publish import (
- ValidateContentsOrder,
- PublishXmlValidationError,
-)
-
-
-class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
- """
- Validate that all subset's names are unique.
- """
-
- label = "Validate Subset Uniqueness"
- hosts = ["photoshop"]
- order = ValidateContentsOrder
- families = ["image"]
-
- def process(self, context):
- subset_names = []
-
- for instance in context:
- self.log.info("instance:: {}".format(instance.data))
- if instance.data.get('publish'):
- subset_names.append(instance.data.get('subset'))
-
- non_unique = \
- [item
- for item, count in collections.Counter(subset_names).items()
- if count > 1]
- msg = ("Instance subset names {} are not unique. ".format(non_unique) +
- "Remove duplicates via SubsetManager.")
- formatting_data = {
- "non_unique": ",".join(non_unique)
- }
-
- if non_unique:
- raise PublishXmlValidationError(self, msg,
- formatting_data=formatting_data)
diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml b/openpype/plugins/publish/help/validate_unique_subsets.xml
similarity index 53%
rename from openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml
rename to openpype/plugins/publish/help/validate_unique_subsets.xml
index 4b47973193..b18f046f84 100644
--- a/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml
+++ b/openpype/plugins/publish/help/validate_unique_subsets.xml
@@ -3,12 +3,15 @@
Subset not unique
-## Non unique subset name found
+## Clashing subset names found
+
+Multiple instances in your scene are set to publish into the same asset > subset.
Non unique subset names: '{non_unique}'
+
### How to repair?
-Remove offending instance, rename it to have unique name. Maybe layer name wasn't used for multiple instances?
+Remove the offending instances or rename them so each has a unique name.
\ No newline at end of file
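The `{non_unique}` token in the updated help text is a placeholder that the raised publish error is expected to fill from the validator's `formatting_data` mapping. A minimal sketch of that substitution, assuming plain str.format-style replacement (the actual mechanism lives inside `PublishXmlValidationError` and is not part of this patch; the sample value is made up):

# Hypothetical illustration only: shows how a '{non_unique}' placeholder in
# the help text could be filled from a formatting_data mapping.
help_text = (
    "## Clashing subset names found\n"
    "Non unique subset names: '{non_unique}'"
)
formatting_data = {"non_unique": "shot010 > renderMain,shot020 > renderMain"}
print(help_text.format(**formatting_data))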
diff --git a/openpype/plugins/publish/validate_unique_subsets.py b/openpype/plugins/publish/validate_unique_subsets.py
new file mode 100644
index 0000000000..11fb827770
--- /dev/null
+++ b/openpype/plugins/publish/validate_unique_subsets.py
@@ -0,0 +1,76 @@
+from collections import defaultdict
+import pyblish.api
+from openpype.pipeline.publish import (
+ PublishXmlValidationError,
+)
+
+
+class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
+ """Validate all subset names are unique.
+
+    This only validates whether the instances currently set to publish from
+    the workfile clash with one another on the asset + subset they are
+    publishing to.
+
+    It does not check against existing publishes in the database, since
+    publishing into an existing subset is allowed and simply results in a
+    new version.
+
+    The same subset name may appear twice in the workfile as long as the
+    instances publish to different assets.
+
+ """
+
+ label = "Validate Subset Uniqueness"
+ order = pyblish.api.ValidatorOrder
+ families = ["*"]
+
+ def process(self, context):
+
+        # Group instances per (asset, subset) pair
+ instance_per_asset_subset = defaultdict(list)
+ for instance in context:
+
+ # Ignore disabled instances
+ if not instance.data.get('publish', True):
+ continue
+
+ # Ignore instance without asset data
+ asset = instance.data.get("asset")
+ if asset is None:
+ self.log.warning("Instance found without `asset` data: "
+ "{}".format(instance.name))
+ continue
+
+ # Ignore instance without subset data
+ subset = instance.data.get("subset")
+ if subset is None:
+ self.log.warning("Instance found without `subset` data: "
+ "{}".format(instance.name))
+ continue
+
+ instance_per_asset_subset[(asset, subset)].append(instance)
+
+ non_unique = []
+ for (asset, subset), instances in instance_per_asset_subset.items():
+
+ # A single instance per asset, subset is fine
+ if len(instances) < 2:
+ continue
+
+ non_unique.append("{asset} > {subset}".format(asset=asset,
+ subset=subset))
+
+        if not non_unique:
+            # All is ok
+            return
+
+        msg = (
+            "Instance subset names {} are not unique. "
+            "Please remove or rename duplicates.".format(non_unique)
+        )
+        formatting_data = {
+            "non_unique": ",".join(non_unique)
+        }
+        raise PublishXmlValidationError(self, msg,
+                                        formatting_data=formatting_data)
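At its core, the new context-level validator is a group-by on (asset, subset) followed by a duplicate check. A self-contained sketch of that pattern, with plain dicts standing in for pyblish instances (the asset and subset names below are made up for illustration):

from collections import defaultdict

# Stand-ins for pyblish instances; only the keys the validator reads are set.
instances = [
    {"asset": "shot010", "subset": "renderMain", "publish": True},
    {"asset": "shot010", "subset": "renderMain", "publish": True},
    {"asset": "shot020", "subset": "renderMain", "publish": True},
]

instances_per_key = defaultdict(list)
for instance in instances:
    if not instance.get("publish", True):
        continue  # disabled instances are ignored
    instances_per_key[(instance["asset"], instance["subset"])].append(instance)

# Only (asset, subset) pairs claimed by two or more instances are reported.
non_unique = [
    "{} > {}".format(asset, subset)
    for (asset, subset), grouped in instances_per_key.items()
    if len(grouped) > 1
]
print(non_unique)  # ['shot010 > renderMain'] -- shot020 is fine on its own

Grouping on the (asset, subset) pair rather than the subset name alone is what allows the same subset name to be published to different assets without triggering the validator.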