Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)
Merge pull request #4336 from BigRoy/global_validate_unique_subsets

Fixes https://github.com/ynput/OpenPype/issues/4330

Commit 4d567c0316
6 changed files with 81 additions and 136 deletions
(deleted file, 29 lines)
@@ -1,29 +0,0 @@
import pyblish.api


class ValidateUniqueSubsets(pyblish.api.InstancePlugin):
    """Ensure all instances have a unique subset name"""

    order = pyblish.api.ValidatorOrder
    label = "Validate Unique Subsets"
    families = ["render"]
    hosts = ["fusion"]

    @classmethod
    def get_invalid(cls, instance):

        context = instance.context
        subset = instance.data["subset"]
        for other_instance in context:
            if other_instance == instance:
                continue

            if other_instance.data["subset"] == subset:
                return [instance]  # current instance is invalid

        return []

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Animation content is invalid. See log.")
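For clarity: the deleted Fusion validator above compares each instance against every other instance in the context and only looks at the subset name, not the target asset. A rough standalone sketch of that check, using plain dictionaries instead of pyblish instances (all names here are made up for illustration):

# Standalone sketch of the per-instance check, not the pyblish API itself.
instances = [
    {"name": "renderA", "subset": "renderMain"},
    {"name": "renderB", "subset": "renderMain"},
    {"name": "renderC", "subset": "renderSecondary"},
]

for instance in instances:
    others = [i for i in instances if i is not instance]
    clashes = [i for i in others if i["subset"] == instance["subset"]]
    if clashes:
        # renderA and renderB each report the other as a clash.
        print("{} clashes with {}".format(
            instance["name"], [i["name"] for i in clashes]))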
(deleted file, 28 lines)
@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Review subsets not unique</title>
<description>
## Non unique subset name found

Non unique subset names: '{non_unique}'
<detail>
### __Detailed Info__ (optional)

This might happen if you already published for this asset
review subset with legacy name {task}Review.
This legacy name limits possibility of publishing of multiple
reviews from a single workfile. Proper review subset name should
now
contain variant also (as 'Main', 'Default' etc.). That would
result in completely new subset though, so this situation must
be handled manually.
</detail>
### How to repair?

Legacy subsets must be removed from Openpype DB, please ask admin
to do that. Please provide them asset and subset names.

</description>
</error>
</root>
(deleted file, 38 lines)
@@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
import collections
import pyblish.api
from openpype.pipeline.publish import (
    ValidateContentsOrder,
    PublishXmlValidationError,
)


class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin):
    """Validates that review subset has unique name."""

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["review"]
    label = "Validate Review Subset Unique"

    def process(self, context):
        subset_names = []

        for instance in context:
            self.log.debug("Instance: {}".format(instance.data))
            if instance.data.get('publish'):
                subset_names.append(instance.data.get('subset'))

        non_unique = \
            [item
             for item, count in collections.Counter(subset_names).items()
             if count > 1]
        msg = ("Instance subset names {} are not unique. ".format(non_unique) +
               "Ask admin to remove subset from DB for multiple reviews.")
        formatting_data = {
            "non_unique": ",".join(non_unique)
        }

        if non_unique:
            raise PublishXmlValidationError(self, msg,
                                            formatting_data=formatting_data)
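The deleted Maya validator above and the Photoshop validator below share the same duplicate detection: collect the subset names of all instances marked for publishing, then keep any name that collections.Counter counts more than once. A minimal standalone example of that filter (subset names invented for illustration):

import collections

# Invented subset names; 'reviewMain' appears twice and is reported.
subset_names = ["reviewMain", "reviewMain", "modelMain"]

non_unique = [
    name
    for name, count in collections.Counter(subset_names).items()
    if count > 1
]
print(non_unique)  # ['reviewMain']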
(deleted file, 39 lines)
@@ -1,39 +0,0 @@
import collections
import pyblish.api
from openpype.pipeline.publish import (
    ValidateContentsOrder,
    PublishXmlValidationError,
)


class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
    """
    Validate that all subset's names are unique.
    """

    label = "Validate Subset Uniqueness"
    hosts = ["photoshop"]
    order = ValidateContentsOrder
    families = ["image"]

    def process(self, context):
        subset_names = []

        for instance in context:
            self.log.info("instance:: {}".format(instance.data))
            if instance.data.get('publish'):
                subset_names.append(instance.data.get('subset'))

        non_unique = \
            [item
             for item, count in collections.Counter(subset_names).items()
             if count > 1]
        msg = ("Instance subset names {} are not unique. ".format(non_unique) +
               "Remove duplicates via SubsetManager.")
        formatting_data = {
            "non_unique": ",".join(non_unique)
        }

        if non_unique:
            raise PublishXmlValidationError(self, msg,
                                            formatting_data=formatting_data)
(changed file, help XML)
@@ -3,12 +3,15 @@
 <error id="main">
 <title>Subset not unique</title>
 <description>
-## Non unique subset name found
+## Clashing subset names found

+Multiple instances from your scene are set to publish into the same asset > subset.
+
 Non unique subset names: '{non_unique}'

 ### How to repair?

-Remove offending instance, rename it to have unique name. Maybe layer name wasn't used for multiple instances?
+Remove the offending instances or rename them to have a unique name.
 </description>
 </error>
 </root>
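For reference, the '{non_unique}' placeholder in this help file is filled from the formatting_data that the validator passes to PublishXmlValidationError; the substitution presumably behaves like standard str.format, roughly:

# Hypothetical values; shows how '{non_unique}' would be substituted.
description = "Non unique subset names: '{non_unique}'"
formatting_data = {"non_unique": "imageMain,imageMain"}
print(description.format(**formatting_data))
# -> Non unique subset names: 'imageMain,imageMain'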
openpype/plugins/publish/validate_unique_subsets.py (new file, 76 lines)
@@ -0,0 +1,76 @@
from collections import defaultdict
import pyblish.api
from openpype.pipeline.publish import (
    PublishXmlValidationError,
)


class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
    """Validate all subset names are unique.

    This only validates whether the instances currently set to publish from
    the workfile overlap one another for the asset + subset they are
    publishing to.

    This does not perform any check against existing publishes in the
    database since it is allowed to publish into existing subsets resulting
    in versioning.

    A subset may appear twice to publish from the workfile if one
    of them is set to publish to another asset than the other.

    """

    label = "Validate Subset Uniqueness"
    order = pyblish.api.ValidatorOrder
    families = ["*"]

    def process(self, context):

        # Find instance per (asset, subset)
        instance_per_asset_subset = defaultdict(list)
        for instance in context:

            # Ignore disabled instances
            if not instance.data.get('publish', True):
                continue

            # Ignore instance without asset data
            asset = instance.data.get("asset")
            if asset is None:
                self.log.warning("Instance found without `asset` data: "
                                 "{}".format(instance.name))
                continue

            # Ignore instance without subset data
            subset = instance.data.get("subset")
            if subset is None:
                self.log.warning("Instance found without `subset` data: "
                                 "{}".format(instance.name))
                continue

            instance_per_asset_subset[(asset, subset)].append(instance)

        non_unique = []
        for (asset, subset), instances in instance_per_asset_subset.items():

            # A single instance per asset, subset is fine
            if len(instances) < 2:
                continue

            non_unique.append("{asset} > {subset}".format(asset=asset,
                                                          subset=subset))

        if not non_unique:
            # All is ok
            return

        msg = ("Instance subset names {} are not unique. ".format(non_unique) +
               "Please remove or rename duplicates.")
        formatting_data = {
            "non_unique": ",".join(non_unique)
        }

        if non_unique:
            raise PublishXmlValidationError(self, msg,
                                            formatting_data=formatting_data)
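Below is a minimal usage sketch of the new context-level validator, assuming pyblish and openpype are importable and the ValidateSubsetUniqueness class above is available in the current namespace; the instance, asset, and subset names are invented for the example:

import pyblish.api

from openpype.pipeline.publish import PublishXmlValidationError

# Assumes ValidateSubsetUniqueness (defined above) has been imported here.
context = pyblish.api.Context()
for name, asset, subset in [
    ("imageA", "sh010", "imageMain"),
    ("imageB", "sh010", "imageMain"),    # same asset + subset as imageA
    ("imageC", "sh020", "imageMain"),    # same subset, different asset: fine
]:
    instance = context.create_instance(name)
    instance.data["asset"] = asset
    instance.data["subset"] = subset
    instance.data["publish"] = True

try:
    ValidateSubsetUniqueness().process(context)
except PublishXmlValidationError as exc:
    print(exc)  # message lists "sh010 > imageMain" as non-unique

Note that imageC does not clash because uniqueness is now keyed on the (asset, subset) pair rather than on the subset name alone, which is the main behavioral change over the deleted per-host validators.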