Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Add global unique subsets validator
parent 23b8edd1e7
commit 7babd66ee0
2 changed files with 93 additions and 0 deletions
openpype/plugins/publish/help/validate_unique_subsets.xml (new file, 17 lines)
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Subset not unique</title>
        <description>
## Clashing subset names found

Multiple instances from your scene are set to publish into the same asset > subset.

Non-unique subset names: '{non_unique}'

### How to repair?

Remove the offending instances or rename them to have a unique name.
        </description>
    </error>
</root>
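For illustration only (not part of this commit and not the OpenPype implementation): a minimal sketch of how a help template like the one above could be read and how formatting data would fill the {non_unique} placeholder, assuming only the Python standard library. The file path and the example value are placeholders.

# Sketch only: read the help XML and substitute the {non_unique} placeholder
# the same way formatting_data is expected to be applied by the publisher.
import xml.etree.ElementTree as ET

def format_help_description(xml_path, error_id, formatting_data):
    """Return the formatted description text for the given error id."""
    root = ET.parse(xml_path).getroot()
    for error in root.findall("error"):
        if error.get("id") == error_id:
            description = error.findtext("description") or ""
            # str.format fills placeholders such as {non_unique}
            return description.format(**formatting_data)
    raise KeyError("No error with id '{}' in '{}'".format(error_id, xml_path))

# Hypothetical usage with one clashing "asset > subset" pair:
text = format_help_description(
    "openpype/plugins/publish/help/validate_unique_subsets.xml",
    "main",
    {"non_unique": "characterA > modelMain"},
)
print(text)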
openpype/plugins/publish/validate_unique_subsets.py (new file, 76 lines)
@@ -0,0 +1,76 @@
from collections import defaultdict

import pyblish.api

from openpype.pipeline.publish import (
    PublishXmlValidationError,
)


class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
    """Validate all subset names are unique.

    This only validates whether the instances currently set to publish from
    the workfile overlap one another for the asset + subset they are
    publishing to.

    This does not perform any check against existing publishes in the
    database, since publishing into an existing subset is allowed and
    results in versioning.

    A subset name may appear twice in the workfile if one of the instances
    is set to publish to a different asset than the other.

    """

    label = "Validate Subset Uniqueness"
    order = pyblish.api.ValidatorOrder
    families = ["*"]

    def process(self, context):

        # Find instances per (asset, subset)
        instance_per_asset_subset = defaultdict(list)
        for instance in context:

            # Ignore disabled instances
            if not instance.data.get('publish', True):
                continue

            # Ignore instances without asset data
            asset = instance.data.get("asset")
            if asset is None:
                self.log.warning("Instance found without `asset` data: "
                                 "{}".format(instance.name))
                continue

            # Ignore instances without subset data
            subset = instance.data.get("subset")
            if subset is None:
                self.log.warning("Instance found without `subset` data: "
                                 "{}".format(instance.name))
                continue

            instance_per_asset_subset[(asset, subset)].append(instance)

        # Collect every (asset, subset) pair used by more than one instance
        non_unique = []
        for (asset, subset), instances in instance_per_asset_subset.items():

            # A single instance per asset and subset is fine
            if len(instances) < 2:
                continue

            non_unique.append("{asset} > {subset}".format(asset=asset,
                                                          subset=subset))

        if not non_unique:
            # All is ok
            return

        msg = ("Instance subset names {} are not unique. ".format(non_unique)
               + "Please remove or rename duplicates.")
        formatting_data = {
            "non_unique": ",".join(non_unique)
        }

        raise PublishXmlValidationError(self, msg,
                                        formatting_data=formatting_data)
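For illustration only (not part of this commit): a hedged usage sketch that builds a bare pyblish context with two instances publishing to the same asset and subset, then runs the validator directly. It assumes pyblish and the openpype package are importable; the plugin's import path and the asset/subset values are assumptions, since in production pyblish discovers this plugin from the registered publish plugin directories.

import pyblish.api

from openpype.pipeline.publish import PublishXmlValidationError
# Assumed import path for this sketch; normally pyblish loads the plugin
# from its registered plugin paths rather than via a package import.
from openpype.plugins.publish.validate_unique_subsets import (
    ValidateSubsetUniqueness,
)

context = pyblish.api.Context()
for name in ("modelMainA", "modelMainB"):
    instance = context.create_instance(name)
    # Hypothetical example values; both instances clash on asset + subset.
    instance.data["asset"] = "characterA"
    instance.data["subset"] = "modelMain"

try:
    ValidateSubsetUniqueness().process(context)
except PublishXmlValidationError as exc:
    print("Validation failed as expected: {}".format(exc))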