modified global plugins

This commit is contained in:
Jakub Trllo 2024-02-21 18:37:52 +01:00
parent d7785ef2da
commit 464fc4c783
26 changed files with 266 additions and 229 deletions

View file

@ -72,7 +72,7 @@ class CleanUp(pyblish.api.InstancePlugin):
self.clean_renders(instance, skip_cleanup_filepaths)
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
if instance.data["productType"] in ef]:
return
import tempfile
@ -105,8 +105,8 @@ class CleanUp(pyblish.api.InstancePlugin):
def clean_renders(self, instance, skip_cleanup_filepaths):
transfers = instance.data.get("transfers", list())
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
instance_families = instance.data.get("families", list())
instance_product_type = instance.data.get("productType")
dirnames = []
transfers_dirs = []
@ -127,19 +127,24 @@ class CleanUp(pyblish.api.InstancePlugin):
).format(src))
continue
if os.path.normpath(src) != os.path.normpath(dest):
if instance_family == 'render' or 'render' in current_families:
self.log.info("Removing src: `{}`...".format(src))
try:
os.remove(src)
except PermissionError:
self.log.warning(
"Insufficient permission to delete {}".format(src)
)
continue
if os.path.normpath(src) == os.path.normpath(dest):
continue
# add dir for cleanup
dirnames.append(os.path.dirname(src))
if (
instance_product_type == "render"
or "render" in instance_families
):
self.log.info("Removing src: `{}`...".format(src))
try:
os.remove(src)
except PermissionError:
self.log.warning(
"Insufficient permission to delete {}".format(src)
)
continue
# add dir for cleanup
dirnames.append(os.path.dirname(src))
# clean by regex patterns
# make unique set

View file

@ -4,8 +4,8 @@ Requires:
context -> projectEntity
context -> assetEntity
instance -> folderPath
instance -> subset
instance -> family
instance -> productName
instance -> productType
Optional:
instance -> version
@ -120,7 +120,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
).format(joined_asset_names))
def fill_latest_versions(self, context, project_name):
"""Try to find latest version for each instance's subset.
"""Try to find latest version for each instance's product name.
Key "latestVersion" is always set to latest version or `None`.
@ -134,7 +134,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
self.log.debug("Querying latest versions for instances.")
hierarchy = {}
names_by_asset_ids = collections.defaultdict(set)
names_by_folder_ids = collections.defaultdict(set)
for instance in context:
# Make sure `"latestVersion"` key is set
latest_version = instance.data.get("latestVersion")
@ -145,41 +145,41 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
if not asset_doc:
continue
# Store asset ids and subset names for queries
asset_id = asset_doc["_id"]
subset_name = instance.data["subset"]
# Store folder ids and product names for queries
folder_id = asset_doc["_id"]
product_name = instance.data["productName"]
# Prepare instance hierarchy for faster filling latest versions
if asset_id not in hierarchy:
hierarchy[asset_id] = {}
if subset_name not in hierarchy[asset_id]:
hierarchy[asset_id][subset_name] = []
hierarchy[asset_id][subset_name].append(instance)
names_by_asset_ids[asset_id].add(subset_name)
if folder_id not in hierarchy:
hierarchy[folder_id] = {}
if product_name not in hierarchy[folder_id]:
hierarchy[folder_id][product_name] = []
hierarchy[folder_id][product_name].append(instance)
names_by_folder_ids[folder_id].add(product_name)
subset_docs = []
if names_by_asset_ids:
if names_by_folder_ids:
subset_docs = list(get_subsets(
project_name, names_by_asset_ids=names_by_asset_ids
project_name, names_by_folder_ids=names_by_folder_ids
))
subset_ids = [
product_ids = {
subset_doc["_id"]
for subset_doc in subset_docs
]
}
last_version_docs_by_subset_id = get_last_versions(
project_name, subset_ids, fields=["name"]
last_version_docs_by_product_id = get_last_versions(
project_name, product_ids, fields=["name"]
)
for subset_doc in subset_docs:
subset_id = subset_doc["_id"]
last_version_doc = last_version_docs_by_subset_id.get(subset_id)
product_id = subset_doc["_id"]
last_version_doc = last_version_docs_by_product_id.get(product_id)
if last_version_doc is None:
continue
asset_id = subset_doc["parent"]
subset_name = subset_doc["name"]
_instances = hierarchy[asset_id][subset_name]
folder_id = subset_doc["parent"]
product_name = subset_doc["name"]
_instances = hierarchy[folder_id][product_name]
for _instance in _instances:
_instance.data["latestVersion"] = last_version_doc["name"]
@ -191,9 +191,15 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
for instance in context:
anatomy_data = copy.deepcopy(context.data["anatomyData"])
product_name = instance.data["productName"]
product_type = instance.data["productType"]
anatomy_data.update({
"family": instance.data["family"],
"subset": instance.data["subset"],
"family": product_type,
"subset": product_name,
"product": {
"name": product_name,
"type": product_type,
}
})
self._fill_asset_data(instance, project_doc, anatomy_data)
@ -227,8 +233,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
instance.context.data["hostName"],
task_name=task_name,
task_type=task_type,
family=instance.data["family"],
subset=instance.data["subset"]
product_type=instance.data["productType"],
product_name=instance.data["productName"]
)
anatomy_data["version"] = version_number

View file

@ -14,7 +14,7 @@ from ayon_core.pipeline.load import get_representation_path_with_anatomy
class CollectAudio(pyblish.api.ContextPlugin):
"""Collect asset's last published audio.
The audio subset name searched for is defined in:
The audio product name searched for is defined in:
project settings > Collect Audio
Note:
@ -71,9 +71,9 @@ class CollectAudio(pyblish.api.ContextPlugin):
asset_names = set(instances_by_asset_name.keys())
self.log.debug((
"Searching for audio product '{subset}' in assets {assets}"
"Searching for audio product '{product}' in assets {assets}"
).format(
subset=self.audio_product_name,
product=self.audio_product_name,
assets=", ".join([
'"{}"'.format(asset_name)
for asset_name in asset_names
@ -102,64 +102,64 @@ class CollectAudio(pyblish.api.ContextPlugin):
}]
self.log.debug("Audio Data added to instance ...")
def query_representations(self, project_name, asset_names):
"""Query representations related to audio subsets for passed assets.
def query_representations(self, project_name, folder_paths):
"""Query representations related to audio products for passed assets.
Args:
project_name (str): Project in which we're looking for all
entities.
asset_names (Iterable[str]): Asset names where to look for audio
subsets and their representations.
folder_paths (Iterable[str]): Folder paths where to look for audio
products and their representations.
Returns:
collections.defaultdict[str, List[Dict[Str, Any]]]: Representations
related to audio subsets by asset name.
related to audio products by asset name.
"""
output = collections.defaultdict(list)
# Query asset documents
asset_docs = get_assets(
project_name,
asset_names=asset_names,
asset_names=folder_paths,
fields=["_id", "name", "data.parents"]
)
asset_id_by_name = {
folder_id_by_path = {
get_asset_name_identifier(asset_doc): asset_doc["_id"]
for asset_doc in asset_docs
}
asset_ids = set(asset_id_by_name.values())
folder_ids = set(folder_id_by_path.values())
# Query subsets with name define by 'audio_product_name' attr
# - one or none subsets with the name should be available on an asset
# Query products with name defined by 'audio_product_name' attr
# - one or no products with the name should be available on an asset
subset_docs = get_subsets(
project_name,
subset_names=[self.audio_product_name],
asset_ids=asset_ids,
asset_ids=folder_ids,
fields=["_id", "parent"]
)
subset_id_by_asset_id = {}
product_id_by_folder_id = {}
for subset_doc in subset_docs:
asset_id = subset_doc["parent"]
subset_id_by_asset_id[asset_id] = subset_doc["_id"]
folder_id = subset_doc["parent"]
product_id_by_folder_id[folder_id] = subset_doc["_id"]
subset_ids = set(subset_id_by_asset_id.values())
if not subset_ids:
product_ids = set(product_id_by_folder_id.values())
if not product_ids:
return output
# Find all latest versions for the subsets
version_docs_by_subset_id = get_last_versions(
project_name, subset_ids=subset_ids, fields=["_id", "parent"]
# Find all latest versions for the products
version_docs_by_product_id = get_last_versions(
project_name, subset_ids=product_ids, fields=["_id", "parent"]
)
version_id_by_subset_id = {
subset_id: version_doc["_id"]
for subset_id, version_doc in version_docs_by_subset_id.items()
version_id_by_product_id = {
product_id: version_doc["_id"]
for product_id, version_doc in version_docs_by_product_id.items()
}
version_ids = set(version_id_by_subset_id.values())
version_ids = set(version_id_by_product_id.values())
if not version_ids:
return output
# Find representations under latest versions of audio subsets
# Find representations under latest versions of audio products
repre_docs = get_representations(
project_name, version_ids=version_ids
)
@ -171,9 +171,9 @@ class CollectAudio(pyblish.api.ContextPlugin):
if not repre_docs_by_version_id:
return output
for asset_name in asset_names:
asset_id = asset_id_by_name.get(asset_name)
subset_id = subset_id_by_asset_id.get(asset_id)
version_id = version_id_by_subset_id.get(subset_id)
output[asset_name] = repre_docs_by_version_id[version_id]
for folder_path in folder_paths:
folder_id = folder_id_by_path.get(folder_path)
product_id = product_id_by_folder_id.get(folder_id)
version_id = version_id_by_product_id.get(product_id)
output[folder_path] = repre_docs_by_version_id[version_id]
return output

View file

@ -38,8 +38,8 @@ class CollectCustomStagingDir(pyblish.api.InstancePlugin):
template_key = "transient"
def process(self, instance):
family = instance.data["family"]
subset_name = instance.data["subset"]
product_type = instance.data["productType"]
product_name = instance.data["productName"]
host_name = instance.context.data["hostName"]
project_name = instance.context.data["projectName"]
project_settings = instance.context.data["project_settings"]
@ -47,9 +47,15 @@ class CollectCustomStagingDir(pyblish.api.InstancePlugin):
task = instance.data["anatomyData"].get("task", {})
transient_tml, is_persistent = get_custom_staging_dir_info(
project_name, host_name, family, task.get("name"),
task.get("type"), subset_name, project_settings=project_settings,
anatomy=anatomy, log=self.log)
project_name,
host_name,
product_type,
product_name,
task.get("name"),
task.get("type"),
project_settings=project_settings,
anatomy=anatomy,
log=self.log)
if transient_tml:
anatomy_data = copy.deepcopy(instance.data["anatomyData"])
@ -66,5 +72,5 @@ class CollectCustomStagingDir(pyblish.api.InstancePlugin):
result_str = "Not adding"
self.log.debug("{} custom staging dir for instance with '{}'".format(
result_str, family
result_str, product_type
))

View file

@ -17,7 +17,7 @@ class CollectFramesFixDef(
):
"""Provides text field to insert frame(s) to be rerendered.
Published files of last version of an instance subset are collected into
Published files of last version of an instance product are collected into
instance.data["last_version_published_files"]. All these but frames
mentioned in text field will be reused for new version.
"""
@ -40,7 +40,7 @@ class CollectFramesFixDef(
instance.data["frames_to_fix"] = frames_to_fix
subset_name = instance.data["subset"]
product_name = instance.data["productName"]
asset_name = instance.data["folderPath"]
project_entity = instance.data["projectEntity"]
@ -48,7 +48,7 @@ class CollectFramesFixDef(
version = get_last_version_by_subset_name(
project_name,
subset_name,
product_name,
asset_name=asset_name
)
if not version:

View file

@ -72,18 +72,21 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
transient_data,
thumbnail_path
):
subset = in_data["subset"]
product_name = in_data["productName"]
# If instance data already contain families then use it
instance_families = in_data.get("families") or []
# Add product type to families
instance_families.append(in_data["productType"])
instance = context.create_instance(subset)
instance = context.create_instance(product_name)
instance.data.update({
"subset": subset,
"publish": True,
"label": in_data.get("label") or product_name,
"name": product_name,
"folderPath": in_data["folderPath"],
"task": in_data["task"],
"label": in_data.get("label") or subset,
"name": subset,
"family": in_data["family"],
"productName": product_name,
"productType": in_data["productType"],
"families": instance_families,
"representations": [],
"thumbnailSource": thumbnail_path

View file

@ -28,11 +28,13 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
# shot data dict
shot_data = {}
family = instance.data["family"]
product_type = instance.data["productType"]
families = instance.data["families"]
# exclude other families then self.families with intersection
if not set(self.families).intersection(set(families + [family])):
if not set(self.families).intersection(
set(families + [product_type])
):
continue
# exclude if not masterLayer True

View file

@ -91,7 +91,7 @@ class CollectOtioReview(pyblish.api.InstancePlugin):
if otio_review_clips:
# add review track to instance and change label to reflect it
label = instance.data.get("label", instance.data["subset"])
label = instance.data.get("label", instance.data["productName"])
instance.data["label"] = label + " (review)"
instance.data["families"] += ["review", "ftrack"]
instance.data["otioReviewClips"] = otio_review_clips

View file

@ -16,7 +16,7 @@ from ayon_core.pipeline.publish import (
class CollectOtioSubsetResources(pyblish.api.InstancePlugin):
"""Get Resources for a subset version"""
"""Get Resources for a product version"""
label = "Collect OTIO Subset Resources"
order = pyblish.api.CollectorOrder + 0.491
@ -32,7 +32,7 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin):
make_sequence_collection
)
if "audio" in instance.data["family"]:
if "audio" in instance.data["productType"]:
return
if not instance.data.get("representations"):
@ -250,14 +250,14 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin):
# Task can be optional in anatomy data
host_name = context.data["hostName"]
family = instance.data["family"]
product_type = instance.data["productType"]
anatomy_data = instance.data["anatomyData"]
task_info = anatomy_data.get("task") or {}
return get_publish_template_name(
project_name,
host_name,
family,
product_type,
task_name=task_info.get("name"),
task_type=task_info.get("type"),
project_settings=context.data["project_settings"],

View file

@ -103,9 +103,9 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
for instance_data in data.get("instances"):
self.log.debug(" - processing instance for {}".format(
instance_data.get("subset")))
instance_data.get("productName")))
instance = self._context.create_instance(
instance_data.get("subset")
instance_data.get("productName")
)
self._fill_staging_dir(instance_data, anatomy)

View file

@ -156,16 +156,16 @@ class ExtractBurnin(publish.Extractor):
def main_process(self, instance):
host_name = instance.context.data["hostName"]
family = instance.data["family"]
product_type = instance.data["productType"]
product_name = instance.data["productName"]
task_data = instance.data["anatomyData"].get("task", {})
task_name = task_data.get("name")
task_type = task_data.get("type")
subset = instance.data["subset"]
filtering_criteria = {
"hosts": host_name,
"product_types": family,
"product_names": subset,
"product_types": product_type,
"product_names": product_name,
"task_names": task_name,
"task_types": task_type,
}
@ -177,9 +177,11 @@ class ExtractBurnin(publish.Extractor):
if not profile:
self.log.debug((
"Skipped instance. None of profiles in presets are for"
" Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
" | Task type \"{}\" | Product name \"{}\" "
).format(host_name, family, task_name, task_type, subset))
" Host: \"{}\" | Product type: \"{}\" | Product name \"{}\""
" | Task name \"{}\" | Task type \"{}\""
).format(
host_name, product_type, product_name, task_name, task_type
))
return
# Pre-filter burnin definitions by instance families
@ -189,7 +191,7 @@ class ExtractBurnin(publish.Extractor):
"Skipped instance. Burnin definitions are not set for profile"
" Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
" | Profile \"{}\""
).format(host_name, family, task_name, profile))
).format(host_name, product_type, task_name, profile))
return
burnin_options = self._get_burnin_options()

View file

@ -26,11 +26,11 @@ class ExtractOIIOTranscode(publish.Extractor):
This dict contains source colorspace information, collected by hosts.
Target colorspace is selected by profiles in the Settings, based on:
- families
- host
- host names
- product types
- product names
- task types
- task names
- subset names
Can produce one or more representations (with different extensions) based
on output definition in format:
@ -313,15 +313,15 @@ class ExtractOIIOTranscode(publish.Extractor):
def _get_profile(self, instance):
"""Returns profile if and how repre should be color transcoded."""
host_name = instance.context.data["hostName"]
family = instance.data["family"]
product_type = instance.data["productType"]
product_name = instance.data["productName"]
task_data = instance.data["anatomyData"].get("task", {})
task_name = task_data.get("name")
task_type = task_data.get("type")
subset = instance.data["subset"]
filtering_criteria = {
"hosts": host_name,
"product_types": family,
"product_names": subset,
"product_types": product_type,
"product_names": product_name,
"task_names": task_name,
"task_types": task_type,
}
@ -331,9 +331,11 @@ class ExtractOIIOTranscode(publish.Extractor):
if not profile:
self.log.debug((
"Skipped instance. None of profiles in presets are for"
" Host: \"{}\" | Product types: \"{}\" | Task \"{}\""
" | Task type \"{}\" | Product names: \"{}\" "
).format(host_name, family, task_name, task_type, subset))
" Host: \"{}\" | Product types: \"{}\" | Product names: \"{}\""
" | Task name \"{}\" | Task type \"{}\""
).format(
host_name, product_type, product_name, task_name, task_type
))
return profile

View file

@ -103,7 +103,9 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
else:
audio_fpath = recycling_file.pop()
if "audio" in (inst.data["families"] + [inst.data["family"]]):
if "audio" in (
inst.data["families"] + [inst.data["productType"]]
):
# create empty representation attr
if "representations" not in inst.data:
inst.data["representations"] = []
@ -140,10 +142,10 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
"""
return [
_i for _i in context
# filter only those with audio family
# filter only those with audio product type or family
# and also with reviewAudio data key
if bool("audio" in (
_i.data.get("families", []) + [_i.data["family"]])
_i.data.get("families", []) + [_i.data["productType"]])
) or _i.data.get("reviewAudio")
]

View file

@ -103,38 +103,38 @@ class ExtractReview(pyblish.api.InstancePlugin):
def _get_outputs_for_instance(self, instance):
host_name = instance.context.data["hostName"]
family = self.main_family_from_instance(instance)
product_type = instance.data["productType"]
self.log.debug("Host: \"{}\"".format(host_name))
self.log.debug("Family: \"{}\"".format(family))
self.log.debug("Product type: \"{}\"".format(product_type))
profile = filter_profiles(
self.profiles,
{
"hosts": host_name,
"product_types": family,
"product_types": product_type,
},
logger=self.log)
if not profile:
self.log.info((
"Skipped instance. None of profiles in presets are for"
" Host: \"{}\" | Family: \"{}\""
).format(host_name, family))
" Host: \"{}\" | Product type: \"{}\""
).format(host_name, product_type))
return
self.log.debug("Matching profile: \"{}\"".format(json.dumps(profile)))
subset_name = instance.data.get("subset")
product_name = instance.data.get("productName")
instance_families = self.families_from_instance(instance)
filtered_outputs = self.filter_output_defs(
profile, subset_name, instance_families
profile, product_name, instance_families
)
if not filtered_outputs:
self.log.info((
"Skipped instance. All output definitions from selected"
" profile do not match instance families \"{}\" or"
" subset name \"{}\"."
).format(str(instance_families), subset_name))
" product name \"{}\"."
).format(str(instance_families), product_name))
# Store `filename_suffix` to save arguments
profile_outputs = []
@ -1463,13 +1463,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
return filters
def main_family_from_instance(self, instance):
"""Returns main family of entered instance."""
family = instance.data.get("family")
if not family:
family = instance.data["families"][0]
return family
def families_from_instance(self, instance):
"""Returns all families of entered instance."""
families = []
@ -1497,7 +1490,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
return any(family.lower() in families_filter_lower
for family in families)
def filter_output_defs(self, profile, subset_name, families):
def filter_output_defs(self, profile, product_name, families):
"""Return outputs matching input instance families.
Output definitions without families filter are marked as valid.
@ -1505,7 +1498,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
Args:
profile (dict): Profile from presets matching current context.
families (list): All families of current instance.
subset_name (str): name of subset
product_name (str): Product name.
Returns:
dict[str, Any]: Containing all output definitions matching entered
@ -1536,11 +1529,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
# Skip empty strings
if name_filter
]
if subset_name and product_name_filters:
if product_name and product_name_filters:
match = False
for product_name_filter in product_name_filters:
compiled = re.compile(product_name_filter)
if compiled.search(subset_name):
if compiled.search(product_name):
match = True
break

View file

@ -86,16 +86,16 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
instance.data["representations"].remove(repre)
def _main_process(self, instance):
subset_name = instance.data["subset"]
product_name = instance.data["productName"]
instance_repres = instance.data.get("representations")
if not instance_repres:
self.log.debug((
"Instance {} does not have representations. Skipping"
).format(subset_name))
).format(product_name))
return
self.log.debug(
"Processing instance with subset name {}".format(subset_name)
"Processing instance with product name {}".format(product_name)
)
# Skip if instance have 'review' key in data set to 'False'
@ -110,15 +110,15 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
# skip crypto passes.
# TODO: This is just a quick fix and has its own side-effects - it is
# affecting every subset name with `crypto` in its name.
# affecting every product name with `crypto` in its name.
# This must be solved properly, maybe using tags on
# representation that can be determined much earlier and
# with better precision.
if "crypto" in subset_name.lower():
if "crypto" in product_name.lower():
self.log.debug("Skipping crypto passes.")
return
# We only want to process the subsets needed from settings.
# We only want to process the products needed from settings.
def validate_string_against_patterns(input_str, patterns):
for pattern in patterns:
if re.match(pattern, input_str):
@ -128,14 +128,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
product_names = self.product_names
if product_names:
result = validate_string_against_patterns(
instance.data["subset"], product_names
product_name, product_names
)
if not result:
self.log.debug(
"Product name \"{}\" did not match settings filters: {}".format(
instance.data["subset"], product_names
)
)
self.log.debug((
"Product name \"{}\" did not match settings filters: {}"
).format(product_name, product_names))
return
# first check for any explicitly marked representations for thumbnail

View file

@ -38,9 +38,9 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
def process(self, instance):
self._create_context_thumbnail(instance.context)
subset_name = instance.data["subset"]
product_name = instance.data["productName"]
self.log.debug(
"Processing instance with subset name {}".format(subset_name)
"Processing instance with product name {}".format(product_name)
)
thumbnail_source = instance.data.get("thumbnailSource")
if not thumbnail_source:

View file

@ -3,11 +3,11 @@
<error id="main">
<title>Subset not unique</title>
<description>
## Clashing subset names found
## Clashing product names found
Multiples instances from your scene are set to publish into the same asset > subset.
Multiple instances from your scene are set to publish into the same folder > product.
Non unique subset names: '{non_unique}'
Non unique product names: '{non_unique}'
### How to repair?

View file

@ -60,7 +60,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"""Register publish in the database and transfer files to destinations.
Steps:
1) Register the subset and version
1) Register the product and version
2) Transfer the representation files to the destination
3) Register the representation
@ -148,8 +148,19 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Representation context keys that should always be written to
# the database even if not used by the destination template
db_representation_context_keys = [
"project", "asset", "task", "subset", "version", "representation",
"family", "hierarchy", "username", "user", "output"
"project",
"asset",
"hierarchy",
"folder",
"task",
"product",
"subset",
"family",
"version",
"representation",
"username",
"user",
"output"
]
def process(self, instance):
@ -172,7 +183,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
self.log.warning((
"Skipping, there are no representations"
" to integrate for instance {}"
).format(instance.data["family"]))
).format(instance.data["productType"]))
return
file_transactions = FileTransaction(log=self.log,
@ -205,7 +216,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if not repres:
raise KnownPublishError(
"Instance {} has no representations to integrate".format(
instance.data["family"]
instance.data["productType"]
)
)
@ -307,9 +318,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# increase if the file transaction takes a long time.
op_session.commit()
self.log.info("Subset '{subset[name]}' version {version[name]} "
"written to database..".format(subset=subset,
version=version))
self.log.info((
"Product '{}' version {} written to database.."
).format(subset["name"], version["name"]))
# Process all file transfers of all integrations now
self.log.debug("Integrating source files to destination ...")
@ -403,13 +414,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
def prepare_subset(self, instance, op_session, project_name):
asset_doc = instance.data["assetEntity"]
subset_name = instance.data["subset"]
family = instance.data["family"]
self.log.debug("Subset: {}".format(subset_name))
product_name = instance.data["productName"]
product_type = instance.data["productType"]
self.log.debug("Product: {}".format(product_name))
# Get existing subset if it exists
existing_subset_doc = get_subset_by_name(
project_name, subset_name, asset_doc["_id"]
project_name, product_name, asset_doc["_id"]
)
# Define subset data
@ -430,12 +441,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if existing_subset_doc:
subset_id = existing_subset_doc["_id"]
subset_doc = new_subset_document(
subset_name, family, asset_doc["_id"], data, subset_id
product_name, product_type, asset_doc["_id"], data, subset_id
)
if existing_subset_doc is None:
# Create a new subset
self.log.info("Subset '%s' not found, creating ..." % subset_name)
self.log.info(
"Product '%s' not found, creating ..." % product_name
)
op_session.create_entity(
project_name, subset_doc["type"], subset_doc
)
@ -455,7 +468,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
update_data
)
self.log.debug("Prepared subset: {}".format(subset_name))
self.log.debug("Prepared product: {}".format(product_name))
return subset_doc
def prepare_version(self, instance, op_session, subset_doc, project_name):
@ -914,13 +927,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Task can be optional in anatomy data
host_name = context.data["hostName"]
anatomy_data = instance.data["anatomyData"]
family = anatomy_data["family"]
product_type = instance.data["productType"]
task_info = anatomy_data.get("task") or {}
return get_publish_template_name(
project_name,
host_name,
family,
product_type,
task_name=task_info.get("name"),
task_type=task_info.get("type"),
project_settings=context.data["project_settings"],

View file

@ -46,8 +46,18 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
# Can specify representation names that will be ignored (lower case)
ignored_representation_names = []
db_representation_context_keys = [
"project", "asset", "task", "subset", "representation",
"family", "hierarchy", "task", "username", "user"
"project",
"folder",
"asset",
"hierarchy",
"task",
"product",
"subset",
"family",
"representation",
"username",
"user",
"output"
]
# QUESTION/TODO this process should happen on server if crashed due to
# permissions error on files (files were used or user didn't have perms)
@ -57,8 +67,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.debug(
"--- Integration of Hero version for subset `{}` begins.".format(
instance.data.get("subset", str(instance))
"--- Integration of Hero version for product `{}` begins.".format(
instance.data["productName"]
)
)
published_repres = instance.data.get("published_representations")
@ -503,10 +513,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
raise
self.log.debug((
"--- hero version integration for subset `{}`"
"--- hero version integration for product `{}`"
" seems to be successful."
).format(
instance.data.get("subset", str(instance))
instance.data["productName"]
))
def get_all_files_from_path(self, path):
@ -558,14 +568,12 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
anatomy_data = instance.data["anatomyData"]
task_info = anatomy_data.get("task") or {}
host_name = instance.context.data["hostName"]
# TODO raise error if Hero not set?
family = self.main_family_from_instance(instance)
product_type = instance.data["productType"]
return get_publish_template_name(
project_name,
host_name,
family,
product_type,
task_info.get("name"),
task_info.get("type"),
project_settings=instance.context.data["project_settings"],
@ -573,13 +581,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
logger=self.log
)
def main_family_from_instance(self, instance):
"""Returns main family of entered instance."""
family = instance.data.get("family")
if not family:
family = instance.data["families"][0]
return family
def copy_file(self, src_path, dst_path):
# TODO check drives if are the same to check if cas hardlink
dirname = os.path.dirname(dst_path)

View file

@ -61,8 +61,8 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin):
"Instance {} doesn't have version.".format(instance))
continue
family = instance.data.get("family")
if family == "workfile":
product_type = instance.data["productType"]
if product_type == "workfile":
workfile_instance = instance
else:
other_instances.append(instance)

View file

@ -28,7 +28,7 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin):
product_grouping_profiles = None
def process(self, instance):
"""Look into subset group profiles set by settings.
"""Look into product group profiles set by settings.
Attribute 'product_grouping_profiles' is defined by settings.
"""
@ -40,7 +40,7 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin):
if instance.data.get("subsetGroup"):
# If subsetGroup is already set then allow that value to remain
self.log.debug((
"Skipping collect subset group due to existing value: {}"
"Skipping collect product group due to existing value: {}"
).format(instance.data["subsetGroup"]))
return
@ -56,12 +56,18 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin):
return
template = profile["template"]
product_name = instance.data["productName"]
product_type = instance.data["productType"]
fill_pairs = prepare_template_data({
"family": filter_criteria["product_types"],
"family": product_type,
"task": filter_criteria["tasks"],
"host": filter_criteria["hosts"],
"subset": instance.data["subset"],
"subset": product_name,
"product": {
"name": product_name,
"type": product_type,
},
"renderlayer": instance.data.get("renderlayer")
})
@ -91,7 +97,7 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin):
# Return filter criteria
return {
"product_types": anatomy_data["family"],
"product_types": instance.data["productType"],
"tasks": task.get("name"),
"hosts": instance.context.data["hostName"],
"task_types": task.get("type")

View file

@ -42,10 +42,6 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
label = "Integrate Thumbnails to AYON"
order = pyblish.api.IntegratorOrder + 0.01
required_context_keys = [
"project", "asset", "task", "subset", "version"
]
def process(self, context):
# Filter instances which can be used for integration
filtered_instance_items = self._prepare_instances(context)

View file

@ -82,6 +82,6 @@ class IntegrateVersionAttributes(pyblish.api.ContextPlugin):
return (
instance.data.get("label")
or instance.data.get("name")
or instance.data.get("subset")
or instance.data.get("productName")
or str(instance)
)

View file

@ -37,8 +37,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin):
if not thumbnail_repres:
return
family = instance.data["family"]
subset_name = instance.data["subset"]
product_type = instance.data["productType"]
product_name = instance.data["productName"]
host_name = instance.context.data["hostName"]
anatomy_data = instance.data["anatomyData"]
@ -50,8 +50,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin):
"hosts": host_name,
"task_names": task.get("name"),
"task_types": task.get("type"),
"families": family,
"subsets": subset_name,
"product_types": product_type,
"product_names": product_name,
},
logger=self.log
)

View file

@ -21,7 +21,7 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
checked_template_names = ["source"]
# validate instances might have interim family, needs to be mapped to final
family_mapping = {
product_type_mapping = {
"renderLayer": "render",
"renderLocal": "render"
}
@ -39,7 +39,7 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
self,
"Instance meant for in place publishing."
" Its 'originalDirname' must be collected."
" Contact OP developer to modify collector."
" Contact AYON developer to modify collector."
)
anatomy = instance.context.data["anatomy"]
@ -62,15 +62,17 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
"""Find template which will be used during integration."""
project_name = instance.context.data["projectName"]
host_name = instance.context.data["hostName"]
product_type = instance.data["productType"]
mapped_product_type = (
self.product_type_mapping.get(product_type) or product_type
)
anatomy_data = instance.data["anatomyData"]
family = anatomy_data["family"]
family = self.family_mapping.get(family) or family
task_info = anatomy_data.get("task") or {}
return get_publish_template_name(
project_name,
host_name,
family,
mapped_product_type,
task_name=task_info.get("name"),
task_type=task_info.get("type"),
project_settings=instance.context.data["project_settings"],

View file

@ -6,17 +6,17 @@ from ayon_core.pipeline.publish import (
class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
"""Validate all subset names are unique.
"""Validate all product names are unique.
This only validates whether the instances currently set to publish from
the workfile overlap one another for the asset + subset they are publishing
the workfile overlap one another for the asset + product they are publishing
to.
This does not perform any check against existing publishes in the database
since it is allowed to publish into existing subsets resulting in
since it is allowed to publish into existing products resulting in
versioning.
A subset may appear twice to publish from the workfile if one
A product may appear twice to publish from the workfile if one
of them is set to publish to another asset than the other.
"""
@ -27,8 +27,8 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
def process(self, context):
# Find instance per (asset,subset)
instance_per_asset_subset = defaultdict(list)
# Find instance per (asset,product)
instance_per_asset_product = defaultdict(list)
for instance in context:
# Ignore disabled instances
@ -42,30 +42,30 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
"{}".format(instance.name))
continue
# Ignore instance without subset data
subset = instance.data.get("subset")
if subset is None:
self.log.warning("Instance found without `subset` data: "
"{}".format(instance.name))
# Ignore instance without product data
product_name = instance.data.get("productName")
if product_name is None:
self.log.warning((
"Instance found without `productName` in data: {}"
).format(instance.name))
continue
instance_per_asset_subset[(asset, subset)].append(instance)
instance_per_asset_product[(asset, product_name)].append(instance)
non_unique = []
for (asset, subset), instances in instance_per_asset_subset.items():
for (asset, product_name), instances in instance_per_asset_product.items():
# A single instance per asset, subset is fine
# A single instance per asset, product is fine
if len(instances) < 2:
continue
non_unique.append("{asset} > {subset}".format(asset=asset,
subset=subset))
non_unique.append("{} > {}".format(asset, product_name))
if not non_unique:
# All is ok
return
msg = ("Instance subset names {} are not unique. ".format(non_unique) +
msg = ("Instance product names {} are not unique. ".format(non_unique) +
"Please remove or rename duplicates.")
formatting_data = {
"non_unique": ",".join(non_unique)