deadline using product name and type

Jakub Trllo 2024-02-23 15:26:25 +01:00
parent 7813be2695
commit 591819b558
7 changed files with 72 additions and 57 deletions


@@ -112,7 +112,7 @@ class AfterEffectsSubmitDeadline(
         file_name, frame = list(collect_frames([render_path]).items())[0]
         if frame:
             # replace frame ('000001') with Deadline's required '[#######]'
-            # expects filename in format project_asset_subset_version.FRAME.ext
+            # expects filename in format project_folder_product_version.FRAME.ext
             render_dir = os.path.dirname(render_path)
             file_name = os.path.basename(render_path)
             hashed = '[{}]'.format(len(frame) * "#")
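
For reference, the renamed comment above describes the same frame-token substitution as before; a minimal sketch of that substitution (collect_frames() and the rest of the plugin are left out, and the sample path is made up):

# Sketch only: replace the literal frame number with Deadline's padded
# placeholder, assuming the commented pattern
# "<project>_<folder>_<product>_<version>.<FRAME>.<ext>".
import os

def to_deadline_output_filename(render_path, frame):
    file_name = os.path.basename(render_path)
    hashed = "[{}]".format(len(frame) * "#")
    return file_name.replace(frame, hashed)

print(to_deadline_output_filename(
    "/renders/proj_chars_renderMain_v001.000001.png", "000001"))
# -> proj_chars_renderMain_v001.[######].png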


@@ -75,7 +75,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
         script_name = os.path.basename(script_path)
         for item in instance.context:
-            if "workfile" in item.data["family"]:
+            if "workfile" in item.data["productType"]:
                 msg = "Workfile (scene) must be published along"
                 assert item.data["publish"] is True, msg


@@ -104,7 +104,7 @@ class FusionSubmitDeadline(
         # Collect all saver instances in context that are to be rendered
         saver_instances = []
         for instance in context:
-            if instance.data["family"] != "render":
+            if instance.data["productType"] != "render":
                 # Allow only saver family instances
                 continue


@@ -145,7 +145,9 @@ class HoudiniSubmitDeadline(
         if split_render_job and not is_export_job:
             # Convert from family to Deadline plugin name
             # i.e., arnold_rop -> Arnold
-            plugin = instance.data["family"].replace("_rop", "").capitalize()
+            plugin = (
+                instance.data["productType"].replace("_rop", "").capitalize()
+            )
         else:
             plugin = "Houdini"
             if split_render_job:
@@ -252,21 +254,21 @@ class HoudiniSubmitDeadline(
         # Output driver to render
         if job_type == "render":
-            family = instance.data.get("family")
-            if family == "arnold_rop":
+            product_type = instance.data.get("productType")
+            if product_type == "arnold_rop":
                 plugin_info = ArnoldRenderDeadlinePluginInfo(
                     InputFile=instance.data["ifdFile"]
                 )
-            elif family == "mantra_rop":
+            elif product_type == "mantra_rop":
                 plugin_info = MantraRenderDeadlinePluginInfo(
                     SceneFile=instance.data["ifdFile"],
                     Version=hou_major_minor,
                 )
-            elif family == "vray_rop":
+            elif product_type == "vray_rop":
                 plugin_info = VrayRenderPluginInfo(
                     InputFilename=instance.data["ifdFile"],
                 )
-            elif family == "redshift_rop":
+            elif product_type == "redshift_rop":
                 plugin_info = RedshiftRenderPluginInfo(
                     SceneFile=instance.data["ifdFile"]
                 )
@@ -287,8 +289,8 @@ class HoudiniSubmitDeadline(
             else:
                 self.log.error(
-                    "Family '%s' not supported yet to split render job",
-                    family
+                    "Product type '%s' not supported yet to split render job",
+                    product_type
                 )
                 return
         else:
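
A quick sketch of the renamed lookup above (illustration only; the per-renderer plugin-info payloads such as ArnoldRenderDeadlinePluginInfo are not reproduced, and the helper name is made up):

# Strip the "_rop" suffix from the product type and capitalize it to get
# the Deadline plugin name, falling back to the generic "Houdini" plugin.
def deadline_plugin_for(instance_data, split_render_job, is_export_job):
    if split_render_job and not is_export_job:
        # e.g. "arnold_rop" -> "Arnold", "mantra_rop" -> "Mantra"
        return instance_data["productType"].replace("_rop", "").capitalize()
    return "Houdini"

assert deadline_plugin_for({"productType": "arnold_rop"}, True, False) == "Arnold"
assert deadline_plugin_for({"productType": "redshift_rop"}, False, False) == "Houdini"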


@@ -183,11 +183,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                     b_job_response.json()["_id"])
         # redefinition of families
-        if "render" in instance.data["family"]:
-            instance.data['family'] = 'write'
+        if "render" in instance.data["productType"]:
+            instance.data["family"] = "write"
+            instance.data["productType"] = "write"
             families.insert(0, "render2d")
-        elif "prerender" in instance.data["family"]:
-            instance.data['family'] = 'write'
+        elif "prerender" in instance.data["productType"]:
+            instance.data["family"] = "write"
+            instance.data["productType"] = "write"
             families.insert(0, "prerender")
         instance.data["families"] = families
@@ -196,7 +198,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
         AbstractSubmitDeadline"""
         for instance in context:
             if (
-                instance.data["family"] != "workfile"
+                instance.data["productType"] != "workfile"
                 # Disabled instances won't be integrated
                 or instance.data("publish") is False
             ):
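
The redefinition above now checks and rewrites both the legacy "family" key and the new "productType" key. A small stand-alone illustration of the resulting instance data (plain dict instead of a real pyblish instance; starting values are made up):

instance_data = {"productType": "render", "family": "render"}
families = []

if "render" in instance_data["productType"]:
    instance_data["family"] = "write"
    instance_data["productType"] = "write"
    families.insert(0, "render2d")
elif "prerender" in instance_data["productType"]:
    instance_data["family"] = "write"
    instance_data["productType"] = "write"
    families.insert(0, "prerender")
instance_data["families"] = families

print(instance_data)
# {'productType': 'write', 'family': 'write', 'families': ['render2d']}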


@@ -18,7 +18,7 @@ from ayon_core.pipeline.version_start import get_versioning_start
 from ayon_core.pipeline.farm.pyblish_functions import (
     create_skeleton_instance_cache,
     create_instances_for_cache,
-    attach_instances_to_subset,
+    attach_instances_to_product,
     prepare_cache_representations,
     create_metadata_path
 )
@@ -97,12 +97,12 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
             (str): deadline_publish_job_id
         """
         data = instance.data.copy()
-        subset = data["subset"]
-        job_name = "Publish - {subset}".format(subset=subset)
+        product_name = data["productName"]
+        job_name = "Publish - {}".format(product_name)
         anatomy = instance.context.data['anatomy']
-        # instance.data.get("subset") != instances[0]["subset"]
+        # instance.data.get("productName") != instances[0]["productName"]
         # 'Main' vs 'renderMain'
         override_version = None
         instance_version = instance.data.get("version")  # take this if exists
@@ -113,9 +113,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
             anatomy,
             deepcopy(instance.data["anatomyData"]),
             instance.data.get("folderPath"),
-            instance.data["subset"],
+            instance.data["productName"],
             instance.context,
-            instance.data["family"],
+            instance.data["productType"],
             override_version
         )
@@ -259,7 +259,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
                 }
             ]
-        This will create instances for `beauty` and `Z` subset
+        This will create instances for `beauty` and `Z` product
         adding those files to their respective representations.
         If we have only list of files, we collect all file sequences.
@@ -297,9 +297,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
         instance_skeleton_data["representations"] += representations
         instances = [instance_skeleton_data]
-        # attach instances to subset
+        # attach instances to product
         if instance.data.get("attachTo"):
-            instances = attach_instances_to_subset(
+            instances = attach_instances_to_product(
                 instance.data.get("attachTo"), instances
             )
@@ -382,23 +382,24 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
             json.dump(publish_job, f, indent=4, sort_keys=True)
     def _get_publish_folder(self, anatomy, template_data,
-                            asset, subset, context,
-                            family, version=None):
+                            asset, product_name, context,
+                            product_type, version=None):
         """
         Extracted logic to pre-calculate real publish folder, which is
         calculated in IntegrateNew inside of Deadline process.
         This should match logic in:
             'collect_anatomy_instance_data' - to
-                get correct anatomy, family, version for subset and
+                get correct anatomy, family, version for product and
             'collect_resources_path'
                 get publish_path
         Args:
             anatomy (ayon_core.pipeline.anatomy.Anatomy):
             template_data (dict): pre-calculated collected data for process
-            asset (string): asset name
-            subset (string): subset name (actually group name of subset)
-            family (string): for current deadline process it's always 'render'
+            asset (str): asset name
+            product_name (str): Product name (actually group name of product).
+            product_type (str): for current deadline process it's always
+                'render'
                 TODO - for generic use family needs to be dynamically
                     calculated like IntegrateNew does
             version (int): override version from instance if exists
@@ -413,7 +414,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
         if not version:
             version = get_last_version_by_subset_name(
                 project_name,
-                subset,
+                product_name,
                 asset_name=asset
             )
             if version:
@@ -424,8 +425,8 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
             template_data["app"],
             task_name=template_data["task"]["name"],
             task_type=template_data["task"]["type"],
-            family="render",
-            subset=subset,
+            product_type="render",
+            product_name=product_name,
             project_settings=context.data["project_settings"]
         )
@@ -435,14 +436,18 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
         template_name = publish.get_publish_template_name(
             project_name,
             host_name,
-            family,
+            product_type,
             task_info.get("name"),
             task_info.get("type"),
         )
-        template_data["subset"] = subset
-        template_data["family"] = family
+        template_data["subset"] = product_name
+        template_data["family"] = product_type
         template_data["version"] = version
+        template_data["product"] = {
+            "name": product_name,
+            "type": product_type,
+        }
         render_templates = anatomy.templates_obj[template_name]
         if "folder" in render_templates:


@@ -19,7 +19,7 @@ from ayon_core.pipeline.version_start import get_versioning_start
 from ayon_core.pipeline.farm.pyblish_functions import (
     create_skeleton_instance,
     create_instances_for_aov,
-    attach_instances_to_subset,
+    attach_instances_to_product,
     prepare_representations,
     create_metadata_path
 )
@@ -174,12 +174,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             (str): deadline_publish_job_id
         """
         data = instance.data.copy()
-        subset = data["subset"]
-        job_name = "Publish - {subset}".format(subset=subset)
+        product_name = data["productName"]
+        job_name = "Publish - {}".format(product_name)
         anatomy = instance.context.data['anatomy']
-        # instance.data.get("subset") != instances[0]["subset"]
+        # instance.data.get("productName") != instances[0]["productName"]
         # 'Main' vs 'renderMain'
         override_version = None
         instance_version = instance.data.get("version")  # take this if exists
@@ -190,9 +190,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             anatomy,
             deepcopy(instance.data["anatomyData"]),
             instance.data.get("folderPath"),
-            instances[0]["subset"],
+            instances[0]["productName"],
             instance.context,
-            instances[0]["family"],
+            instances[0]["productType"],
             override_version
         )
@@ -356,7 +356,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
                 }
             ]
-        This will create instances for `beauty` and `Z` subset
+        This will create instances for `beauty` and `Z` product
         adding those files to their respective representations.
         If we have only list of files, we collect all file sequences.
@@ -411,9 +411,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         instance_skeleton_data["representations"] += representations
         instances = [instance_skeleton_data]
-        # attach instances to subset
+        # attach instances to product
         if instance.data.get("attachTo"):
-            instances = attach_instances_to_subset(
+            instances = attach_instances_to_product(
                 instance.data.get("attachTo"), instances
             )
@@ -503,14 +503,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             json.dump(publish_job, f, indent=4, sort_keys=True)
     def _get_publish_folder(self, anatomy, template_data,
-                            asset, subset, context,
-                            family, version=None):
+                            asset, product_name, context,
+                            product_type, version=None):
         """
         Extracted logic to pre-calculate real publish folder, which is
         calculated in IntegrateNew inside of Deadline process.
         This should match logic in:
             'collect_anatomy_instance_data' - to
-                get correct anatomy, family, version for subset and
+                get correct anatomy, family, version for product name and
             'collect_resources_path'
                 get publish_path
@@ -518,8 +518,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             anatomy (ayon_core.pipeline.anatomy.Anatomy):
             template_data (dict): pre-calculated collected data for process
             asset (string): asset name
-            subset (string): subset name (actually group name of subset)
-            family (string): for current deadline process it's always 'render'
+            product_name (string): Product name (actually group name
+                of product)
+            product_type (string): for current deadline process it's always
+                'render'
                 TODO - for generic use family needs to be dynamically
                     calculated like IntegrateNew does
             version (int): override version from instance if exists
@@ -535,7 +537,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         if not version:
            version = get_last_version_by_subset_name(
                 project_name,
-                subset,
+                product_name,
                 asset_name=asset
             )
             if version:
@@ -546,8 +548,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             host_name,
             task_name=template_data["task"]["name"],
             task_type=template_data["task"]["type"],
-            family="render",
-            subset=subset,
+            product_type="render",
+            product_name=product_name,
             project_settings=context.data["project_settings"]
         )
@@ -557,14 +559,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         template_name = publish.get_publish_template_name(
             project_name,
             host_name,
-            family,
+            product_type,
             task_info.get("name"),
             task_info.get("type"),
         )
-        template_data["subset"] = subset
-        template_data["family"] = family
+        template_data["subset"] = product_name
+        template_data["family"] = product_type
         template_data["version"] = version
+        template_data["product"] = {
+            "name": product_name,
+            "type": product_type,
+        }
         render_templates = anatomy.templates_obj[template_name]
         if "folder" in render_templates: