Merge pull request #5420 from fabiaserra/feature/houdini_export_task

Ondřej Samohel 2023-12-06 11:05:46 +01:00 committed by GitHub
commit f15c218d65
9 changed files with 353 additions and 29 deletions

View file

@@ -121,8 +121,8 @@ def get_id_required_nodes():
return list(nodes)
def get_output_parameter(node):
"""Return the render output parameter name of the given node
def get_export_parameter(node):
"""Return the export output parameter of the given node
Example:
root = hou.node("/obj")
@@ -137,13 +137,70 @@ def get_output_parameter(node):
hou.Parm
"""
node_type = node.type().description()
node_type = node.type().name()
if node_type == "geometry":
# Ensure the proper Take is selected for each ROP so that the correct
# ifd path is retrieved
try:
rop_take = hou.takes.findTake(node.parm("take").eval())
if rop_take is not None:
hou.takes.setCurrentTake(rop_take)
except AttributeError:
# the hou module doesn't always have the 'takes' attribute
pass
if node_type == "Mantra" and node.parm("soho_outputmode").eval():
return node.parm("soho_diskfile")
elif node_type == "Alfred":
return node.parm("alf_diskfile")
elif node_type in ("RenderMan", "RenderMan RIS"):
pre_ris22 = node.parm("rib_outputmode") and \
node.parm("rib_outputmode").eval()
ris22 = node.parm("diskfile") and node.parm("diskfile").eval()
if pre_ris22 or ris22:
return node.parm("soho_diskfile")
elif node_type == "Redshift" and node.parm("RS_archive_enable").eval():
return node.parm("RS_archive_file")
elif node_type == "Wedge" and node.parm("driver").eval():
return get_export_parameter(node.node(node.parm("driver").eval()))
elif node_type == "Arnold":
return node.parm("ar_ass_file")
elif node_type == "Alembic" and node.parm("use_sop_path").eval():
return node.parm("sop_path")
elif node_type == "Shotgun Mantra" and node.parm("soho_outputmode").eval():
return node.parm("sgtk_soho_diskfile")
elif node_type == "Shotgun Alembic" and node.parm("use_sop_path").eval():
return node.parm("sop_path")
elif node.type().nameWithCategory() == "Driver/vray_renderer":
return node.parm("render_export_filepath")
raise TypeError("Node type '%s' not supported" % node_type)
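One subtlety in the chain above: a Wedge ROP delegates to whatever driver it points at, so the recursion returns a parm owned by the driven node. A small sketch, with hypothetical node paths:

    wedge = hou.node("/out/wedge1")
    wedge.parm("driver").set("/out/mantra1")
    # get_export_parameter(wedge) resolves wedge.node("/out/mantra1") and
    # recurses, ultimately returning that Mantra ROP's export parm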
def get_output_parameter(node):
"""Return the render output parameter of the given node
Example:
root = hou.node("/obj")
my_alembic_node = root.createNode("alembic")
get_output_parameter(my_alembic_node)
# Result: "output"
Args:
node(hou.Node): node instance
Returns:
hou.Parm
"""
node_type = node.type().description()
category = node.type().category().name()
# Figure out which type of node is being rendered
if node_type == "Geometry" or node_type == "Filmbox FBX" or \
(node_type == "ROP Output Driver" and category == "Sop"):
return node.parm("sopoutput")
elif node_type == "alembic":
return node.parm("filename")
elif node_type == "comp":
elif node_type == "Composite":
return node.parm("copoutput")
elif node_type == "opengl":
return node.parm("picture")
@@ -155,6 +212,15 @@ def get_output_parameter(node):
elif node_type == "ifd":
if node.evalParm("soho_outputmode"):
return node.parm("soho_diskfile")
elif node_type == "Octane":
return node.parm("HO_img_fileName")
elif node_type == "Fetch":
inner_node = node.node(node.parm("source").eval())
if inner_node:
return get_output_parameter(inner_node)
elif node.type().nameWithCategory() == "Driver/vray_renderer":
return node.parm("SettingsOutput_img_file_path")
raise TypeError("Node type '%s' not supported" % node_type)
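Taken together, the two helpers resolve different parms on the same ROP. A minimal usage sketch, assuming a live Houdini session and a Mantra ROP at a hypothetical path:

    import hou

    rop = hou.node("/out/mantra1")

    # Final image parm (e.g. "vm_picture" while disk export is off)
    print(get_output_parameter(rop).eval())

    # Export-file parm; only meaningful once export mode is enabled
    rop.parm("soho_outputmode").set(1)
    print(get_export_parameter(rop).eval())  # the "soho_diskfile" path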

View file

@@ -13,6 +13,9 @@ class CreateArnoldRop(plugin.HoudiniCreator):
# Default extension
ext = "exr"
# Default to splitting export and render into two jobs
export_job = True
def create(self, subset_name, instance_data, pre_create_data):
import hou
@@ -48,6 +51,15 @@ class CreateArnoldRop(plugin.HoudiniCreator):
"ar_exr_half_precision": 1 # half precision
}
if pre_create_data.get("export_job"):
ass_filepath = \
"{export_dir}{subset_name}/{subset_name}.$F4.ass".format(
export_dir=hou.text.expandString("$HIP/pyblish/ass/"),
subset_name=subset_name,
)
parms["ar_ass_export_enable"] = 1
parms["ar_ass_file"] = ass_filepath
instance_node.setParms(parms)
# Lock any parameters in this list
@@ -66,6 +78,9 @@ class CreateArnoldRop(plugin.HoudiniCreator):
BoolDef("farm",
label="Submitting to Farm",
default=True),
BoolDef("export_job",
label="Split export and render jobs",
default=self.export_job),
EnumDef("image_format",
image_format_enum,
default=self.ext,
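A worked example of the path template above, assuming $HIP expands to /proj/shot010 and a subset named renderLightingMain (both hypothetical):

    subset_name = "renderLightingMain"
    export_dir = "/proj/shot010/pyblish/ass/"  # hou.text.expandString result
    ass_filepath = "{export_dir}{subset_name}/{subset_name}.$F4.ass".format(
        export_dir=export_dir,
        subset_name=subset_name,
    )
    # -> "/proj/shot010/pyblish/ass/renderLightingMain/renderLightingMain.$F4.ass"
    # Arnold then writes one .ass per frame wherever $F4 resolves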

View file

@@ -12,6 +12,9 @@ class CreateMantraROP(plugin.HoudiniCreator):
family = "mantra_rop"
icon = "magic"
# Default to splitting export and render into two jobs
export_job = True
def create(self, subset_name, instance_data, pre_create_data):
import hou # noqa
@@ -44,6 +47,15 @@ class CreateMantraROP(plugin.HoudiniCreator):
"vm_picture": filepath,
}
if pre_create_data.get("export_job"):
ifd_filepath = \
"{export_dir}{subset_name}/{subset_name}.$F4.ifd".format(
export_dir=hou.text.expandString("$HIP/pyblish/ifd/"),
subset_name=subset_name,
)
parms["soho_outputmode"] = 1
parms["soho_diskfile"] = ifd_filepath
if self.selected_nodes:
# If a camera is found in the selection,
# use it as the render camera
@@ -78,6 +90,9 @@ class CreateMantraROP(plugin.HoudiniCreator):
BoolDef("farm",
label="Submitting to Farm",
default=True),
BoolDef("export_job",
label="Split export and render jobs",
default=self.export_job),
EnumDef("image_format",
image_format_enum,
default="exr",

View file

@@ -16,6 +16,9 @@ class CreateVrayROP(plugin.HoudiniCreator):
icon = "magic"
ext = "exr"
# Default to splitting export and render into two jobs
export_job = True
def create(self, subset_name, instance_data, pre_create_data):
instance_data.pop("active", None)
@@ -52,6 +55,17 @@ class CreateVrayROP(plugin.HoudiniCreator):
"SettingsEXR_bits_per_channel": "16" # half precision
}
if pre_create_data.get("export_job"):
scene_filepath = \
"{export_dir}{subset_name}/{subset_name}.$F4.vrscene".format(
export_dir=hou.text.expandString("$HIP/pyblish/vrscene/"),
subset_name=subset_name,
)
# Set render_export_mode to "2", which is "Export only"
# ("1" is "Export & Render")
parms["render_export_mode"] = "2"
parms["render_export_filepath"] = scene_filepath
if self.selected_nodes:
# set up the render camera from the selected node
camera = None
@@ -140,6 +154,9 @@ class CreateVrayROP(plugin.HoudiniCreator):
BoolDef("farm",
label="Submitting to Farm",
default=True),
BoolDef("export_job",
label="Split export and render jobs",
default=self.export_job),
EnumDef("image_format",
image_format_enum,
default=self.ext,
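Unlike the Arnold and Mantra toggles, the V-Ray ROP encodes the split in a string-valued menu, which is why the collector below compares against the string "2". A sketch of the three states (parm name per the diff; the meaning of the remaining value is an assumption):

    mode = rop_node.parm("render_export_mode").eval()  # evaluates to a str
    if mode == "2":
        pass  # "Export only": a separate render job consumes the .vrscene
    elif mode == "1":
        pass  # "Export & Render" within a single job
    else:
        pass  # assumed: render directly without writing a .vrscene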

View file

@@ -40,6 +40,25 @@ class CollectArnoldROPRenderProducts(pyblish.api.InstancePlugin):
default_prefix = evalParmNoFrame(rop, "ar_picture")
render_products = []
# Store whether we are splitting the render job (export + render)
export_job = bool(rop.parm("ar_ass_export_enable").eval())
instance.data["exportJob"] = export_job
export_prefix = None
export_products = []
if export_job:
export_prefix = evalParmNoFrame(
rop, "ar_ass_file", pad_character="0"
)
beauty_export_product = self.get_render_product_name(
prefix=export_prefix,
suffix=None)
export_products.append(beauty_export_product)
self.log.debug(
"Found export product: {}".format(beauty_export_product)
)
instance.data["ifdFile"] = beauty_export_product
instance.data["exportFiles"] = list(export_products)
# Default beauty AOV
beauty_product = self.get_render_product_name(prefix=default_prefix,
suffix=None)
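After this collector runs with the split enabled, downstream plugins (the Deadline submitter further down) can rely on roughly these keys. The path is hypothetical; the "0000" padding reflects pad_character="0" in evalParmNoFrame, an OpenPype helper that, as the name suggests, evaluates a parm with its frame token substituted:

    instance.data["exportJob"]    # True
    instance.data["ifdFile"]      # "/proj/shot010/pyblish/ass/renderMain/renderMain.0000.ass"
    instance.data["exportFiles"]  # [".../renderMain.0000.ass"]
    # "ifdFile" is reused as a generic export-file key for .ass and
    # .vrscene as well, despite the Mantra-centric name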

View file

@@ -44,6 +44,25 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):
default_prefix = evalParmNoFrame(rop, "vm_picture")
render_products = []
# Store whether we are splitting the render job (export + render)
export_job = bool(rop.parm("soho_outputmode").eval())
instance.data["exportJob"] = export_job
export_prefix = None
export_products = []
if export_job:
export_prefix = evalParmNoFrame(
rop, "soho_diskfile", pad_character="0"
)
beauty_export_product = self.get_render_product_name(
prefix=export_prefix,
suffix=None)
export_products.append(beauty_export_product)
self.log.debug(
"Found export product: {}".format(beauty_export_product)
)
instance.data["ifdFile"] = beauty_export_product
instance.data["exportFiles"] = list(export_products)
# Default beauty AOV
beauty_product = self.get_render_product_name(
prefix=default_prefix, suffix=None

View file

@@ -45,7 +45,26 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):
render_products = []
# TODO: add render elements if render elements are enabled
beauty_product = self.get_beauty_render_product(default_prefix)
# Store whether we are splitting the render job (export + render)
export_job = rop.parm("render_export_mode").eval() == "2"
instance.data["exportJob"] = export_job
export_prefix = None
export_products = []
if export_job:
export_prefix = evalParmNoFrame(
rop, "render_export_filepath", pad_character="0"
)
beauty_export_product = self.get_render_product_name(
prefix=export_prefix,
suffix=None)
export_products.append(beauty_export_product)
self.log.debug(
"Found export product: {}".format(beauty_export_product)
)
instance.data["ifdFile"] = beauty_export_product
instance.data["exportFiles"] = list(export_products)
beauty_product = self.get_render_product_name(default_prefix)
render_products.append(beauty_product)
files_by_aov = {
"RGB Color": self.generate_expected_files(instance,
@@ -79,7 +98,7 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):
instance.data["colorspaceDisplay"] = colorspace_data["display"]
instance.data["colorspaceView"] = colorspace_data["view"]
def get_beauty_render_product(self, prefix, suffix="<reName>"):
def get_render_product_name(self, prefix, suffix="<reName>"):
"""Return the beauty output filename if render element enabled
"""
# Remove aov suffix from the product: `prefix.aov_suffix` -> `prefix`
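The hunk cuts off before the implementation, but the transformation the comment describes is a plain suffix split. A guess at the intent, not the actual code:

    prefix = "vray_beauty.RGB_Color"     # hypothetical "prefix.aov_suffix"
    stripped = prefix.rsplit(".", 1)[0]  # -> "vray_beauty"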

View file

@@ -460,7 +460,22 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
self.plugin_info = self.get_plugin_info()
self.aux_files = self.get_aux_files()
self.process_submission()
job_id = self.process_submission()
self.log.info("Submitted job to Deadline: {}.".format(job_id))
# TODO: Find a way that's more generic and not render type specific
export_job = instance.data.get("exportJob", False)
if export_job:
self.log.info("Splitting export and render in two jobs")
self.log.info("Export job id: %s", job_id)
render_job_info = self.get_job_info(dependency_job_ids=[job_id])
render_plugin_info = self.get_plugin_info(job_type="render")
payload = self.assemble_payload(
job_info=render_job_info,
plugin_info=render_plugin_info
)
render_job_id = self.submit(payload)
self.log.info("Render job id: %s", render_job_id)
def process_submission(self):
"""Process data for submission.

View file

@@ -5,12 +5,15 @@ from datetime import datetime
import pyblish.api
from openpype.pipeline import legacy_io
from openpype.pipeline import legacy_io, OpenPypePyblishPluginMixin
from openpype.tests.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
from openpype.lib import is_running_from_build
from openpype.lib import (
is_running_from_build,
BoolDef,
NumberDef
)
@attr.s
class DeadlinePluginInfo():
@@ -20,8 +23,29 @@ class DeadlinePluginInfo():
IgnoreInputs = attr.ib(default=True)
class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
"""Submit Solaris USD Render ROPs to Deadline.
@attr.s
class ArnoldRenderDeadlinePluginInfo():
InputFile = attr.ib(default=None)
Verbose = attr.ib(default=4)
@attr.s
class MantraRenderDeadlinePluginInfo():
SceneFile = attr.ib(default=None)
Version = attr.ib(default=None)
@attr.s
class VrayRenderPluginInfo():
InputFilename = attr.ib(default=None)
SeparateFilesPerFrame = attr.ib(default=True)
class HoudiniSubmitDeadline(
abstract_submit_deadline.AbstractSubmitDeadline,
OpenPypePyblishPluginMixin
):
"""Submit Render ROPs to Deadline.
Renders are submitted to a Deadline Web Service as
supplied via the environment variable AVALON_DEADLINE.
@@ -45,21 +69,95 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
targets = ["local"]
use_published = True
def get_job_info(self):
job_info = DeadlineJobInfo(Plugin="Houdini")
# presets
priority = 50
chunk_size = 1
export_priority = 50
export_chunk_size = 10
group = ""
export_group = ""
@classmethod
def get_attribute_defs(cls):
return [
NumberDef(
"priority",
label="Priority",
default=cls.priority,
decimals=0
),
NumberDef(
"chunk",
label="Frames Per Task",
default=cls.chunk_size,
decimals=0,
minimum=1,
maximum=1000
),
NumberDef(
"export_priority",
label="Export Priority",
default=cls.export_priority,
decimals=0
),
NumberDef(
"export_chunk",
label="Export Frames Per Task",
default=cls.export_chunk_size,
decimals=0,
minimum=1,
maximum=1000
),
BoolDef(
"suspend_publish",
default=False,
label="Suspend publish"
)
]
def get_job_info(self, dependency_job_ids=None):
instance = self._instance
context = instance.context
attribute_values = self.get_attr_values_from_data(instance.data)
# Whether the Deadline render submission is being split in two
# (export + render)
split_render_job = instance.data.get("exportJob", False)
# If there are dependency job ids we can assume this is the render job
# and not the export job
is_export_job = not dependency_job_ids
if split_render_job and not is_export_job:
# Convert from family to Deadline plugin name
# i.e., arnold_rop -> Arnold
plugin = instance.data["family"].replace("_rop", "").capitalize()
else:
plugin = "Houdini"
job_info = DeadlineJobInfo(Plugin=plugin)
filepath = context.data["currentFile"]
filename = os.path.basename(filepath)
job_info.Name = "{} - {}".format(filename, instance.name)
job_info.BatchName = filename
job_info.Plugin = "Houdini"
job_info.UserName = context.data.get(
"deadlineUser", getpass.getuser())
if split_render_job and is_export_job:
job_info.Priority = attribute_values.get(
"export_priority", self.export_priority
)
else:
job_info.Priority = attribute_values.get(
"priority", self.priority
)
if is_in_tests():
job_info.BatchName += datetime.now().strftime("%d%m%Y%H%M%S")
@@ -73,9 +171,23 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
)
job_info.Frames = frames
# Make sure the render job is frame dependent so render tasks pick up as
# soon as the matching export tasks are done
if split_render_job and not is_export_job:
job_info.IsFrameDependent = True
job_info.Pool = instance.data.get("primaryPool")
job_info.SecondaryPool = instance.data.get("secondaryPool")
job_info.ChunkSize = instance.data.get("chunkSize", 10)
job_info.Group = self.group
if split_render_job and is_export_job:
job_info.ChunkSize = attribute_values.get(
"export_chunk", self.export_chunk_size
)
else:
job_info.ChunkSize = attribute_values.get(
"chunk", self.chunk_size
)
job_info.Comment = context.data.get("comment")
keys = [
@@ -101,6 +213,7 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **legacy_io.Session)
for key in keys:
value = environment.get(key)
if value:
@@ -115,25 +228,51 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
job_info.OutputDirectory += dirname.replace("\\", "/")
job_info.OutputFilename += fname
# Add dependencies if given
if dependency_job_ids:
job_info.JobDependencies = ",".join(dependency_job_ids)
return job_info
def get_plugin_info(self):
def get_plugin_info(self, job_type=None):
# Not all hosts can import this module.
import hou
instance = self._instance
context = instance.context
# Output driver to render
driver = hou.node(instance.data["instance_node"])
hou_major_minor = hou.applicationVersionString().rsplit(".", 1)[0]
plugin_info = DeadlinePluginInfo(
SceneFile=context.data["currentFile"],
OutputDriver=driver.path(),
Version=hou_major_minor,
IgnoreInputs=True
)
# Output driver to render
if job_type == "render":
family = instance.data.get("family")
if family == "arnold_rop":
plugin_info = ArnoldRenderDeadlinePluginInfo(
InputFile=instance.data["ifdFile"]
)
elif family == "mantra_rop":
plugin_info = MantraRenderDeadlinePluginInfo(
SceneFile=instance.data["ifdFile"],
Version=hou_major_minor,
)
elif family == "vray_rop":
plugin_info = VrayRenderPluginInfo(
InputFilename=instance.data["ifdFile"],
)
else:
self.log.error(
"Family '%s' not supported yet to split render job",
family
)
return
else:
driver = hou.node(instance.data["instance_node"])
plugin_info = DeadlinePluginInfo(
SceneFile=context.data["currentFile"],
OutputDriver=driver.path(),
Version=hou_major_minor,
IgnoreInputs=True
)
return attr.asdict(plugin_info)
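For reference, a sketch of the render-leg payloads attr.asdict(plugin_info) produces per family; the paths are hypothetical and Version follows the hou_major_minor split above:

    # family == "arnold_rop"
    {"InputFile": "/proj/shot010/pyblish/ass/renderMain/renderMain.0000.ass",
     "Verbose": 4}

    # family == "mantra_rop"
    {"SceneFile": "/proj/shot010/pyblish/ifd/renderMain/renderMain.0000.ifd",
     "Version": "19.5"}

    # family == "vray_rop"
    {"InputFilename": "/proj/shot010/pyblish/vrscene/renderMain/renderMain.0000.vrscene",
     "SeparateFilesPerFrame": True}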