Mirror of https://github.com/ynput/ayon-core.git, synced 2026-01-01 08:24:53 +01:00

Merge pull request #4825 from ynput/feature/OP-2637_Houdini-Farm-publishingrendering
Commit 264136b181
20 changed files with 1332 additions and 175 deletions
openpype/hosts/houdini/api/colorspace.py (new file, 56 lines)
@@ -0,0 +1,56 @@
import attr
import hou

from openpype.hosts.houdini.api.lib import get_color_management_preferences


@attr.s
class LayerMetadata(object):
    """Data class for Render Layer metadata."""
    frameStart = attr.ib()
    frameEnd = attr.ib()


@attr.s
class RenderProduct(object):
    """Getting Colorspace as
    Specific Render Product Parameter for submitting
    publish job.

    """
    colorspace = attr.ib()                      # colorspace
    view = attr.ib()
    productName = attr.ib(default=None)


class ARenderProduct(object):

    def __init__(self):
        """Constructor."""
        # Initialize
        self.layer_data = self._get_layer_data()
        self.layer_data.products = self.get_colorspace_data()

    def _get_layer_data(self):
        return LayerMetadata(
            frameStart=int(hou.playbar.frameRange()[0]),
            frameEnd=int(hou.playbar.frameRange()[1]),
        )

    def get_colorspace_data(self):
        """To be implemented by renderer class.

        This should return a list of RenderProducts.

        Returns:
            list: List of RenderProduct

        """
        data = get_color_management_preferences()
        colorspace_data = [
            RenderProduct(
                colorspace=data["display"],
                view=data["view"],
                productName=""
            )
        ]
        return colorspace_data
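As a rough usage sketch, the render-product collectors added later in this PR instantiate ARenderProduct directly and read its layer data inside a Houdini session; the print statements below are illustrative only:

    from openpype.hosts.houdini.api import colorspace

    products = colorspace.ARenderProduct()
    # frame range is taken from hou.playbar.frameRange()
    print(products.layer_data.frameStart, products.layer_data.frameEnd)
    # one RenderProduct built from the current OCIO display/view
    for product in products.layer_data.products:
        print(product.colorspace, product.view, product.productName)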
openpype/hosts/houdini/api/lib.py:
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 import sys
 import os
+import re
 import uuid
 import logging
 from contextlib import contextmanager
@@ -581,3 +582,74 @@ def splitext(name, allowed_multidot_extensions):
             return name[:-len(ext)], ext
 
     return os.path.splitext(name)
+
+
+def get_top_referenced_parm(parm):
+
+    processed = set()  # disallow infinite loop
+    while True:
+        if parm.path() in processed:
+            raise RuntimeError("Parameter references result in cycle.")
+
+        processed.add(parm.path())
+
+        ref = parm.getReferencedParm()
+        if ref.path() == parm.path():
+            # It returns itself when it doesn't reference
+            # another parameter
+            return ref
+        else:
+            parm = ref
+
+
+def evalParmNoFrame(node, parm, pad_character="#"):
+
+    parameter = node.parm(parm)
+    assert parameter, "Parameter does not exist: %s.%s" % (node, parm)
+
+    # If the parameter has a parameter reference, then get that
+    # parameter instead as otherwise `unexpandedString()` fails.
+    parameter = get_top_referenced_parm(parameter)
+
+    # Substitute out the frame numbering with padded characters
+    try:
+        raw = parameter.unexpandedString()
+    except hou.Error as exc:
+        print("Failed: %s" % parameter)
+        raise RuntimeError(exc)
+
+    def replace(match):
+        padding = 1
+        n = match.group(2)
+        if n and int(n):
+            padding = int(n)
+        return pad_character * padding
+
+    expression = re.sub(r"(\$F([0-9]*))", replace, raw)
+
+    with hou.ScriptEvalContext(parameter):
+        return hou.expandStringAtFrame(expression, 0)
+
+
+def get_color_management_preferences():
+    """Get default OCIO preferences"""
+    data = {
+        "config": hou.Color.ocio_configPath()
+    }
+
+    # Get default display and view from OCIO
+    display = hou.Color.ocio_defaultDisplay()
+    disp_regex = re.compile(r"^(?P<name>.+-)(?P<display>.+)$")
+    disp_match = disp_regex.match(display)
+
+    view = hou.Color.ocio_defaultView()
+    view_regex = re.compile(r"^(?P<name>.+- )(?P<view>.+)$")
+    view_match = view_regex.match(view)
+    data.update({
+        "display": disp_match.group("display"),
+        "view": view_match.group("view")
+    })
+
+    return data
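A minimal sketch of how these new helpers are meant to be called; the node path and parameter name below are examples, not taken from this PR:

    import hou
    from openpype.hosts.houdini.api.lib import (
        evalParmNoFrame, get_color_management_preferences)

    rop = hou.node("/out/mantra1")               # example node path
    # "$F4" in the raw parameter value is replaced by "####" before expansion,
    # so the result keeps a frame placeholder instead of a concrete frame number
    prefix = evalParmNoFrame(rop, "vm_picture")
    ocio = get_color_management_preferences()    # {"config": ..., "display": ..., "view": ...}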
openpype/hosts/houdini/plugins/create/create_arnold_rop.py (new file, 71 lines)
@@ -0,0 +1,71 @@
from openpype.hosts.houdini.api import plugin
from openpype.lib import EnumDef


class CreateArnoldRop(plugin.HoudiniCreator):
    """Arnold ROP"""

    identifier = "io.openpype.creators.houdini.arnold_rop"
    label = "Arnold ROP"
    family = "arnold_rop"
    icon = "magic"
    defaults = ["master"]

    # Default extension
    ext = "exr"

    def create(self, subset_name, instance_data, pre_create_data):
        import hou

        # Remove the active, we are checking the bypass flag of the nodes
        instance_data.pop("active", None)
        instance_data.update({"node_type": "arnold"})

        # Add chunk size attribute
        instance_data["chunkSize"] = 1
        # Submit for job publishing
        instance_data["farm"] = True

        instance = super(CreateArnoldRop, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: plugin.CreatedInstance

        instance_node = hou.node(instance.get("instance_node"))

        ext = pre_create_data.get("image_format")

        filepath = "{renders_dir}{subset_name}/{subset_name}.$F4.{ext}".format(
            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
            subset_name=subset_name,
            ext=ext,
        )
        parms = {
            # Render frame range
            "trange": 1,

            # Arnold ROP settings
            "ar_picture": filepath,
            "ar_exr_half_precision": 1   # half precision
        }

        instance_node.setParms(parms)

        # Lock any parameters in this list
        to_lock = ["family", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_pre_create_attr_defs(self):
        attrs = super(CreateArnoldRop, self).get_pre_create_attr_defs()

        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        return attrs + [
            EnumDef("image_format",
                    image_format_enum,
                    default=self.ext,
                    label="Image Format Options")
        ]
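For example, with a subset named "arnoldMain" and the default "exr" format, the output template above (and the same template reused in the Karma and Mantra creators below) resolves to a path of this form; only $HIP is expanded at creation time, while $F4 stays as a per-frame placeholder:

    <expanded $HIP>/pyblish/renders/arnoldMain/arnoldMain.$F4.exr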
openpype/hosts/houdini/plugins/create/create_karma_rop.py (new file, 114 lines)
@@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Karma ROP."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance
from openpype.lib import BoolDef, EnumDef, NumberDef


class CreateKarmaROP(plugin.HoudiniCreator):
    """Karma ROP"""
    identifier = "io.openpype.creators.houdini.karma_rop"
    label = "Karma ROP"
    family = "karma_rop"
    icon = "magic"
    defaults = ["master"]

    def create(self, subset_name, instance_data, pre_create_data):
        import hou  # noqa

        instance_data.pop("active", None)
        instance_data.update({"node_type": "karma"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 10
        # Submit for job publishing
        instance_data["farm"] = True

        instance = super(CreateKarmaROP, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

        instance_node = hou.node(instance.get("instance_node"))

        ext = pre_create_data.get("image_format")

        filepath = "{renders_dir}{subset_name}/{subset_name}.$F4.{ext}".format(
            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
            subset_name=subset_name,
            ext=ext,
        )
        checkpoint = "{cp_dir}{subset_name}.$F4.checkpoint".format(
            cp_dir=hou.text.expandString("$HIP/pyblish/"),
            subset_name=subset_name
        )

        usd_directory = "{usd_dir}{subset_name}_$RENDERID".format(
            usd_dir=hou.text.expandString("$HIP/pyblish/renders/usd_renders/"),  # noqa
            subset_name=subset_name
        )

        parms = {
            # Render Frame Range
            "trange": 1,
            # Karma ROP Setting
            "picture": filepath,
            # Karma Checkpoint Setting
            "productName": checkpoint,
            # USD Output Directory
            "savetodirectory": usd_directory,
        }

        res_x = pre_create_data.get("res_x")
        res_y = pre_create_data.get("res_y")

        if self.selected_nodes:
            # If camera found in selection
            # we will use as render camera
            camera = None
            for node in self.selected_nodes:
                if node.type().name() == "cam":
                    # Use the selected camera as the render camera
                    camera = node.path()
                    has_camera = pre_create_data.get("cam_res")
                    if has_camera:
                        res_x = node.evalParm("resx")
                        res_y = node.evalParm("resy")

            if not camera:
                self.log.warning("No render camera found in selection")

            parms.update({
                "camera": camera or "",
                "resolutionx": res_x,
                "resolutiony": res_y,
            })

        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_pre_create_attr_defs(self):
        attrs = super(CreateKarmaROP, self).get_pre_create_attr_defs()

        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        return attrs + [
            EnumDef("image_format",
                    image_format_enum,
                    default="exr",
                    label="Image Format Options"),
            NumberDef("res_x",
                      label="width",
                      default=1920,
                      decimals=0),
            NumberDef("res_y",
                      label="height",
                      default=720,
                      decimals=0),
            BoolDef("cam_res",
                    label="Camera Resolution",
                    default=False)
        ]
openpype/hosts/houdini/plugins/create/create_mantra_rop.py (new file, 88 lines)
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Mantra ROP."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance
from openpype.lib import EnumDef, BoolDef


class CreateMantraROP(plugin.HoudiniCreator):
    """Mantra ROP"""
    identifier = "io.openpype.creators.houdini.mantra_rop"
    label = "Mantra ROP"
    family = "mantra_rop"
    icon = "magic"
    defaults = ["master"]

    def create(self, subset_name, instance_data, pre_create_data):
        import hou  # noqa

        instance_data.pop("active", None)
        instance_data.update({"node_type": "ifd"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 10
        # Submit for job publishing
        instance_data["farm"] = True

        instance = super(CreateMantraROP, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

        instance_node = hou.node(instance.get("instance_node"))

        ext = pre_create_data.get("image_format")

        filepath = "{renders_dir}{subset_name}/{subset_name}.$F4.{ext}".format(
            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
            subset_name=subset_name,
            ext=ext,
        )

        parms = {
            # Render Frame Range
            "trange": 1,
            # Mantra ROP Setting
            "vm_picture": filepath,
        }

        if self.selected_nodes:
            # If camera found in selection
            # we will use as render camera
            camera = None
            for node in self.selected_nodes:
                if node.type().name() == "cam":
                    camera = node.path()

            if not camera:
                self.log.warning("No render camera found in selection")

            parms.update({"camera": camera or ""})

        custom_res = pre_create_data.get("override_resolution")
        if custom_res:
            parms.update({"override_camerares": 1})
        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_pre_create_attr_defs(self):
        attrs = super(CreateMantraROP, self).get_pre_create_attr_defs()

        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        return attrs + [
            EnumDef("image_format",
                    image_format_enum,
                    default="exr",
                    label="Image Format Options"),
            BoolDef("override_resolution",
                    label="Override Camera Resolution",
                    tooltip="Override the current camera "
                            "resolution, recommended for IPR.",
                    default=False)
        ]
Changes to the Redshift ROP creator plugin:
@@ -1,7 +1,10 @@
 # -*- coding: utf-8 -*-
 """Creator plugin to create Redshift ROP."""
+import hou  # noqa
+
 from openpype.hosts.houdini.api import plugin
 from openpype.pipeline import CreatedInstance
+from openpype.lib import EnumDef
 
 
 class CreateRedshiftROP(plugin.HoudiniCreator):
@@ -11,20 +14,16 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
     family = "redshift_rop"
     icon = "magic"
     defaults = ["master"]
+    ext = "exr"
 
     def create(self, subset_name, instance_data, pre_create_data):
-        import hou  # noqa
 
         instance_data.pop("active", None)
         instance_data.update({"node_type": "Redshift_ROP"})
         # Add chunk size attribute
         instance_data["chunkSize"] = 10
-
-        # Clear the family prefix from the subset
-        subset = subset_name
-        subset_no_prefix = subset[len(self.family):]
-        subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:]
-        subset_name = subset_no_prefix
+        # Submit for job publishing
+        instance_data["farm"] = True
 
         instance = super(CreateRedshiftROP, self).create(
             subset_name,
@@ -34,11 +33,10 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
         instance_node = hou.node(instance.get("instance_node"))
 
         basename = instance_node.name()
-        instance_node.setName(basename + "_ROP", unique_name=True)
 
         # Also create the linked Redshift IPR Rop
         try:
-            ipr_rop = self.parent.createNode(
+            ipr_rop = instance_node.parent().createNode(
                 "Redshift_IPR", node_name=basename + "_IPR"
             )
         except hou.OperationFailed:
@@ -50,19 +48,58 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
         ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1))
 
         # Set the linked rop to the Redshift ROP
-        ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance))
+        ipr_rop.parm("linked_rop").set(instance_node.path())
 
-        prefix = '${HIP}/render/${HIPNAME}/`chs("subset")`.${AOV}.$F4.exr'
-
+        ext = pre_create_data.get("image_format")
+        filepath = "{renders_dir}{subset_name}/{subset_name}.{fmt}".format(
+            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
+            subset_name=subset_name,
+            fmt="${aov}.$F4.{ext}".format(aov="AOV", ext=ext)
+        )
+
         parms = {
             # Render frame range
             "trange": 1,
             # Redshift ROP settings
-            "RS_outputFileNamePrefix": prefix,
-            "RS_outputMultilayerMode": 0,  # no multi-layered exr
+            "RS_outputFileNamePrefix": filepath,
+            "RS_outputMultilayerMode": "1",  # no multi-layered exr
             "RS_outputBeautyAOVSuffix": "beauty",
         }
 
+        if self.selected_nodes:
+            # set up the render camera from the selected node
+            camera = None
+            for node in self.selected_nodes:
+                if node.type().name() == "cam":
+                    camera = node.path()
+            parms.update({
+                "RS_renderCamera": camera or ""})
         instance_node.setParms(parms)
 
         # Lock some Avalon attributes
         to_lock = ["family", "id"]
         self.lock_parameters(instance_node, to_lock)
+
+    def remove_instances(self, instances):
+        for instance in instances:
+            node = instance.data.get("instance_node")
+
+            ipr_node = hou.node(f"{node}_IPR")
+            if ipr_node:
+                ipr_node.destroy()
+
+        return super(CreateRedshiftROP, self).remove_instances(instances)
+
+    def get_pre_create_attr_defs(self):
+        attrs = super(CreateRedshiftROP, self).get_pre_create_attr_defs()
+        image_format_enum = [
+            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
+            "rad", "rat", "rta", "sgi", "tga", "tif",
+        ]
+
+        return attrs + [
+            EnumDef("image_format",
+                    image_format_enum,
+                    default=self.ext,
+                    label="Image Format Options")
+        ]
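A quick check of the new Redshift output-prefix template in plain Python; the subset name is illustrative and the renders_dir is shown unexpanded here, whereas the plugin expands $HIP at creation time:

    >>> "{renders_dir}{subset_name}/{subset_name}.{fmt}".format(
    ...     renders_dir="$HIP/pyblish/renders/",
    ...     subset_name="redshiftMain",
    ...     fmt="${aov}.$F4.{ext}".format(aov="AOV", ext="exr"))
    '$HIP/pyblish/renders/redshiftMain/redshiftMain.$AOV.$F4.exr'

The $AOV and $F4 tokens are left for Redshift and Houdini to expand per AOV and per frame.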
openpype/hosts/houdini/plugins/create/create_vray_rop.py (new file, 156 lines)
@@ -0,0 +1,156 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create VRay ROP."""
import hou

from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance
from openpype.lib import EnumDef, BoolDef


class CreateVrayROP(plugin.HoudiniCreator):
    """VRay ROP"""

    identifier = "io.openpype.creators.houdini.vray_rop"
    label = "VRay ROP"
    family = "vray_rop"
    icon = "magic"
    defaults = ["master"]

    ext = "exr"

    def create(self, subset_name, instance_data, pre_create_data):

        instance_data.pop("active", None)
        instance_data.update({"node_type": "vray_renderer"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 10
        # Submit for job publishing
        instance_data["farm"] = True

        instance = super(CreateVrayROP, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

        instance_node = hou.node(instance.get("instance_node"))

        # Add IPR for Vray
        basename = instance_node.name()
        try:
            ipr_rop = instance_node.parent().createNode(
                "vray", node_name=basename + "_IPR"
            )
        except hou.OperationFailed:
            raise plugin.OpenPypeCreatorError(
                "Cannot create Vray render node. "
                "Make sure Vray installed and enabled!"
            )

        ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1))
        ipr_rop.parm("rop").set(instance_node.path())

        parms = {
            "trange": 1,
            "SettingsEXR_bits_per_channel": "16"   # half precision
        }

        if self.selected_nodes:
            # set up the render camera from the selected node
            camera = None
            for node in self.selected_nodes:
                if node.type().name() == "cam":
                    camera = node.path()
            parms.update({
                "render_camera": camera or ""
            })

        # Enable render element
        ext = pre_create_data.get("image_format")
        instance_data["RenderElement"] = pre_create_data.get("render_element_enabled")  # noqa
        if pre_create_data.get("render_element_enabled", True):
            # Vray has its own tag for AOV file output
            filepath = "{renders_dir}{subset_name}/{subset_name}.{fmt}".format(
                renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
                subset_name=subset_name,
                fmt="${aov}.$F4.{ext}".format(aov="AOV",
                                              ext=ext)
            )
            filepath = "{}{}".format(
                hou.text.expandString("$HIP/pyblish/renders/"),
                "{}/{}.${}.$F4.{}".format(subset_name,
                                          subset_name,
                                          "AOV",
                                          ext)
            )
            re_rop = instance_node.parent().createNode(
                "vray_render_channels",
                node_name=basename + "_render_element"
            )
            # move the render element node next to the vray renderer node
            re_rop.setPosition(instance_node.position() + hou.Vector2(0, 1))
            re_path = re_rop.path()
            parms.update({
                "use_render_channels": 1,
                "SettingsOutput_img_file_path": filepath,
                "render_network_render_channels": re_path
            })

        else:
            filepath = "{renders_dir}{subset_name}/{subset_name}.{fmt}".format(
                renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
                subset_name=subset_name,
                fmt="$F4.{ext}".format(ext=ext)
            )
            parms.update({
                "use_render_channels": 0,
                "SettingsOutput_img_file_path": filepath
            })

        custom_res = pre_create_data.get("override_resolution")
        if custom_res:
            parms.update({"override_camerares": 1})

        instance_node.setParms(parms)

        # lock parameters from AVALON
        to_lock = ["family", "id"]
        self.lock_parameters(instance_node, to_lock)

    def remove_instances(self, instances):
        for instance in instances:
            node = instance.data.get("instance_node")
            # for the extra render node from the plugins
            # such as vray and redshift
            ipr_node = hou.node("{}{}".format(node, "_IPR"))
            if ipr_node:
                ipr_node.destroy()
            re_node = hou.node("{}{}".format(node,
                                             "_render_element"))
            if re_node:
                re_node.destroy()

        return super(CreateVrayROP, self).remove_instances(instances)

    def get_pre_create_attr_defs(self):
        attrs = super(CreateVrayROP, self).get_pre_create_attr_defs()
        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        return attrs + [
            EnumDef("image_format",
                    image_format_enum,
                    default=self.ext,
                    label="Image Format Options"),
            BoolDef("override_resolution",
                    label="Override Camera Resolution",
                    tooltip="Override the current camera "
                            "resolution, recommended for IPR.",
                    default=False),
            BoolDef("render_element_enabled",
                    label="Render Element",
                    tooltip="Create Render Element Node "
                            "if enabled",
                    default=False)
        ]
openpype/hosts/houdini/plugins/publish/collect_arnold_rop.py (new file, 135 lines)
@@ -0,0 +1,135 @@
import os
import re

import hou
import pyblish.api

from openpype.hosts.houdini.api import colorspace
from openpype.hosts.houdini.api.lib import (
    evalParmNoFrame, get_color_management_preferences)


class CollectArnoldROPRenderProducts(pyblish.api.InstancePlugin):
    """Collect Arnold ROP Render Products

    Collects the instance.data["files"] for the render products.

    Provides:
        instance -> files

    """

    label = "Arnold ROP Render Products"
    order = pyblish.api.CollectorOrder + 0.4
    hosts = ["houdini"]
    families = ["arnold_rop"]

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "ar_picture")
        render_products = []

        # Default beauty AOV
        beauty_product = self.get_render_product_name(prefix=default_prefix,
                                                      suffix=None)
        render_products.append(beauty_product)

        files_by_aov = {
            "": self.generate_expected_files(instance, beauty_product)
        }

        num_aovs = rop.evalParm("ar_aovs")
        for index in range(1, num_aovs + 1):
            # Skip disabled AOVs
            if not rop.evalParm("ar_enable_aovP{}".format(index)):
                continue

            if rop.evalParm("ar_aov_exr_enable_layer_name{}".format(index)):
                label = rop.evalParm("ar_aov_exr_layer_name{}".format(index))
            else:
                label = evalParmNoFrame(rop, "ar_aov_label{}".format(index))

            aov_product = self.get_render_product_name(default_prefix,
                                                       suffix=label)
            render_products.append(aov_product)
            files_by_aov[label] = self.generate_expected_files(instance,
                                                               aov_product)

        for product in render_products:
            self.log.debug("Found render product: {}".format(product))

        instance.data["files"] = list(render_products)
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        # For now by default do NOT try to publish the rendered output
        instance.data["publishJobState"] = "Suspended"
        instance.data["attachTo"] = []      # stub required data

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix):
        """Return the output filename using the AOV prefix and suffix"""

        # When AOV is explicitly defined in prefix we just swap it out
        # directly with the AOV suffix to embed it.
        # Note: ${AOV} seems to be evaluated in the parameter as %AOV%
        if "%AOV%" in prefix:
            # It seems that when some special separator characters are present
            # before the %AOV% token that Redshift will secretly remove it if
            # there is no suffix for the current product, for example:
            # foo_%AOV% -> foo.exr
            pattern = "%AOV%" if suffix else "[._-]?%AOV%"
            product_name = re.sub(pattern,
                                  suffix,
                                  prefix,
                                  flags=re.IGNORECASE)
        else:
            if suffix:
                # Add ".{suffix}" before the extension
                prefix_base, ext = os.path.splitext(prefix)
                product_name = prefix_base + "." + suffix + ext
            else:
                product_name = prefix

        return product_name

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""

        dir = os.path.dirname(path)
        file = os.path.basename(path)

        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStart"]
        end = instance.data["frameEnd"]
        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))

        return expected_files
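The padding substitution used by generate_expected_files (here and in the other collectors below) works like this in plain Python; the filename and frame range are illustrative only:

    import re

    file = "arnoldMain.beauty.####.exr"
    file = re.sub("#+", lambda m: "%0{}d".format(len(m.group())), file)
    # -> "arnoldMain.beauty.%04d.exr"
    print([file % i for i in range(1001, 1004)])
    # ['arnoldMain.beauty.1001.exr', 'arnoldMain.beauty.1002.exr', 'arnoldMain.beauty.1003.exr']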
New publish collector plugin (file path not shown in this view):
@@ -0,0 +1,56 @@
import hou

import pyblish.api


class CollectInstanceNodeFrameRange(pyblish.api.InstancePlugin):
    """Collect time range frame data for the instance node."""

    order = pyblish.api.CollectorOrder + 0.001
    label = "Instance Node Frame Range"
    hosts = ["houdini"]

    def process(self, instance):

        node_path = instance.data.get("instance_node")
        node = hou.node(node_path) if node_path else None
        if not node_path or not node:
            self.log.debug("No instance node found for instance: "
                           "{}".format(instance))
            return

        frame_data = self.get_frame_data(node)
        if not frame_data:
            return

        self.log.info("Collected time data: {}".format(frame_data))
        instance.data.update(frame_data)

    def get_frame_data(self, node):
        """Get the frame data: start frame, end frame and steps

        Args:
            node(hou.Node)

        Returns:
            dict

        """

        data = {}

        if node.parm("trange") is None:
            self.log.debug("Node has no 'trange' parameter: "
                           "{}".format(node.path()))
            return data

        if node.evalParm("trange") == 0:
            # Ignore 'render current frame'
            self.log.debug("Node '{}' has 'Render current frame' set. "
                           "Time range data ignored.".format(node.path()))
            return data

        data["frameStart"] = node.evalParm("f1")
        data["frameEnd"] = node.evalParm("f2")
        data["byFrameStep"] = node.evalParm("f3")

        return data
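For a ROP whose trange is set to a 1001-1100 range stepping by 1, get_frame_data() would return roughly the following; the values are illustrative, and Houdini typically returns them as floats:

    {"frameStart": 1001.0, "frameEnd": 1100.0, "byFrameStep": 1.0}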
@@ -118,6 +118,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
 
         data["frameStart"] = node.evalParm("f1")
         data["frameEnd"] = node.evalParm("f2")
-        data["steps"] = node.evalParm("f3")
+        data["byFrameStep"] = node.evalParm("f3")
 
         return data
openpype/hosts/houdini/plugins/publish/collect_karma_rop.py (new file, 104 lines)
@@ -0,0 +1,104 @@
import re
import os

import hou
import pyblish.api

from openpype.hosts.houdini.api.lib import (
    evalParmNoFrame,
    get_color_management_preferences
)
from openpype.hosts.houdini.api import (
    colorspace
)


class CollectKarmaROPRenderProducts(pyblish.api.InstancePlugin):
    """Collect Karma Render Products

    Collects the instance.data["files"] for the multipart render product.

    Provides:
        instance -> files

    """

    label = "Karma ROP Render Products"
    order = pyblish.api.CollectorOrder + 0.4
    hosts = ["houdini"]
    families = ["karma_rop"]

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "picture")
        render_products = []

        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        )
        render_products.append(beauty_product)

        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }

        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        for product in render_products:
            self.log.debug("Found render product: %s" % product)

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix):
        product_name = prefix
        if suffix:
            # Add ".{suffix}" before the extension
            prefix_base, ext = os.path.splitext(prefix)
            product_name = "{}.{}{}".format(prefix_base, suffix, ext)

        return product_name

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""

        dir = os.path.dirname(path)
        file = os.path.basename(path)

        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStart"]
        end = instance.data["frameEnd"]
        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))

        return expected_files
openpype/hosts/houdini/plugins/publish/collect_mantra_rop.py (new file, 127 lines)
@@ -0,0 +1,127 @@
import re
import os

import hou
import pyblish.api

from openpype.hosts.houdini.api.lib import (
    evalParmNoFrame,
    get_color_management_preferences
)
from openpype.hosts.houdini.api import (
    colorspace
)


class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):
    """Collect Mantra Render Products

    Collects the instance.data["files"] for the render products.

    Provides:
        instance -> files

    """

    label = "Mantra ROP Render Products"
    order = pyblish.api.CollectorOrder + 0.4
    hosts = ["houdini"]
    families = ["mantra_rop"]

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "vm_picture")
        render_products = []

        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        )
        render_products.append(beauty_product)

        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }

        aov_numbers = rop.evalParm("vm_numaux")
        if aov_numbers > 0:
            # get the filenames of the AOVs
            for i in range(1, aov_numbers + 1):
                var = rop.evalParm("vm_variable_plane%d" % i)
                if var:
                    aov_name = "vm_filename_plane%d" % i
                    aov_boolean = "vm_usefile_plane%d" % i
                    aov_enabled = rop.evalParm(aov_boolean)
                    has_aov_path = rop.evalParm(aov_name)
                    if has_aov_path and aov_enabled == 1:
                        aov_prefix = evalParmNoFrame(rop, aov_name)
                        aov_product = self.get_render_product_name(
                            prefix=aov_prefix, suffix=None
                        )
                        render_products.append(aov_product)

                        files_by_aov[var] = self.generate_expected_files(instance, aov_product)  # noqa

        for product in render_products:
            self.log.debug("Found render product: %s" % product)

        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        # For now by default do NOT try to publish the rendered output
        instance.data["publishJobState"] = "Suspended"
        instance.data["attachTo"] = []      # stub required data

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix):
        product_name = prefix
        if suffix:
            # Add ".{suffix}" before the extension
            prefix_base, ext = os.path.splitext(prefix)
            product_name = prefix_base + "." + suffix + ext

        return product_name

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""

        dir = os.path.dirname(path)
        file = os.path.basename(path)

        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStart"]
        end = instance.data["frameEnd"]
        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))

        return expected_files
Changes to the Redshift ROP render products collector:
@@ -4,52 +4,13 @@ import os
 import hou
 import pyblish.api
 
-
-def get_top_referenced_parm(parm):
-
-    processed = set()  # disallow infinite loop
-    while True:
-        if parm.path() in processed:
-            raise RuntimeError("Parameter references result in cycle.")
-
-        processed.add(parm.path())
-
-        ref = parm.getReferencedParm()
-        if ref.path() == parm.path():
-            # It returns itself when it doesn't reference
-            # another parameter
-            return ref
-        else:
-            parm = ref
-
-
-def evalParmNoFrame(node, parm, pad_character="#"):
-
-    parameter = node.parm(parm)
-    assert parameter, "Parameter does not exist: %s.%s" % (node, parm)
-
-    # If the parameter has a parameter reference, then get that
-    # parameter instead as otherwise `unexpandedString()` fails.
-    parameter = get_top_referenced_parm(parameter)
-
-    # Substitute out the frame numbering with padded characters
-    try:
-        raw = parameter.unexpandedString()
-    except hou.Error as exc:
-        print("Failed: %s" % parameter)
-        raise RuntimeError(exc)
-
-    def replace(match):
-        padding = 1
-        n = match.group(2)
-        if n and int(n):
-            padding = int(n)
-        return pad_character * padding
-
-    expression = re.sub(r"(\$F([0-9]*))", replace, raw)
-
-    with hou.ScriptEvalContext(parameter):
-        return hou.expandStringAtFrame(expression, 0)
+from openpype.hosts.houdini.api.lib import (
+    evalParmNoFrame,
+    get_color_management_preferences
+)
+from openpype.hosts.houdini.api import (
+    colorspace
+)
 
 
 class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
@@ -87,6 +48,9 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
             prefix=default_prefix, suffix=beauty_suffix
         )
         render_products.append(beauty_product)
+        files_by_aov = {
+            "_": self.generate_expected_files(instance,
+                                              beauty_product)}
 
         num_aovs = rop.evalParm("RS_aov")
         for index in range(num_aovs):
@@ -104,11 +68,29 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
             aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
             render_products.append(aov_product)
 
+            files_by_aov[aov_suffix] = self.generate_expected_files(instance,
+                                                                    aov_product)  # noqa
+
         for product in render_products:
             self.log.debug("Found render product: %s" % product)
 
         filenames = list(render_products)
         instance.data["files"] = filenames
+        instance.data["renderProducts"] = colorspace.ARenderProduct()
+
+        # For now by default do NOT try to publish the rendered output
+        instance.data["publishJobState"] = "Suspended"
+        instance.data["attachTo"] = []      # stub required data
+
+        if "expectedFiles" not in instance.data:
+            instance.data["expectedFiles"] = list()
+        instance.data["expectedFiles"].append(files_by_aov)
+
+        # update the colorspace data
+        colorspace_data = get_color_management_preferences()
+        instance.data["colorspaceConfig"] = colorspace_data["config"]
+        instance.data["colorspaceDisplay"] = colorspace_data["display"]
+        instance.data["colorspaceView"] = colorspace_data["view"]
 
     def get_render_product_name(self, prefix, suffix):
         """Return the output filename using the AOV prefix and suffix"""
@@ -133,3 +115,27 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
             product_name = prefix
 
         return product_name
+
+    def generate_expected_files(self, instance, path):
+        """Create expected files in instance data"""
+
+        dir = os.path.dirname(path)
+        file = os.path.basename(path)
+
+        if "#" in file:
+            def replace(match):
+                return "%0{}d".format(len(match.group()))
+
+            file = re.sub("#+", replace, file)
+
+        if "%" not in file:
+            return path
+
+        expected_files = []
+        start = instance.data["frameStart"]
+        end = instance.data["frameEnd"]
+        for i in range(int(start), (int(end) + 1)):
+            expected_files.append(
+                os.path.join(dir, (file % i)).replace("\\", "/"))
+
+        return expected_files
openpype/hosts/houdini/plugins/publish/collect_vray_rop.py (new file, 129 lines)
@@ -0,0 +1,129 @@
import re
import os

import hou
import pyblish.api

from openpype.hosts.houdini.api.lib import (
    evalParmNoFrame,
    get_color_management_preferences
)
from openpype.hosts.houdini.api import (
    colorspace
)


class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):
    """Collect Vray Render Products

    Collects the instance.data["files"] for the render products.

    Provides:
        instance -> files

    """

    label = "VRay ROP Render Products"
    order = pyblish.api.CollectorOrder + 0.4
    hosts = ["houdini"]
    families = ["vray_rop"]

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "SettingsOutput_img_file_path")
        render_products = []
        # TODO: add render elements if render element

        beauty_product = self.get_beauty_render_product(default_prefix)
        render_products.append(beauty_product)
        files_by_aov = {
            "RGB Color": self.generate_expected_files(instance,
                                                      beauty_product)}

        if instance.data.get("RenderElement", True):
            render_element = self.get_render_element_name(rop, default_prefix)
            if render_element:
                for aov, renderpass in render_element.items():
                    render_products.append(renderpass)
                    files_by_aov[aov] = self.generate_expected_files(instance, renderpass)  # noqa

        for product in render_products:
            self.log.debug("Found render product: %s" % product)
        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        # For now by default do NOT try to publish the rendered output
        instance.data["publishJobState"] = "Suspended"
        instance.data["attachTo"] = []      # stub required data

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)
        self.log.debug("expectedFiles:{}".format(files_by_aov))

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_beauty_render_product(self, prefix, suffix="<reName>"):
        """Return the beauty output filename if render element enabled
        """
        aov_parm = ".{}".format(suffix)
        beauty_product = None
        if aov_parm in prefix:
            beauty_product = prefix.replace(aov_parm, "")
        else:
            beauty_product = prefix

        return beauty_product

    def get_render_element_name(self, node, prefix, suffix="<reName>"):
        """Return the output filename using the AOV prefix and suffix
        """
        render_element_dict = {}
        # need a rewrite
        re_path = node.evalParm("render_network_render_channels")
        if re_path:
            node_children = hou.node(re_path).children()
            for element in node_children:
                if element.shaderName() != "vray:SettingsRenderChannels":
                    aov = str(element)
                    render_product = prefix.replace(suffix, aov)
                    render_element_dict[aov] = render_product
        return render_element_dict

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""

        dir = os.path.dirname(path)
        file = os.path.basename(path)

        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStart"]
        end = instance.data["frameEnd"]
        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))

        return expected_files
Changes to the IncrementCurrentFile publish plugin:
@@ -2,7 +2,10 @@ import pyblish.api
 
 from openpype.lib import version_up
 from openpype.pipeline import registered_host
+from openpype.action import get_errored_plugins_from_data
 from openpype.hosts.houdini.api import HoudiniHost
+from openpype.pipeline.publish import KnownPublishError
 
 
 class IncrementCurrentFile(pyblish.api.ContextPlugin):
     """Increment the current file.
@@ -14,17 +17,32 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):
     label = "Increment current file"
     order = pyblish.api.IntegratorOrder + 9.0
     hosts = ["houdini"]
-    families = ["workfile"]
+    families = ["workfile",
+                "redshift_rop",
+                "arnold_rop",
+                "mantra_rop",
+                "karma_rop",
+                "usdrender"]
     optional = True
 
     def process(self, context):
 
+        errored_plugins = get_errored_plugins_from_data(context)
+        if any(
+            plugin.__name__ == "HoudiniSubmitPublishDeadline"
+            for plugin in errored_plugins
+        ):
+            raise KnownPublishError(
+                "Skipping incrementing current file because "
+                "submission to deadline failed."
+            )
+
         # Filename must not have changed since collecting
         host = registered_host()  # type: HoudiniHost
         current_file = host.current_file()
         assert (
             context.data["currentFile"] == current_file
-        ), "Collected filename from current scene name."
+        ), "Collected filename mismatches from current scene name."
 
         new_filepath = version_up(current_file)
         host.save_workfile(new_filepath)
Change in the abstract Deadline submission plugin:
@@ -662,7 +662,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
 
         # test if there is instance of workfile waiting
         # to be published.
-        assert i.data["publish"] is True, (
+        assert i.data.get("publish", True) is True, (
             "Workfile (scene) must be published along")
 
         return i
@@ -1,19 +1,27 @@
+import hou

 import os
-import json
+import attr
 import getpass
 from datetime import datetime

-import requests
 import pyblish.api

-# import hou ???

 from openpype.pipeline import legacy_io
 from openpype.tests.lib import is_in_tests
+from openpype_modules.deadline import abstract_submit_deadline
+from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
 from openpype.lib import is_running_from_build


-class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
+@attr.s
+class DeadlinePluginInfo():
+    SceneFile = attr.ib(default=None)
+    OutputDriver = attr.ib(default=None)
+    Version = attr.ib(default=None)
+    IgnoreInputs = attr.ib(default=True)
+
+
+class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
     """Submit Solaris USD Render ROPs to Deadline.

     Renders are submitted to a Deadline Web Service as
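For readers new to the `attr` pattern this hunk introduces, a self-contained sketch of how the plugin-info container becomes the plain dict Deadline expects (field values below are made up):

    import attr

    @attr.s
    class DeadlinePluginInfo():
        SceneFile = attr.ib(default=None)
        OutputDriver = attr.ib(default=None)
        Version = attr.ib(default=None)
        IgnoreInputs = attr.ib(default=True)

    plugin_info = DeadlinePluginInfo(
        SceneFile="/path/to/scene_v002.hip",   # hypothetical scene path
        OutputDriver="/out/karma_ROP",         # hypothetical ROP path
        Version="19.5",
    )

    print(attr.asdict(plugin_info))
    # {'SceneFile': '/path/to/scene_v002.hip', 'OutputDriver': '/out/karma_ROP',
    #  'Version': '19.5', 'IgnoreInputs': True}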
@@ -30,83 +38,57 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder
     hosts = ["houdini"]
     families = ["usdrender",
-                "redshift_rop"]
+                "redshift_rop",
+                "arnold_rop",
+                "mantra_rop",
+                "karma_rop",
+                "vray_rop"]
     targets = ["local"]
+    use_published = True

-    def process(self, instance):
+    def get_job_info(self):
+        job_info = DeadlineJobInfo(Plugin="Houdini")

+        instance = self._instance
         context = instance.context
-        code = context.data["code"]
         filepath = context.data["currentFile"]
         filename = os.path.basename(filepath)
-        comment = context.data.get("comment", "")
-        deadline_user = context.data.get("deadlineUser", getpass.getuser())
-        jobname = "%s - %s" % (filename, instance.name)

-        # Support code prefix label for batch name
-        batch_name = filename
-        if code:
-            batch_name = "{0} - {1}".format(code, batch_name)
+        job_info.Name = "{} - {}".format(filename, instance.name)
+        job_info.BatchName = filename
+        job_info.Plugin = "Houdini"
+        job_info.UserName = context.data.get(
+            "deadlineUser", getpass.getuser())

         if is_in_tests():
-            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
+            job_info.BatchName += datetime.now().strftime("%d%m%Y%H%M%S")

-        # Output driver to render
-        driver = instance[0]
-
-        # StartFrame to EndFrame by byFrameStep
+        # Deadline requires integers in frame range
         frames = "{start}-{end}x{step}".format(
             start=int(instance.data["frameStart"]),
             end=int(instance.data["frameEnd"]),
             step=int(instance.data["byFrameStep"]),
         )
+        job_info.Frames = frames

-        # Documentation for keys available at:
-        # https://docs.thinkboxsoftware.com
-        #    /products/deadline/8.0/1_User%20Manual/manual
-        #    /manual-submission.html#job-info-file-options
-        payload = {
-            "JobInfo": {
-                # Top-level group name
-                "BatchName": batch_name,
-
-                # Job name, as seen in Monitor
-                "Name": jobname,
-
-                # Arbitrary username, for visualisation in Monitor
-                "UserName": deadline_user,
-
-                "Plugin": "Houdini",
-                "Pool": instance.data.get("primaryPool"),
-                "secondaryPool": instance.data.get("secondaryPool"),
-                "Frames": frames,
-
-                "ChunkSize": instance.data.get("chunkSize", 10),
-
-                "Comment": comment
-            },
-            "PluginInfo": {
-                # Input
-                "SceneFile": filepath,
-                "OutputDriver": driver.path(),
-
-                # Mandatory for Deadline
-                # Houdini version without patch number
-                "Version": hou.applicationVersionString().rsplit(".", 1)[0],
-
-                "IgnoreInputs": True
-            },
-
-            # Mandatory for Deadline, may be empty
-            "AuxFiles": []
-        }
+        job_info.Pool = instance.data.get("primaryPool")
+        job_info.SecondaryPool = instance.data.get("secondaryPool")
+        job_info.ChunkSize = instance.data.get("chunkSize", 10)
+        job_info.Comment = context.data.get("comment")

-        # Include critical environment variables with submission + api.Session
         keys = [
-            # Submit along the current Avalon tool setup that we launched
-            # this application with so the Render Slave can build its own
-            # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9"
-            "AVALON_TOOLS"
+            "FTRACK_API_KEY",
+            "FTRACK_API_USER",
+            "FTRACK_SERVER",
+            "OPENPYPE_SG_USER",
+            "AVALON_PROJECT",
+            "AVALON_ASSET",
+            "AVALON_TASK",
+            "AVALON_APP_NAME",
+            "OPENPYPE_DEV",
+            "OPENPYPE_LOG_NO_COLORS",
+            "OPENPYPE_VERSION"
         ]

         # Add OpenPype version if we are running from build.
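A worked example of the `Frames` expression assembled in `get_job_info` (the instance values are hypothetical):

    # Hypothetical instance data, mirroring the keys used above.
    instance_data = {"frameStart": 1001.0, "frameEnd": 1100.0, "byFrameStep": 1.0}

    frames = "{start}-{end}x{step}".format(
        start=int(instance_data["frameStart"]),
        end=int(instance_data["frameEnd"]),
        step=int(instance_data["byFrameStep"]),
    )
    print(frames)  # "1001-1100x1", Deadline's StartFrame-EndFrame x ByFrameStep syntax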
@@ -114,61 +96,50 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
             keys.append("OPENPYPE_VERSION")

         # Add mongo url if it's enabled
-        if context.data.get("deadlinePassMongoUrl"):
+        if self._instance.context.data.get("deadlinePassMongoUrl"):
             keys.append("OPENPYPE_MONGO")

         environment = dict({key: os.environ[key] for key in keys
                             if key in os.environ}, **legacy_io.Session)
+        for key in keys:
+            value = environment.get(key)
+            if value:
+                job_info.EnvironmentKeyValue[key] = value

-        payload["JobInfo"].update({
-            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
-                key=key,
-                value=environment[key]
-            ) for index, key in enumerate(environment)
-        })
-
-        # Include OutputFilename entries
-        # The first entry also enables double-click to preview rendered
-        # frames from Deadline Monitor
-        output_data = {}
+        # to recognize job from PYPE for turning Event On/Off
+        job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
+
         for i, filepath in enumerate(instance.data["files"]):
             dirname = os.path.dirname(filepath)
             fname = os.path.basename(filepath)
-            output_data["OutputDirectory%d" % i] = dirname.replace("\\", "/")
-            output_data["OutputFilename%d" % i] = fname
-
-            # For now ensure destination folder exists otherwise HUSK
-            # will fail to render the output image. This is supposedly fixed
-            # in new production builds of Houdini
-            # TODO Remove this workaround with Houdini 18.0.391+
-            if not os.path.exists(dirname):
-                self.log.info("Ensuring output directory exists: %s" %
-                              dirname)
-                os.makedirs(dirname)
-
-        payload["JobInfo"].update(output_data)
-
-        self.submit(instance, payload)
-
-    def submit(self, instance, payload):
-
-        AVALON_DEADLINE = legacy_io.Session.get("AVALON_DEADLINE",
-                                                "http://localhost:8082")
-        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
-
-        plugin = payload["JobInfo"]["Plugin"]
-        self.log.info("Using Render Plugin : {}".format(plugin))
-
-        self.log.info("Submitting..")
-        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))
-
-        # E.g. http://192.168.0.1:8082/api/jobs
-        url = "{}/api/jobs".format(AVALON_DEADLINE)
-        response = requests.post(url, json=payload)
-        if not response.ok:
-            raise Exception(response.text)
+            job_info.OutputDirectory += dirname.replace("\\", "/")
+            job_info.OutputFilename += fname
+
+        return job_info
+
+    def get_plugin_info(self):
+
+        instance = self._instance
+        context = instance.context
+
+        # Output driver to render
+        driver = hou.node(instance.data["instance_node"])
+        hou_major_minor = hou.applicationVersionString().rsplit(".", 1)[0]
+
+        plugin_info = DeadlinePluginInfo(
+            SceneFile=context.data["currentFile"],
+            OutputDriver=driver.path(),
+            Version=hou_major_minor,
+            IgnoreInputs=True
+        )
+
+        return attr.asdict(plugin_info)
+
+    def process(self, instance):
+        super(HoudiniSubmitDeadline, self).process(instance)

+        # TODO: Avoid the need for this logic here, needed for submit publish
         # Store output dir for unified publisher (filesequence)
         output_dir = os.path.dirname(instance.data["files"][0])
         instance.data["outputDir"] = output_dir
-        instance.data["deadlineSubmissionJob"] = response.json()
+        instance.data["toBeRenderedOn"] = "deadline"
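The environment forwarding added above can be hard to read in diff form; a minimal standalone sketch of the same filtering idea (key names and session values are illustrative only):

    import os

    # Hypothetical whitelist and session, standing in for `keys` and legacy_io.Session.
    keys = ["AVALON_PROJECT", "AVALON_ASSET", "OPENPYPE_LOG_NO_COLORS"]
    session = {"AVALON_PROJECT": "demo_project", "AVALON_ASSET": "sh010"}

    # Only whitelisted keys that exist locally are considered; session values win.
    environment = dict(
        {key: os.environ[key] for key in keys if key in os.environ},
        **session
    )

    job_environment = {}
    for key in keys:
        value = environment.get(key)
        if value:
            job_environment[key] = value  # what EnvironmentKeyValue[key] carries per job

    print(job_environment)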
@@ -118,11 +118,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
     deadline_plugin = "OpenPype"
     targets = ["local"]

-    hosts = ["fusion", "max", "maya", "nuke",
+    hosts = ["fusion", "max", "maya", "nuke", "houdini",
              "celaction", "aftereffects", "harmony"]

     families = ["render.farm", "prerender.farm",
-                "renderlayer", "imagesequence", "maxrender", "vrayscene"]
+                "renderlayer", "imagesequence",
+                "vrayscene", "maxrender",
+                "arnold_rop", "mantra_rop",
+                "karma_rop", "vray_rop",
+                "redshift_rop"]

     aov_filter = {"maya": [r".*([Bb]eauty).*"],
                   "aftereffects": [r".*"],  # for everything from AE
@@ -140,7 +144,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         "FTRACK_SERVER",
         "AVALON_APP_NAME",
         "OPENPYPE_USERNAME",
-        "OPENPYPE_SG_USER",
+        "OPENPYPE_VERSION",
+        "OPENPYPE_SG_USER"
     ]

     # Add OpenPype version if we are running from build.
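To make the `aov_filter` patterns concrete (file names below are invented), only names matching one of a host's expressions are picked as the reviewable beauty output:

    import re

    aov_filter = {"maya": [r".*([Bb]eauty).*"],
                  "aftereffects": [r".*"]}

    candidates = ["renderSh010_beauty.1001.exr", "renderSh010_crypto.1001.exr"]
    for name in candidates:
        keep = any(re.match(pattern, name) for pattern in aov_filter["maya"])
        print(name, "->", "review" if keep else "skip")
    # renderSh010_beauty.1001.exr -> review
    # renderSh010_crypto.1001.exr -> skip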
@@ -14,7 +14,7 @@ sidebar_label: Houdini
 - [Library Loader](artist_tools_library-loader)

 ## Publishing Alembic Cameras
 You can publish baked camera in Alembic format.

 Select your camera and go **OpenPype -> Create** and select **Camera (abc)**.
 This will create Alembic ROP in **out** with path and frame range already set. This node will have a name you've
@@ -30,7 +30,7 @@ You can use any COP node and publish the image sequence generated from it. For e
 

 To publish the output of the `radialblur1` go to **OpenPype -> Create** and
 select **Composite (Image Sequence)**. If you name the variant *Noise* this will create the `/out/imagesequenceNoise` Composite ROP with the frame range set.

 When you hit **Publish** it will render image sequence from selected node.
@@ -56,14 +56,14 @@ Now select the `output0` node and go **OpenPype -> Create** and select **Point C
 Alembic ROP `/out/pointcacheStrange`

 ## Publishing Reviews (OpenGL)
 To generate a review output from Houdini you need to create a **review** instance.
 Go to **OpenPype -> Create** and select **Review**.

 

 On create, with the **Use Selection** checkbox enabled it will set up the first
 camera found in your selection as the camera for the OpenGL ROP node and other
 non-cameras are set in **Force Objects**. It will then render those even if
 their display flag is disabled in your scene.

 ## Redshift
@@ -71,6 +71,18 @@ their display flag is disabled in your scene.
 This part of documentation is still work in progress.
 :::

+## Publishing Render to Deadline
+Five renderers (Arnold, Redshift, Mantra, Karma, V-Ray) are supported for render publishing.
+They are named with the suffix `_ROP`.
+To submit a render to Deadline, you need to create a **Render** instance.
+Go to **OpenPype -> Create** and select **Publish**. Before clicking the **Create** button,
+you need to select your preferred image rendering format. You can also enable **Use selection** to
+select your render camera.
+
+
+All the render outputs are stored in the `pyblish/render` directory within your project path.\
+Karma renders also output a USD render by default.
+
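As an illustrative aside (not part of the patch), you can confirm which render ROP the creator made from Houdini's Python shell; the node names below are hypothetical:

    import hou

    # List render ROP nodes created in /out; instance nodes carry the "_ROP" suffix.
    for node in hou.node("/out").children():
        if node.name().endswith("_ROP"):
            print(node.path(), node.type().name())
    # e.g. /out/karma_ROP karma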
 ## USD (experimental support)
 ### Publishing USD
 You can publish your Solaris Stage as USD file.
BIN  website/docs/assets/houdini_render_publish_creator.png  (new file, 80 KiB; binary file not shown)