Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-26 05:42:15 +01:00)

Merge pull request #1821 from pypeclub/feature/houdini-cb-update

commit 0c613ab134
89 changed files with 5084 additions and 285 deletions
@@ -1,17 +1,21 @@
 import os
 import sys
 import logging
+import contextlib

 import hou

 from pyblish import api as pyblish

 from avalon import api as avalon
 from avalon.houdini import pipeline as houdini

 import openpype.hosts.houdini
 from openpype.hosts.houdini.api import lib

-from openpype.lib import any_outdated
+from openpype.lib import (
+    any_outdated
+)
+
+from .lib import get_asset_fps

 log = logging.getLogger("openpype.hosts.houdini")
@@ -22,6 +26,7 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
+INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


 def install():

     pyblish.register_plugin_path(PUBLISH_PATH)
@@ -29,19 +34,28 @@ def install():
     avalon.register_plugin_path(avalon.Creator, CREATE_PATH)

     log.info("Installing callbacks ... ")
-    avalon.on("init", on_init)
+    # avalon.on("init", on_init)
     avalon.before("save", before_save)
     avalon.on("save", on_save)
     avalon.on("open", on_open)
     avalon.on("new", on_new)

     pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

     log.info("Setting default family states for loader..")
-    avalon.data["familiesStateToggled"] = ["imagesequence"]
+    avalon.data["familiesStateToggled"] = [
+        "imagesequence",
+        "review"
+    ]

+    # add houdini vendor packages
+    hou_pythonpath = os.path.join(os.path.dirname(HOST_DIR), "vendor")
+
+    sys.path.append(hou_pythonpath)


 def on_init(*args):
     houdini.on_houdini_initialize()

+    # Set asset FPS for the empty scene directly after launch of Houdini
+    # so it initializes into the correct scene FPS
+    _set_asset_fps()


 def before_save(*args):
@@ -59,10 +73,18 @@ def on_save(*args):

 def on_open(*args):

+    if not hou.isUIAvailable():
+        log.debug("Batch mode detected, ignoring `on_open` callbacks..")
+        return

     avalon.logger.info("Running callback on open..")

+    # Validate FPS after update_task_from_path to
+    # ensure it is using correct FPS for the asset
+    lib.validate_fps()

     if any_outdated():
-        from ..widgets import popup
+        from openpype.widgets import popup

         log.warning("Scene has outdated content.")
@@ -70,7 +92,7 @@ def on_open(*args):
         parent = hou.ui.mainQtWindow()
         if parent is None:
             log.info("Skipping outdated content pop-up "
-                     "because Maya window can't be found.")
+                     "because Houdini window can't be found.")
         else:

             # Show outdated pop-up
@@ -79,15 +101,52 @@ def on_open(*args):
                 tool.show(parent=parent)

             dialog = popup.Popup(parent=parent)
-            dialog.setWindowTitle("Maya scene has outdated content")
+            dialog.setWindowTitle("Houdini scene has outdated content")
             dialog.setMessage("There are outdated containers in "
-                              "your Maya scene.")
-            dialog.on_show.connect(_on_show_inventory)
+                              "your Houdini scene.")
+            dialog.on_clicked.connect(_on_show_inventory)
             dialog.show()


 def on_new(_):
     """Set project resolution and fps when creating a new file"""
     avalon.logger.info("Running callback on new..")
     _set_asset_fps()


 def _set_asset_fps():
     """Set Houdini scene FPS to the default required for current asset"""

     # Set new scene fps
     fps = get_asset_fps()
     print("Setting scene FPS to %i" % fps)
     lib.set_scene_fps(fps)


 def on_pyblish_instance_toggled(instance, new_value, old_value):
     """Toggle saver tool passthrough states on instance toggles."""

+    @contextlib.contextmanager
+    def main_take(no_update=True):
+        """Enter root take during context"""
+        original_take = hou.takes.currentTake()
+        original_update_mode = hou.updateModeSetting()
+        root = hou.takes.rootTake()
+        has_changed = False
+        try:
+            if original_take != root:
+                has_changed = True
+                if no_update:
+                    hou.setUpdateMode(hou.updateMode.Manual)
+                hou.takes.setCurrentTake(root)
+            yield
+        finally:
+            if has_changed:
+                if no_update:
+                    hou.setUpdateMode(original_update_mode)
+                hou.takes.setCurrentTake(original_take)

     if not instance.data.get("_allowToggleBypass", True):
         return

     nodes = instance[:]
     if not nodes:
@@ -96,8 +155,20 @@ def on_pyblish_instance_toggled(instance, new_value, old_value):
     # Assume instance node is first node
     instance_node = nodes[0]

+    if not hasattr(instance_node, "isBypassed"):
+        # Likely not a node that can actually be bypassed
+        log.debug("Can't bypass node: %s", instance_node.path())
+        return

     if instance_node.isBypassed() != (not old_value):
         print("%s old bypass state didn't match old instance state, "
               "updating anyway.." % instance_node.path())

-    instance_node.bypass(not new_value)
+    try:
+        # Go into the main take, because when in another take changing
+        # the bypass state of a node cannot be done due to it being locked
+        # by default.
+        with main_take(no_update=True):
+            instance_node.bypass(not new_value)
+    except hou.PermissionError as exc:
+        log.warning("%s - %s", instance_node.path(), exc)
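For context, a minimal usage sketch (not part of this commit) of how a take context manager like `main_take` behaves on its own; the ROP path is hypothetical and this only runs inside a Houdini session:

    import hou

    # Temporarily jump to the root take to edit an otherwise locked
    # bypass flag, with viewport updates paused for speed.
    ropnode = hou.node("/out/alembic1")  # hypothetical ROP
    with main_take(no_update=True):
        ropnode.bypass(True)
    # On exit the original take and update mode are restored.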
@@ -1,14 +1,19 @@
 import uuid

 import logging
 from contextlib import contextmanager

+import hou

 from openpype import lib
+from openpype.api import get_asset
 from avalon import api, io
 from avalon.houdini import lib as houdini

-import hou

 log = logging.getLogger(__name__)


+def get_asset_fps():
+    """Return current asset fps."""
+    return get_asset()["data"].get("fps")
+

 def set_id(node, unique_id, overwrite=False):
@@ -171,10 +176,10 @@ def get_output_parameter(node):
     node_type = node.type().name()
     if node_type == "geometry":
         return node.parm("sopoutput")

     elif node_type == "alembic":
         return node.parm("filename")

+    elif node_type == "comp":
+        return node.parm("copoutput")
     else:
         raise TypeError("Node type '%s' not supported" % node_type)
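A short usage sketch of `get_output_parameter` (illustrative only; the node path is hypothetical and a Houdini session is assumed):

    import hou
    from openpype.hosts.houdini.api.lib import get_output_parameter

    rop = hou.node("/out/comp1")       # hypothetical Composite ROP
    parm = get_output_parameter(rop)   # -> the "copoutput" parm
    print(parm.eval())                 # resolved output file path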
@@ -205,7 +210,7 @@ def validate_fps():

     """

-    fps = lib.get_asset()["data"]["fps"]
+    fps = get_asset_fps()
     current_fps = hou.fps()  # returns float

     if current_fps != fps:
@@ -217,18 +222,123 @@ def validate_fps():
         if parent is None:
             pass
         else:
-            dialog = popup.Popup2(parent=parent)
+            dialog = popup.Popup(parent=parent)
             dialog.setModal(True)
-            dialog.setWindowTitle("Houdini scene not in line with project")
-            dialog.setMessage("The FPS is out of sync, please fix it")
+            dialog.setWindowTitle("Houdini scene does not match project FPS")
+            dialog.setMessage("Scene %i FPS does not match project %i FPS" %
+                              (current_fps, fps))
+            dialog.setButtonText("Fix")

-            # Set new text for button (add optional argument for the popup?)
-            toggle = dialog.widgets["toggle"]
-            toggle.setEnabled(False)
-            dialog.on_show.connect(lambda: set_scene_fps(fps))
+            # on_clicked is the Fix button clicked callback
+            dialog.on_clicked.connect(lambda: set_scene_fps(fps))

             dialog.show()

             return False

     return True
def create_remote_publish_node(force=True):
    """Function to create a remote publish node in /out

    This is a hacked "Shell" node that does *nothing* except for triggering
    `colorbleed.lib.publish_remote()` as pre-render script.

    All default attributes of the Shell node are hidden to the Artist to
    avoid confusion.

    Additionally some custom attributes are added that can be collected
    by a Collector to set specific settings for the publish, e.g. whether
    to separate the jobs per instance or process in one single job.

    """

    cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()"

    existing = hou.node("/out/REMOTE_PUBLISH")
    if existing:
        if force:
            log.warning("Removing existing '/out/REMOTE_PUBLISH' node..")
            existing.destroy()
        else:
            raise RuntimeError("Node already exists /out/REMOTE_PUBLISH. "
                               "Please remove manually or set `force` to "
                               "True.")

    # Create the shell node
    out = hou.node("/out")
    node = out.createNode("shell", node_name="REMOTE_PUBLISH")
    node.moveToGoodPosition()

    # Set color to make it stand out (avalon/pyblish color)
    node.setColor(hou.Color(0.439, 0.709, 0.933))

    # Set the pre-render script
    node.setParms({
        "prerender": cmd,
        "lprerender": "python"  # command language
    })

    # Lock the attributes to ensure artists won't easily mess things up.
    node.parm("prerender").lock(True)
    node.parm("lprerender").lock(True)

    # Lock up the actual shell command
    command_parm = node.parm("command")
    command_parm.set("")
    command_parm.lock(True)
    shellexec_parm = node.parm("shellexec")
    shellexec_parm.set(False)
    shellexec_parm.lock(True)

    # Get the node's parm template group so we can customize it
    template = node.parmTemplateGroup()

    # Hide default tabs
    template.hideFolder("Shell", True)
    template.hideFolder("Scripts", True)

    # Hide default settings
    template.hide("execute", True)
    template.hide("renderdialog", True)
    template.hide("trange", True)
    template.hide("f", True)
    template.hide("take", True)

    # Add custom settings to this node.
    parm_folder = hou.FolderParmTemplate("folder", "Submission Settings")

    # Separate Jobs per Instance
    parm = hou.ToggleParmTemplate(name="separateJobPerInstance",
                                  label="Separate Job per Instance",
                                  default_value=False)
    parm_folder.addParmTemplate(parm)

    # Add our custom Submission Settings folder
    template.append(parm_folder)

    # Apply template back to the node
    node.setParmTemplateGroup(template)


def render_rop(ropnode):
    """Render ROP node utility for Publishing.

    This renders a ROP node with the settings we want during Publishing.
    """
    # Print verbose when in batch mode without UI
    verbose = not hou.isUIAvailable()

    # Render
    try:
        ropnode.render(verbose=verbose,
                       # Allow Deadline to capture completion percentage
                       output_progress=verbose)
    except hou.Error as exc:
        # The hou.Error is not inherited from a Python Exception class,
        # so we explicitly capture the houdini error, otherwise pyblish
        # will remain hanging.
        import traceback
        traceback.print_exc()
        raise RuntimeError("Render failed: {0}".format(exc))
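A hypothetical usage sketch of `render_rop` (the ROP path is illustrative), showing why the hou.Error wrapping matters:

    import hou
    from openpype.hosts.houdini.api.lib import render_rop

    ropnode = hou.node("/out/alembic1")  # hypothetical ROP
    try:
        render_rop(ropnode)
    except RuntimeError as exc:
        # hou.Error was converted into a regular Python exception,
        # so publishing frameworks can catch it normally.
        print(exc)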
@@ -1,6 +1,26 @@
 # -*- coding: utf-8 -*-
 """Houdini specific Avalon/Pyblish plugin definitions."""
+import sys
 from avalon import houdini
+import six
+
+import hou
 from openpype.api import PypeCreatorMixin


+class OpenPypeCreatorError(Exception):
+    pass
+
+
 class Creator(PypeCreatorMixin, houdini.Creator):
+    def process(self):
+        try:
+            # re-raise as standard Python exception so
+            # Avalon can catch it
+            instance = super(Creator, self).process()
+            self._process(instance)
+        except hou.Error as er:
+            six.reraise(
+                OpenPypeCreatorError,
+                OpenPypeCreatorError("Creator error: {}".format(er)),
+                sys.exc_info()[2])
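A minimal sketch (assumed, not from this commit) of how host creators plug into this base class: subclasses implement only `_process`, while error handling stays in `Creator.process`:

    class MyGeometryCreator(Creator):  # hypothetical subclass
        """Example creator producing a geometry ROP."""

        def __init__(self, *args, **kwargs):
            super(MyGeometryCreator, self).__init__(*args, **kwargs)
            self.data.update({"node_type": "geometry"})

        def _process(self, instance):
            # `instance` is the hou.Node created by the base class.
            instance.setParms(
                {"sopoutput": "$HIP/pyblish/%s.$F4.bgeo" % self.name})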
openpype/hosts/houdini/api/usd.py (new file, 255 lines)

@@ -0,0 +1,255 @@
"""Houdini-specific USD Library functions."""

import contextlib
import logging

from Qt import QtCore, QtGui
from avalon.tools.widgets import AssetWidget
from avalon import style

from pxr import Sdf


log = logging.getLogger(__name__)


def pick_asset(node):
    """Show a user interface to select an Asset in the project

    When double clicking an asset it will set the Asset value in the
    'asset' parameter.

    """

    pos = QtGui.QCursor.pos()

    parm = node.parm("asset_name")
    if not parm:
        log.error("Node has no 'asset' parameter: %s", node)
        return

    # Construct the AssetWidget as a frameless popup so it automatically
    # closes when clicked outside of it.
    global tool
    tool = AssetWidget(silo_creatable=False)
    tool.setContentsMargins(5, 5, 5, 5)
    tool.setWindowTitle("Pick Asset")
    tool.setStyleSheet(style.load_stylesheet())
    tool.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup)
    tool.refresh()

    # Select the current asset if there is any
    name = parm.eval()
    if name:
        from avalon import io

        db_asset = io.find_one({"name": name, "type": "asset"})
        if db_asset:
            silo = db_asset.get("silo")
            if silo:
                tool.set_silo(silo)
            tool.select_assets([name], expand=True)

    # Show the window near the cursor (anchored at its top right)
    tool.resize(250, 400)
    tool.move(tool.mapFromGlobal(pos) - QtCore.QPoint(tool.width(), 0))

    def set_parameter_callback(index):
        name = index.data(tool.model.DocumentRole)["name"]
        parm.set(name)
        tool.close()

    tool.view.doubleClicked.connect(set_parameter_callback)
    tool.show()


def add_usd_output_processor(ropnode, processor):
    """Add USD Output Processor to USD Rop node.

    Args:
        ropnode (hou.RopNode): The USD Rop node.
        processor (str): The output processor name. This is the basename of
            the python file that contains the Houdini USD Output Processor.

    """

    import loputils

    loputils.handleOutputProcessorAdd(
        {
            "node": ropnode,
            "parm": ropnode.parm("outputprocessors"),
            "script_value": processor,
        }
    )


def remove_usd_output_processor(ropnode, processor):
    """Removes USD Output Processor from USD Rop node.

    Args:
        ropnode (hou.RopNode): The USD Rop node.
        processor (str): The output processor name. This is the basename of
            the python file that contains the Houdini USD Output Processor.

    """
    import loputils

    parm = ropnode.parm(processor + "_remove")
    if not parm:
        raise RuntimeError(
            "Output Processor %s does not "
            "exist on %s" % (processor, ropnode.name())
        )

    loputils.handleOutputProcessorRemove({"node": ropnode, "parm": parm})


@contextlib.contextmanager
def outputprocessors(ropnode, processors=tuple(), disable_all_others=True):
    """Context manager to temporarily add Output Processors to USD ROP node.

    Args:
        ropnode (hou.RopNode): The USD Rop node.
        processors (tuple or list): The processors to add.
        disable_all_others (bool, Optional): Whether to disable all
            output processors currently on the ROP node that are not in the
            `processors` list passed to this function.

    """
    # TODO: Add support for forcing the correct order of the processors

    original = []
    prefix = "enableoutputprocessor_"
    processor_parms = ropnode.globParms(prefix + "*")
    for parm in processor_parms:
        original.append((parm, parm.eval()))

    if disable_all_others:
        for parm in processor_parms:
            parm.set(False)

    added = []
    for processor in processors:

        parm = ropnode.parm(prefix + processor)
        if parm:
            # If processor already exists, just enable it
            parm.set(True)

        else:
            # Else add the new processor
            add_usd_output_processor(ropnode, processor)
            added.append(processor)

    try:
        yield
    finally:

        # Remove newly added processors
        for processor in added:
            remove_usd_output_processor(ropnode, processor)

        # Revert to original values
        for parm, value in original:
            if parm:
                parm.set(value)


def get_usd_rop_loppath(node):

    # Get sop path
    node_type = node.type().name()
    if node_type == "usd":
        return node.parm("loppath").evalAsNode()

    elif node_type in {"usd_rop", "usdrender_rop"}:
        # Inside Solaris e.g. /stage (not in ROP context)
        # When incoming connection is present it takes it directly
        inputs = node.inputs()
        if inputs:
            return inputs[0]
        else:
            return node.parm("loppath").evalAsNode()


def get_layer_save_path(layer):
    """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer.

    Args:
        layer (pxr.Sdf.Layer): The Layer to retrieve the save path data from.

    Returns:
        str or None: Path to save to when data exists.

    """
    hou_layer_info = layer.rootPrims.get("HoudiniLayerInfo")
    if not hou_layer_info:
        return

    save_path = hou_layer_info.customData.get("HoudiniSavePath", None)
    if save_path:
        # Unfortunately this doesn't actually resolve the full absolute path
        return layer.ComputeAbsolutePath(save_path)


def get_referenced_layers(layer):
    """Return SdfLayers for all external references of the current layer

    Args:
        layer (pxr.Sdf.Layer): The Layer to retrieve the referenced layers
            from.

    Returns:
        list: List of pxr.Sdf.Layer that are external references to this layer

    """

    layers = []
    for layer_id in layer.GetExternalReferences():
        layer = Sdf.Layer.Find(layer_id)
        if not layer:
            # A file may not be in memory and is
            # referenced from disk. As such it cannot
            # be found. We will ignore those layers.
            continue

        layers.append(layer)

    return layers


def iter_layer_recursive(layer):
    """Recursively iterate all 'external' referenced layers"""

    layers = get_referenced_layers(layer)
    traversed = set(layers)  # Avoid recursion to itself (if even possible)
    traverse = list(layers)
    for layer in traverse:

        # Include children layers (recursion)
        children_layers = get_referenced_layers(layer)
        children_layers = [x for x in children_layers if x not in traversed]
        traverse.extend(children_layers)
        traversed.update(children_layers)

        yield layer


def get_configured_save_layers(usd_rop):

    lop_node = get_usd_rop_loppath(usd_rop)
    stage = lop_node.stage(apply_viewport_overrides=False)
    if not stage:
        raise RuntimeError(
            "No valid USD stage for ROP node: %s" % usd_rop.path()
        )

    root_layer = stage.GetRootLayer()

    save_layers = []
    for layer in iter_layer_recursive(root_layer):
        save_path = get_layer_save_path(layer)
        if save_path is not None:
            save_layers.append(layer)

    return save_layers
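A hedged usage sketch of the `outputprocessors` context manager (the ROP path is hypothetical; the processor name is taken from the `enableoutputprocessor_simplerelativepaths` parm referenced elsewhere in this commit):

    import hou

    ropnode = hou.node("/stage/usd_rop1")  # hypothetical USD ROP
    with outputprocessors(ropnode,
                          processors=["simplerelativepaths"],
                          disable_all_others=True):
        # Render while only the requested processor is enabled; on exit,
        # added processors are removed and prior states are restored.
        ropnode.render()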
openpype/hosts/houdini/hooks/set_paths.py (new file, 18 lines)

@@ -0,0 +1,18 @@
from openpype.lib import PreLaunchHook
import os


class SetPath(PreLaunchHook):
    """Set current dir to workdir.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = ["houdini"]

    def execute(self):
        workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
        if not workdir:
            self.log.warning("BUG: Workdir is not filled.")
            return

        os.chdir(workdir)
@@ -18,28 +18,29 @@ class CreateAlembicCamera(plugin.Creator):
         # Set node type to create for output
         self.data.update({"node_type": "alembic"})

-    def process(self):
-        instance = super(CreateAlembicCamera, self).process()
+    def _process(self, instance):
+        """Creator main entry point.
+
+        Args:
+            instance (hou.Node): Created Houdini instance.
+
+        """
         parms = {
             "filename": "$HIP/pyblish/%s.abc" % self.name,
-            "use_sop_path": False
+            "use_sop_path": False,
         }

         if self.nodes:
             node = self.nodes[0]
             path = node.path()

             # Split the node path into the first root and the remainder
             # So we can set the root and objects parameters correctly
             _, root, remainder = path.split("/", 2)
-            parms.update({
-                "root": "/" + root,
-                "objects": remainder
-            })
+            parms.update({"root": "/" + root, "objects": remainder})

         instance.setParms(parms)

         # Lock the Use Sop Path setting so the
         # user doesn't accidentally enable it.
         instance.parm("use_sop_path").lock(True)
+        instance.parm("trange").set(1)
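A worked example of the path split above (the node path is hypothetical):

    path = "/obj/camera_archive/cam1"
    _, root, remainder = path.split("/", 2)
    # root      -> "obj"                  => "root" parm becomes "/obj"
    # remainder -> "camera_archive/cam1"  => "objects" parm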
openpype/hosts/houdini/plugins/create/create_composite.py (new file, 44 lines)

@@ -0,0 +1,44 @@
from openpype.hosts.houdini.api import plugin


class CreateCompositeSequence(plugin.Creator):
    """Composite ROP to Image Sequence"""

    label = "Composite (Image Sequence)"
    family = "imagesequence"
    icon = "gears"

    def __init__(self, *args, **kwargs):
        super(CreateCompositeSequence, self).__init__(*args, **kwargs)

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        # Type of ROP node to create
        self.data.update({"node_type": "comp"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name}

        if self.nodes:
            node = self.nodes[0]
            parms.update({"coppath": node.path()})

        instance.setParms(parms)

        # Lock any parameters in this list
        to_lock = ["prim_to_detail_pattern"]
        for name in to_lock:
            try:
                parm = instance.parm(name)
                parm.lock(True)
            except AttributeError:
                # missing lock pattern
                self.log.debug(
                    "missing lock pattern {}".format(name))
@@ -17,21 +17,29 @@ class CreatePointCache(plugin.Creator):

         self.data.update({"node_type": "alembic"})

-    def process(self):
-        instance = super(CreatePointCache, self).process()
-
-        parms = {"use_sop_path": True,  # Export single node from SOP Path
-                 "build_from_path": True,  # Direct path of primitive in output
-                 "path_attrib": "path",  # Pass path attribute for output
-                 "prim_to_detail_pattern": "cbId",
-                 "format": 2,  # Set format to Ogawa
-                 "filename": "$HIP/pyblish/%s.abc" % self.name}
+    def _process(self, instance):
+        """Creator main entry point.
+
+        Args:
+            instance (hou.Node): Created Houdini instance.
+
+        """
+        parms = {
+            "use_sop_path": True,  # Export single node from SOP Path
+            "build_from_path": True,  # Direct path of primitive in output
+            "path_attrib": "path",  # Pass path attribute for output
+            "prim_to_detail_pattern": "cbId",
+            "format": 2,  # Set format to Ogawa
+            "facesets": 0,  # No face sets (by default exclude them)
+            "filename": "$HIP/pyblish/%s.abc" % self.name,
+        }

         if self.nodes:
             node = self.nodes[0]
             parms.update({"sop_path": node.path()})

         instance.setParms(parms)
         instance.parm("trange").set(1)

         # Lock any parameters in this list
         to_lock = ["prim_to_detail_pattern"]
openpype/hosts/houdini/plugins/create/create_redshift_rop.py (new file, 70 lines)

@@ -0,0 +1,70 @@
import hou
from openpype.hosts.houdini.api import plugin


class CreateRedshiftROP(plugin.Creator):
    """Redshift ROP"""

    label = "Redshift ROP"
    family = "redshift_rop"
    icon = "magic"
    defaults = ["master"]

    def __init__(self, *args, **kwargs):
        super(CreateRedshiftROP, self).__init__(*args, **kwargs)

        # Clear the family prefix from the subset
        subset = self.data["subset"]
        subset_no_prefix = subset[len(self.family):]
        subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:]
        self.data["subset"] = subset_no_prefix

        # Add chunk size attribute
        self.data["chunkSize"] = 10

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        self.data.update({"node_type": "Redshift_ROP"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        basename = instance.name()
        instance.setName(basename + "_ROP", unique_name=True)

        # Also create the linked Redshift IPR Rop
        try:
            ipr_rop = self.parent.createNode(
                "Redshift_IPR", node_name=basename + "_IPR"
            )
        except hou.OperationFailed:
            raise Exception(("Cannot create Redshift node. Is Redshift "
                             "installed and enabled?"))

        # Move it to directly under the Redshift ROP
        ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1))

        # Set the linked rop to the Redshift ROP
        ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance))

        prefix = '${HIP}/render/${HIPNAME}/`chs("subset")`.${AOV}.$F4.exr'
        parms = {
            # Render frame range
            "trange": 1,

            # Redshift ROP settings
            "RS_outputFileNamePrefix": prefix,
            "RS_outputMultilayerMode": 0,  # no multi-layered exr
            "RS_outputBeautyAOVSuffix": "beauty",
        }
        instance.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
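A worked example of the subset prefix strip above (the subset name is hypothetical):

    family = "redshift_rop"
    subset = "redshift_ropMain"   # e.g. produced as family + variant
    subset_no_prefix = subset[len(family):]                       # "Main"
    subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:]
    # -> "main", which is stored back into self.data["subset"]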
openpype/hosts/houdini/plugins/create/create_usd.py (new file, 47 lines)

@@ -0,0 +1,47 @@
from openpype.hosts.houdini.api import plugin


class CreateUSD(plugin.Creator):
    """Universal Scene Description"""

    label = "USD (experimental)"
    family = "usd"
    icon = "gears"
    enabled = False

    def __init__(self, *args, **kwargs):
        super(CreateUSD, self).__init__(*args, **kwargs)

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        self.data.update({"node_type": "usd"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            "lopoutput": "$HIP/pyblish/%s.usd" % self.name,
            "enableoutputprocessor_simplerelativepaths": False,
        }

        if self.nodes:
            node = self.nodes[0]
            parms.update({"loppath": node.path()})

        instance.setParms(parms)

        # Lock any parameters in this list
        to_lock = [
            "fileperframe",
            # Lock some Avalon attributes
            "family",
            "id",
        ]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
openpype/hosts/houdini/plugins/create/create_usdrender.py (new file, 42 lines)

@@ -0,0 +1,42 @@
import hou
from openpype.hosts.houdini.api import plugin


class CreateUSDRender(plugin.Creator):
    """USD Render ROP in /stage"""

    label = "USD Render (experimental)"
    family = "usdrender"
    icon = "magic"

    def __init__(self, *args, **kwargs):
        super(CreateUSDRender, self).__init__(*args, **kwargs)

        self.parent = hou.node("/stage")

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)

        self.data.update({"node_type": "usdrender"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            # Render frame range
            "trange": 1
        }
        if self.nodes:
            node = self.nodes[0]
            parms.update({"loppath": node.path()})
        instance.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
@@ -18,11 +18,18 @@ class CreateVDBCache(plugin.Creator):
         # Set node type to create for output
         self.data["node_type"] = "geometry"

-    def process(self):
-        instance = super(CreateVDBCache, self).process()
-
-        parms = {"sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name,
-                 "initsim": True}
+    def _process(self, instance):
+        """Creator main entry point.
+
+        Args:
+            instance (hou.Node): Created Houdini instance.
+
+        """
+        parms = {
+            "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name,
+            "initsim": True,
+            "trange": 1
+        }

         if self.nodes:
             node = self.nodes[0]
openpype/hosts/houdini/plugins/load/actions.py (new file, 86 lines)

@@ -0,0 +1,86 @@
"""A module containing generic loader actions that will display in the Loader.

"""

from avalon import api


class SetFrameRangeLoader(api.Loader):
    """Set Houdini frame range"""

    families = [
        "animation",
        "camera",
        "pointcache",
        "vdbcache",
        "usd",
    ]
    representations = ["abc", "vdb", "usd"]

    label = "Set frame range"
    order = 11
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):

        import hou

        version = context["version"]
        version_data = version.get("data", {})

        start = version_data.get("startFrame", None)
        end = version_data.get("endFrame", None)

        if start is None or end is None:
            print(
                "Skipping setting frame range because start or "
                "end frame data is missing.."
            )
            return

        hou.playbar.setFrameRange(start, end)
        hou.playbar.setPlaybackRange(start, end)


class SetFrameRangeWithHandlesLoader(api.Loader):
    """Set Houdini frame range including pre- and post-handles"""

    families = [
        "animation",
        "camera",
        "pointcache",
        "vdbcache",
        "usd",
    ]
    representations = ["abc", "vdb", "usd"]

    label = "Set frame range (with handles)"
    order = 12
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):

        import hou

        version = context["version"]
        version_data = version.get("data", {})

        start = version_data.get("startFrame", None)
        end = version_data.get("endFrame", None)

        if start is None or end is None:
            print(
                "Skipping setting frame range because start or "
                "end frame data is missing.."
            )
            return

        # Include handles
        handles = version_data.get("handles", 0)
        start -= handles
        end += handles

        hou.playbar.setFrameRange(start, end)
        hou.playbar.setPlaybackRange(start, end)
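A small worked example of the handles math above (values are hypothetical):

    start, end = 1001, 1100
    handles = 10        # pre/post handles stored on the version
    start -= handles    # 991
    end += handles      # 1110
    # The playbar then spans 991-1110 instead of the raw shot range.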
@@ -6,9 +6,7 @@ from avalon.houdini import pipeline, lib
 class AbcLoader(api.Loader):
     """Specific loader of Alembic for the avalon.animation family"""

-    families = ["model",
-                "animation",
-                "pointcache"]
+    families = ["model", "animation", "pointcache", "gpuCache"]
     label = "Load Alembic"
     representations = ["abc"]
     order = -10
@@ -68,8 +66,9 @@ class AbcLoader(api.Loader):
         null = container.createNode("null", node_name="OUT".format(name))
         null.setInput(0, normal_node)

-        # Set display on last node
-        null.setDisplayFlag(True)
+        # Ensure display flag is on the Alembic input node and not on the OUT
+        # node to optimize "debug" displaying in the viewport.
+        alembic.setDisplayFlag(True)

         # Set new position for unpack node else it gets cluttered
         nodes = [container, alembic, unpack, normal_node, null]
@@ -78,18 +77,22 @@ class AbcLoader(api.Loader):

         self[:] = nodes

-        return pipeline.containerise(node_name,
-                                     namespace,
-                                     nodes,
-                                     context,
-                                     self.__class__.__name__)
+        return pipeline.containerise(
+            node_name,
+            namespace,
+            nodes,
+            context,
+            self.__class__.__name__,
+            suffix="",
+        )

     def update(self, container, representation):

         node = container["node"]
         try:
-            alembic_node = next(n for n in node.children() if
-                                n.type().name() == "alembic")
+            alembic_node = next(
+                n for n in node.children() if n.type().name() == "alembic"
+            )
         except StopIteration:
             self.log.error("Could not find node of type `alembic`")
             return
@@ -1,8 +1,79 @@
 from avalon import api
 from avalon.houdini import pipeline, lib


+ARCHIVE_EXPRESSION = ('__import__("_alembic_hom_extensions")'
+                      '.alembicGetCameraDict')
+
+
+def transfer_non_default_values(src, dest, ignore=None):
+    """Copy parms from src to dest.
+
+    Because the Alembic Archive rebuilds the entire node
+    hierarchy on triggering "Build Hierarchy" we want to
+    preserve any local tweaks made by the user on the camera
+    for ease of use. That could be a background image, a
+    resolution change or even Redshift camera parameters.
+
+    We try to do so by finding all parms that exist on both the
+    source and destination node, keeping only those that are not
+    at their default value and are visible, and excluding those
+    that carry the special "alembic archive" channel expression
+    as well as certain parm template types.
+
+    """
+    import hou
+
+    src.updateParmStates()
+
+    for parm in src.allParms():
+
+        if ignore and parm.name() in ignore:
+            continue
+
+        # If destination parm does not exist, ignore..
+        dest_parm = dest.parm(parm.name())
+        if not dest_parm:
+            continue
+
+        # Ignore values that are currently at default
+        if parm.isAtDefault() and dest_parm.isAtDefault():
+            continue
+
+        if not parm.isVisible():
+            # Ignore hidden parameters, assume they
+            # are implementation details
+            continue
+
+        expression = None
+        try:
+            expression = parm.expression()
+        except hou.OperationFailed:
+            # No expression present
+            pass
+
+        if expression is not None and ARCHIVE_EXPRESSION in expression:
+            # Assume it's part of the automated connections that the
+            # Alembic Archive makes on loading of the camera and thus we do
+            # not want to transfer the expression
+            continue
+
+        # Ignore folders, separators, etc.
+        ignore_types = {
+            hou.parmTemplateType.Toggle,
+            hou.parmTemplateType.Menu,
+            hou.parmTemplateType.Button,
+            hou.parmTemplateType.FolderSet,
+            hou.parmTemplateType.Separator,
+            hou.parmTemplateType.Label,
+        }
+        if parm.parmTemplate().type() in ignore_types:
+            continue
+
+        print("Preserving attribute: %s" % parm.name())
+        dest_parm.setFromParm(parm)
+
+
 class CameraLoader(api.Loader):
     """Specific loader of Alembic for the avalon.animation family"""
@@ -30,7 +101,7 @@ class CameraLoader(api.Loader):
         counter = 1
         asset_name = context["asset"]["name"]

-        namespace = namespace if namespace else asset_name
+        namespace = namespace or asset_name
         formatted = "{}_{}".format(namespace, name) if namespace else name
         node_name = "{0}_{1:03d}".format(formatted, counter)
@@ -59,7 +130,8 @@ class CameraLoader(api.Loader):
                                      namespace,
                                      nodes,
                                      context,
-                                     self.__class__.__name__)
+                                     self.__class__.__name__,
+                                     suffix="")

     def update(self, container, representation):

@@ -73,14 +145,40 @@ class CameraLoader(api.Loader):
         node.setParms({"fileName": file_path,
                        "representation": str(representation["_id"])})

+        # Store the cam temporarily next to the Alembic Archive
+        # so that we can preserve parm values the user set on it
+        # after build hierarchy was triggered.
+        old_camera = self._get_camera(node)
+        temp_camera = old_camera.copyTo(node.parent())
+
         # Rebuild
         node.parm("buildHierarchy").pressButton()

+        # Apply values to the new camera
+        new_camera = self._get_camera(node)
+        transfer_non_default_values(temp_camera,
+                                    new_camera,
+                                    # The hidden uniform scale attribute
+                                    # gets a default connection to
+                                    # "icon_scale" so just skip it completely
+                                    ignore={"scale"})
+
+        temp_camera.destroy()
+
     def remove(self, container):

         node = container["node"]
         node.destroy()

+    def _get_camera(self, node):
+        import hou
+        cameras = node.recursiveGlob("*",
+                                     filter=hou.nodeTypeFilter.ObjCamera,
+                                     include_subnets=False)
+
+        assert len(cameras) == 1, "Camera instance must have only one camera"
+        return cameras[0]
+
     def create_and_connect(self, node, node_type, name=None):
         """Create a node within a node and connect it to the input
@@ -93,27 +191,10 @@ class CameraLoader(api.Loader):
             hou.Node

         """
-        import hou
-
-        try:
-            if name:
-                new_node = node.createNode(node_type, node_name=name)
-            else:
-                new_node = node.createNode(node_type)
-
-            new_node.moveToGoodPosition()
-
-            try:
-                input_node = next(i for i in node.allItems() if
-                                  isinstance(i, hou.SubnetIndirectInput))
-            except StopIteration:
-                return new_node
-
-            new_node.setInput(0, input_node)
-            return new_node
-
-        except Exception:
-            raise RuntimeError("Could not created node type `%s` in node `%s`"
-                               % (node_type, node))
+        if name:
+            new_node = node.createNode(node_type, node_name=name)
+        else:
+            new_node = node.createNode(node_type)
+
+        new_node.moveToGoodPosition()
+        return new_node
openpype/hosts/houdini/plugins/load/load_image.py (new file, 123 lines)

@@ -0,0 +1,123 @@
import os

from avalon import api
from avalon.houdini import pipeline, lib

import hou


def get_image_avalon_container():
    """The COP2 files must be in a COP2 network.

    So we maintain a single entry point within AVALON_CONTAINERS,
    just for ease of use.

    """

    path = pipeline.AVALON_CONTAINERS
    avalon_container = hou.node(path)
    if not avalon_container:
        # Let's create avalon container secretly
        # but make sure the pipeline still is built the
        # way we anticipate it was built, asserting it.
        assert path == "/obj/AVALON_CONTAINERS"

        parent = hou.node("/obj")
        avalon_container = parent.createNode(
            "subnet", node_name="AVALON_CONTAINERS"
        )

    image_container = hou.node(path + "/IMAGES")
    if not image_container:
        image_container = avalon_container.createNode(
            "cop2net", node_name="IMAGES"
        )
        image_container.moveToGoodPosition()

    return image_container


class ImageLoader(api.Loader):
    """Load image sequences into a COP2 network"""

    families = ["colorbleed.imagesequence"]
    label = "Load Image (COP2)"
    representations = ["*"]
    order = -10

    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)

        # Get the root node
        parent = get_image_avalon_container()

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        node = parent.createNode("file", node_name=node_name)
        node.moveToGoodPosition()

        node.setParms({"filename1": file_path})

        # Imprint it manually
        data = {
            "schema": "avalon-core:container-2.0",
            "id": pipeline.AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
        }

        # todo: add folder="Avalon"
        lib.imprint(node, data)

        return node

    def update(self, container, representation):

        node = container["node"]

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)

        # Update attributes
        node.setParms(
            {
                "filename1": file_path,
                "representation": str(representation["_id"]),
            }
        )

    def remove(self, container):

        node = container["node"]

        # Let's clean up the IMAGES COP2 network
        # if it ends up being empty and we deleted
        # the last file node. Store the parent
        # before we delete the node.
        parent = node.parent()

        node.destroy()

        if not parent.children():
            parent.destroy()

    def _get_file_sequence(self, root):
        files = sorted(os.listdir(root))

        first_fname = files[0]
        prefix, padding, suffix = first_fname.rsplit(".", 2)
        fname = ".".join([prefix, "$F{}".format(len(padding)), suffix])
        return os.path.join(root, fname).replace("\\", "/")
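A worked example of the `$F` sequence-name construction above (filenames are hypothetical):

    first_fname = "render.1001.exr"
    prefix, padding, suffix = first_fname.rsplit(".", 2)
    # prefix="render", padding="1001", suffix="exr"
    fname = ".".join([prefix, "$F{}".format(len(padding)), suffix])
    # -> "render.$F4.exr", which Houdini expands per frame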
openpype/hosts/houdini/plugins/load/load_usd_layer.py (new file, 80 lines)

@@ -0,0 +1,80 @@
from avalon import api
from avalon.houdini import pipeline, lib


class USDSublayerLoader(api.Loader):
    """Sublayer USD file in Solaris"""

    families = [
        "colorbleed.usd",
        "colorbleed.pointcache",
        "colorbleed.animation",
        "colorbleed.camera",
        "usdCamera",
    ]
    label = "Sublayer USD"
    representations = ["usd", "usda", "usdlc", "usdnc", "abc"]
    order = 1

    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        import os
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        stage = hou.node("/stage")

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create USD sublayer
        container = stage.createNode("sublayer", node_name=node_name)
        container.setParms({"filepath1": file_path})
        container.moveToGoodPosition()

        # Imprint it manually
        data = {
            "schema": "avalon-core:container-2.0",
            "id": pipeline.AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
        }

        # todo: add folder="Avalon"
        lib.imprint(container, data)

        return container

    def update(self, container, representation):

        node = container["node"]

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")

        # Update attributes
        node.setParms(
            {
                "filepath1": file_path,
                "representation": str(representation["_id"]),
            }
        )

        # Reload files
        node.parm("reload").pressButton()

    def remove(self, container):

        node = container["node"]
        node.destroy()
openpype/hosts/houdini/plugins/load/load_usd_reference.py (new file, 80 lines)

@@ -0,0 +1,80 @@
from avalon import api
from avalon.houdini import pipeline, lib


class USDReferenceLoader(api.Loader):
    """Reference USD file in Solaris"""

    families = [
        "colorbleed.usd",
        "colorbleed.pointcache",
        "colorbleed.animation",
        "colorbleed.camera",
        "usdCamera",
    ]
    label = "Reference USD"
    representations = ["usd", "usda", "usdlc", "usdnc", "abc"]
    order = -8

    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        import os
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        stage = hou.node("/stage")

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create USD reference
        container = stage.createNode("reference", node_name=node_name)
        container.setParms({"filepath1": file_path})
        container.moveToGoodPosition()

        # Imprint it manually
        data = {
            "schema": "avalon-core:container-2.0",
            "id": pipeline.AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
        }

        # todo: add folder="Avalon"
        lib.imprint(container, data)

        return container

    def update(self, container, representation):

        node = container["node"]

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")

        # Update attributes
        node.setParms(
            {
                "filepath1": file_path,
                "representation": str(representation["_id"]),
            }
        )

        # Reload files
        node.parm("reload").pressButton()

    def remove(self, container):

        node = container["node"]
        node.destroy()
openpype/hosts/houdini/plugins/load/load_vdb.py (new file, 110 lines)

@@ -0,0 +1,110 @@
import os
import re
from avalon import api

from avalon.houdini import pipeline


class VdbLoader(api.Loader):
    """Load VDB caches from a single file or a published sequence folder"""

    families = ["vdbcache"]
    label = "Load VDB"
    representations = ["vdb"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):

        import hou

        # Get the root node
        obj = hou.node("/obj")

        # Define node name
        namespace = namespace if namespace else context["asset"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)

        # Remove the file node, it only loads static meshes
        # Houdini 17 has removed the file node from the geo node
        file_node = container.node("file1")
        if file_node:
            file_node.destroy()

        # Explicitly create a file node
        file_node = container.createNode("file", node_name=node_name)
        file_node.setParms({"file": self.format_path(self.fname)})

        # Set display on last node
        file_node.setDisplayFlag(True)

        nodes = [container, file_node]
        self[:] = nodes

        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def format_path(self, path):
        """Format file path correctly for single vdb or vdb sequence."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)

        # The path is either a single file or sequence in a folder.
        is_single_file = os.path.isfile(path)
        if is_single_file:
            filename = path
        else:
            # The path points to the publish .vdb sequence folder so we
            # find the first file in there that ends with .vdb
            files = sorted(os.listdir(path))
            first = next((x for x in files if x.endswith(".vdb")), None)
            if first is None:
                raise RuntimeError(
                    "Couldn't find first .vdb file of "
                    "sequence in: %s" % path
                )

            # Set <frame>.vdb to $F.vdb
            first = re.sub(r"\.(\d+)\.vdb$", ".$F.vdb", first)

            filename = os.path.join(path, first)

        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")

        return filename

    def update(self, container, representation):

        node = container["node"]
        try:
            file_node = next(
                n for n in node.children() if n.type().name() == "file"
            )
        except StopIteration:
            self.log.error("Could not find node of type `file`")
            return

        # Update the file path
        file_path = api.get_representation_path(representation)
        file_path = self.format_path(file_path)

        file_node.setParms({"file": file_path})

        # Update attribute
        node.setParms({"representation": str(representation["_id"])})

    def remove(self, container):

        node = container["node"]
        node.destroy()
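A worked example of the sequence-path rewrite above (folder contents are hypothetical):

    import re

    first = "smoke.0101.vdb"   # first file found in the publish folder
    first = re.sub(r"\.(\d+)\.vdb$", ".$F.vdb", first)
    # -> "smoke.$F.vdb"; Houdini substitutes the current frame for $F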
openpype/hosts/houdini/plugins/load/show_usdview.py (new file, 43 lines)

@@ -0,0 +1,43 @@
from avalon import api


class ShowInUsdview(api.Loader):
    """Open USD file in usdview"""

    families = ["colorbleed.usd"]
    label = "Show in usdview"
    representations = ["usd", "usda", "usdlc", "usdnc"]
    order = 10

    icon = "code-fork"
    color = "white"

    def load(self, context, name=None, namespace=None, data=None):

        import os
        import subprocess

        import avalon.lib as lib

        usdview = lib.which("usdview")

        filepath = os.path.normpath(self.fname)
        filepath = filepath.replace("\\", "/")

        if not os.path.exists(filepath):
            self.log.error("File does not exist: %s" % filepath)
            return

        self.log.info("Start houdini variant of usdview...")

        # For now avoid some pipeline environment variables that initialize
        # Avalon in Houdini as it is redundant for usdview and slows boot time
        env = os.environ.copy()
        env.pop("PYTHONPATH", None)
        env.pop("HOUDINI_SCRIPT_PATH", None)
        env.pop("HOUDINI_MENU_PATH", None)

        # Force string to avoid unicode issues
        env = {str(key): str(value) for key, value in env.items()}

        subprocess.Popen([usdview, filepath, "--renderer", "GL"], env=env)
@@ -0,0 +1,38 @@
import pyblish.api


class CollectInstanceActiveState(pyblish.api.InstancePlugin):
    """Collect default active state for instance from its node bypass state.

    This is done at the very end of the CollectorOrder so that any required
    collecting of data iterating over instances (with InstancePlugin) will
    actually collect the data for when the user enables the state in the UI.
    Otherwise potentially required data might have skipped collecting.

    """

    order = pyblish.api.CollectorOrder + 0.299
    families = ["*"]
    hosts = ["houdini"]
    label = "Instance Active State"

    def process(self, instance):

        # Must have node to check for bypass state
        if len(instance) == 0:
            return

        # Check bypass state and reverse
        node = instance[0]
        active = not node.isBypassed()

        # Set instance active state
        instance.data.update(
            {
                "active": active,
                # temporary translation of `active` to `publish` till
                # issue has been resolved:
                # https://github.com/pyblish/pyblish-base/issues/307
                "publish": active,
            }
        )
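A minimal illustration of the bypass-to-active mapping (the node path is hypothetical and a Houdini session is assumed):

    import hou

    node = hou.node("/out/geometry1")
    node.bypass(True)                  # artist disabled the ROP
    active = not node.isBypassed()     # -> False
    # The instance therefore starts unchecked in the publish UI.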
@@ -9,7 +9,7 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):

     order = pyblish.api.CollectorOrder - 0.5
     label = "Houdini Current File"
-    hosts = ['houdini']
+    hosts = ["houdini"]

     def process(self, context):
         """Inject the current working file"""
@@ -27,8 +27,10 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
         # could have existed already. We will allow it if the file exists,
         # but show a warning for this edge case to clarify the potential
         # false positive.
-        self.log.warning("Current file is 'untitled.hip' and we are "
-                         "unable to detect whether the current scene is "
-                         "saved correctly.")
+        self.log.warning(
+            "Current file is 'untitled.hip' and we are "
+            "unable to detect whether the current scene is "
+            "saved correctly."
+        )

-        context.data['currentFile'] = filepath
+        context.data["currentFile"] = filepath
@@ -6,11 +6,11 @@ from openpype.hosts.houdini.api import lib


 class CollectFrames(pyblish.api.InstancePlugin):
-    """Collect all frames which would be a resukl"""
+    """Collect all frames which would be saved from the ROP nodes"""

     order = pyblish.api.CollectorOrder
     label = "Collect Frames"
-    families = ["vdbcache"]
+    families = ["vdbcache", "imagesequence"]

     def process(self, instance):
@ -19,10 +19,17 @@ class CollectFrames(pyblish.api.InstancePlugin):
|
|||
output_parm = lib.get_output_parameter(ropnode)
|
||||
output = output_parm.eval()
|
||||
|
||||
_, ext = os.path.splitext(output)
|
||||
file_name = os.path.basename(output)
|
||||
match = re.match("(\w+)\.(\d+)\.vdb", file_name)
|
||||
result = file_name
|
||||
|
||||
# Get the filename pattern match from the output
|
||||
# path so we can compute all frames that would
|
||||
# come out from rendering the ROP node if there
|
||||
# is a frame pattern in the name
|
||||
pattern = r"\w+\.(\d+)" + re.escape(ext)
|
||||
match = re.match(pattern, file_name)
|
||||
|
||||
start_frame = instance.data.get("frameStart", None)
|
||||
end_frame = instance.data.get("frameEnd", None)
|
||||
|
||||
|
|
@ -31,10 +38,12 @@ class CollectFrames(pyblish.api.InstancePlugin):
|
|||
# Check if frames are bigger than 1 (file collection)
|
||||
# override the result
|
||||
if end_frame - start_frame > 1:
|
||||
result = self.create_file_list(match,
|
||||
int(start_frame),
|
||||
int(end_frame))
|
||||
result = self.create_file_list(
|
||||
match, int(start_frame), int(end_frame)
|
||||
)
|
||||
|
||||
# todo: `frames` currently conflicts with "explicit frames" for a
|
||||
# for a custom frame list. So this should be refactored.
|
||||
instance.data.update({"frames": result})
|
||||
|
||||
def create_file_list(self, match, start_frame, end_frame):
|
||||
|
|
@ -50,17 +59,24 @@ class CollectFrames(pyblish.api.InstancePlugin):
|
|||
|
||||
"""
|
||||
|
||||
# Get the padding length
|
||||
frame = match.group(1)
|
||||
padding = len(frame)
|
||||
|
||||
# Get the parts of the filename surrounding the frame number
|
||||
# so we can put our own frame numbers in.
|
||||
span = match.span(1)
|
||||
prefix = match.string[: span[0]]
|
||||
suffix = match.string[span[1]:]
|
||||
|
||||
# Generate filenames for all frames
|
||||
result = []
|
||||
for i in range(start_frame, end_frame + 1):
|
||||
|
||||
padding = len(match.group(2))
|
||||
name = match.group(1)
|
||||
padding_format = "{number:0{width}d}"
|
||||
# Format frame number by the padding amount
|
||||
str_frame = "{number:0{width}d}".format(number=i, width=padding)
|
||||
|
||||
count = start_frame
|
||||
while count <= end_frame:
|
||||
str_count = padding_format.format(number=count, width=padding)
|
||||
file_name = "{}.{}.vdb".format(name, str_count)
|
||||
file_name = prefix + str_frame + suffix
|
||||
result.append(file_name)
|
||||
count += 1
|
||||
|
||||
return result
|
||||
|
|
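To make the padding logic concrete, here is the expansion run standalone; the file name and frame range are hypothetical:

    import re

    file_name = "pyro.0001.vdb"  # hypothetical ROP output name
    match = re.match(r"\w+\.(\d+)" + re.escape(".vdb"), file_name)

    # Reuse the matched frame's padding and the text around it
    padding = len(match.group(1))
    span = match.span(1)
    prefix, suffix = file_name[:span[0]], file_name[span[1]:]

    frames = [
        prefix + "{0:0{1}d}".format(frame, padding) + suffix
        for frame in range(1001, 1004)
    ]
    # ['pyro.1001.vdb', 'pyro.1002.vdb', 'pyro.1003.vdb']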
|
|||
120
openpype/hosts/houdini/plugins/publish/collect_inputs.py
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
import avalon.api as api
|
||||
import pyblish.api
|
||||
|
||||
|
||||
def collect_input_containers(nodes):
|
||||
"""Collect containers that contain any of the node in `nodes`.
|
||||
|
||||
This will return any loaded Avalon container that contains at least one of
|
||||
the nodes. As such, the Avalon container is an input for it. Or in short,
|
||||
there are member nodes of that container.
|
||||
|
||||
Returns:
|
||||
list: Input avalon containers
|
||||
|
||||
"""
|
||||
|
||||
# Lookup by node ids
|
||||
lookup = frozenset(nodes)
|
||||
|
||||
containers = []
|
||||
host = api.registered_host()
|
||||
for container in host.ls():
|
||||
|
||||
node = container["node"]
|
||||
|
||||
# Usually the loaded containers don't have any complex references
|
||||
# and the contained children should be all we need. So we disregard
|
||||
# checking for .references() on the nodes.
|
||||
members = set(node.allSubChildren())
|
||||
members.add(node) # include the node itself
|
||||
|
||||
# If there's an intersection
|
||||
if not lookup.isdisjoint(members):
|
||||
containers.append(container)
|
||||
|
||||
return containers
|
||||
|
||||
|
||||
def iter_upstream(node):
|
||||
"""Yields all upstream inputs for the current node.
|
||||
|
||||
This includes all `node.inputAncestors()` but also traverses through all
|
||||
`node.references()` for the node itself and for any of the upstream nodes.
|
||||
This method has no max-depth and will collect all upstream inputs.
|
||||
|
||||
Yields:
|
||||
hou.Node: The upstream nodes, including references.
|
||||
|
||||
"""
|
||||
|
||||
upstream = node.inputAncestors(
|
||||
include_ref_inputs=True, follow_subnets=True
|
||||
)
|
||||
|
||||
# Initialize process queue with the node's ancestors itself
|
||||
queue = list(upstream)
|
||||
collected = set(upstream)
|
||||
|
||||
# Traverse upstream references for all nodes and yield them as we
|
||||
# process the queue.
|
||||
while queue:
|
||||
upstream_node = queue.pop()
|
||||
yield upstream_node
|
||||
|
||||
# Find its references that are not collected yet.
|
||||
references = upstream_node.references()
|
||||
references = [n for n in references if n not in collected]
|
||||
|
||||
queue.extend(references)
|
||||
collected.update(references)
|
||||
|
||||
# Include the references' ancestors that have not been collected yet.
|
||||
for reference in references:
|
||||
ancestors = reference.inputAncestors(
|
||||
include_ref_inputs=True, follow_subnets=True
|
||||
)
|
||||
ancestors = [n for n in ancestors if n not in collected]
|
||||
|
||||
queue.extend(ancestors)
|
||||
collected.update(ancestors)
|
||||
|
||||
|
||||
class CollectUpstreamInputs(pyblish.api.InstancePlugin):
|
||||
"""Collect source input containers used for this publish.
|
||||
|
||||
This will include `inputs` data of which loaded publishes were used in the
|
||||
generation of this publish. This leaves an upstream trace to what was used
|
||||
as input.
|
||||
|
||||
"""
|
||||
|
||||
label = "Collect Inputs"
|
||||
order = pyblish.api.CollectorOrder + 0.4
|
||||
hosts = ["houdini"]
|
||||
|
||||
def process(self, instance):
|
||||
# We can't get the "inputAncestors" directly from the ROP
|
||||
# node, so we find the related output node (set in SOP/COP path)
|
||||
# and include that together with its ancestors
|
||||
output = instance.data.get("output_node")
|
||||
|
||||
if output is None:
|
||||
# If no valid output node is set then ignore it as validation
|
||||
# will be checking those cases.
|
||||
self.log.debug(
|
||||
"No output node found, skipping " "collecting of inputs.."
|
||||
)
|
||||
return
|
||||
|
||||
# Collect all upstream parents
|
||||
nodes = list(iter_upstream(output))
|
||||
nodes.append(output)
|
||||
|
||||
# Collect containers for the given set of nodes
|
||||
containers = collect_input_containers(nodes)
|
||||
|
||||
inputs = [c["representation"] for c in containers]
|
||||
instance.data["inputs"] = inputs
|
||||
|
||||
self.log.info("Collected inputs: %s" % inputs)
|
||||
|
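The container membership test above hinges on cheap set intersection; a toy example with node paths standing in for hou.Node objects:

    lookup = frozenset(["/obj/geo1/out", "/obj/geo1/file1"])

    container_members = {"/obj/geo1/file1", "/obj/geo1/xform1"}
    if not lookup.isdisjoint(container_members):
        # At least one member overlaps, so the container is an input
        print("container is an upstream input")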
|
@ -31,6 +31,13 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
def process(self, context):
|
||||
|
||||
nodes = hou.node("/out").children()
|
||||
|
||||
# Include instances in USD stage only when it exists so it
|
||||
# remains backwards compatible with version before houdini 18
|
||||
stage = hou.node("/stage")
|
||||
if stage:
|
||||
nodes += stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)
|
||||
|
||||
for node in nodes:
|
||||
|
||||
if not node.parm("id"):
|
||||
|
|
@ -55,6 +62,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
|
||||
# Create nice name if the instance has a frame range.
|
||||
label = data.get("name", node.name())
|
||||
label += " (%s)" % data["asset"] # include asset in name
|
||||
|
||||
if "frameStart" in data and "frameEnd" in data:
|
||||
frames = "[{frameStart} - {frameEnd}]".format(**data)
|
||||
label = "{} {}".format(label, frames)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,152 @@
|
|||
import hou
|
||||
import pyblish.api
|
||||
from avalon.houdini import lib
|
||||
import openpype.hosts.houdini.api.usd as hou_usdlib
|
||||
import openpype.lib.usdlib as usdlib
|
||||
|
||||
|
||||
class CollectInstancesUsdLayered(pyblish.api.ContextPlugin):
|
||||
"""Collect Instances from a ROP Network and its configured layer paths.
|
||||
|
||||
The output nodes of the ROP node will only be published when *any* of the
|
||||
layers remain set to 'publish' by the user.
|
||||
|
||||
This works differently from most of our Avalon instances in the pipeline.
|
||||
As opposed to storing `pyblish.avalon.instance` as id on the node we store
|
||||
`pyblish.avalon.usdlayered`.
|
||||
|
||||
Additionally this instance has no need for storing family, asset, subset
|
||||
or name on the nodes. Instead all information is retrieved solely from
|
||||
the output filepath, which is an Avalon URI:
|
||||
avalon://{asset}/{subset}.{representation}
|
||||
|
||||
Each final ROP node is considered a dependency for any of the Configured
|
||||
Save Path layers it sets along the way. As such, the instances shown in
|
||||
the Pyblish UI are solely the configured layers. The encapsulating usd
|
||||
files are generated whenever *any* of the dependencies is published.
|
||||
|
||||
These dependency instances are stored in:
|
||||
instance.data["publishDependencies"]
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.01
|
||||
label = "Collect Instances (USD Configured Layers)"
|
||||
hosts = ["houdini"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
stage = hou.node("/stage")
|
||||
if not stage:
|
||||
# Likely Houdini version <18
|
||||
return
|
||||
|
||||
nodes = stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)
|
||||
for node in nodes:
|
||||
|
||||
if not node.parm("id"):
|
||||
continue
|
||||
|
||||
if node.evalParm("id") != "pyblish.avalon.usdlayered":
|
||||
continue
|
||||
|
||||
has_family = node.evalParm("family")
|
||||
assert has_family, "'%s' is missing 'family'" % node.name()
|
||||
|
||||
self.process_node(node, context)
|
||||
|
||||
def sort_by_family(instance):
|
||||
"""Sort by family"""
|
||||
return instance.data.get("families", instance.data.get("family"))
|
||||
|
||||
# Sort/grouped by family (preserving local index)
|
||||
context[:] = sorted(context, key=sort_by_family)
|
||||
|
||||
return context
|
||||
|
||||
def process_node(self, node, context):
|
||||
|
||||
# Allow a single ROP node or a full ROP network of USD ROP nodes
|
||||
# to be processed as a single entry that should "live together" on
|
||||
# a publish.
|
||||
if node.type().name() == "ropnet":
|
||||
# All rop nodes inside ROP Network
|
||||
ropnodes = node.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)
|
||||
else:
|
||||
# A single node
|
||||
ropnodes = [node]
|
||||
|
||||
data = lib.read(node)
|
||||
|
||||
# Don't use the explicit "colorbleed.usd.layered" family for publishing
|
||||
# instead use the "colorbleed.usd" family to integrate.
|
||||
data["publishFamilies"] = ["colorbleed.usd"]
|
||||
|
||||
# For now group ALL of them into USD Layer subset group
|
||||
# Allow this subset to be grouped into a USD Layer on creation
|
||||
data["subsetGroup"] = "USD Layer"
|
||||
|
||||
instances = list()
|
||||
dependencies = []
|
||||
for ropnode in ropnodes:
|
||||
|
||||
# Create a dependency instance per ROP Node.
|
||||
lopoutput = ropnode.evalParm("lopoutput")
|
||||
dependency_save_data = self.get_save_data(lopoutput)
|
||||
dependency = context.create_instance(dependency_save_data["name"])
|
||||
dependency.append(ropnode)
|
||||
dependency.data.update(data)
|
||||
dependency.data.update(dependency_save_data)
|
||||
dependency.data["family"] = "colorbleed.usd.dependency"
|
||||
dependency.data["optional"] = False
|
||||
dependencies.append(dependency)
|
||||
|
||||
# Hide the dependency instance from the context
|
||||
context.pop()
|
||||
|
||||
# Get all configured layers for this USD ROP node
|
||||
# and create a Pyblish instance for each one
|
||||
layers = hou_usdlib.get_configured_save_layers(ropnode)
|
||||
for layer in layers:
|
||||
save_path = hou_usdlib.get_layer_save_path(layer)
|
||||
save_data = self.get_save_data(save_path)
|
||||
if not save_data:
|
||||
continue
|
||||
self.log.info(save_path)
|
||||
|
||||
instance = context.create_instance(save_data["name"])
|
||||
instance[:] = [node]
|
||||
|
||||
# Set the instance data
|
||||
instance.data.update(data)
|
||||
instance.data.update(save_data)
|
||||
instance.data["usdLayer"] = layer
|
||||
|
||||
# Don't allow the Pyblish `instanceToggled` we have installed
|
||||
# to set this node to bypass.
|
||||
instance.data["_allowToggleBypass"] = False
|
||||
|
||||
instances.append(instance)
|
||||
|
||||
# Store the collected ROP node dependencies
|
||||
self.log.debug("Collected dependencies: %s" % (dependencies,))
|
||||
for instance in instances:
|
||||
instance.data["publishDependencies"] = dependencies
|
||||
|
||||
def get_save_data(self, save_path):
|
||||
|
||||
# Resolve Avalon URI
|
||||
uri_data = usdlib.parse_avalon_uri(save_path)
|
||||
if not uri_data:
|
||||
self.log.warning("Non Avalon URI Layer Path: %s" % save_path)
|
||||
return {}
|
||||
|
||||
# Collect asset + subset from URI
|
||||
name = "{subset} ({asset})".format(**uri_data)
|
||||
fname = "{asset}_{subset}.{ext}".format(**uri_data)
|
||||
|
||||
data = dict(uri_data)
|
||||
data["usdSavePath"] = save_path
|
||||
data["usdFilename"] = fname
|
||||
data["name"] = name
|
||||
return data
|
||||
|
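A hedged sketch of what resolving such an Avalon URI could look like; the regex is illustrative only, the real behaviour lives in openpype.lib.usdlib.parse_avalon_uri:

    import re

    def parse_avalon_uri(uri):
        # avalon://{asset}/{subset}.{representation}
        pattern = r"avalon://(?P<asset>[^/]+)/(?P<subset>[^.]+)\.(?P<ext>\w+)"
        match = re.match(pattern, uri)
        if match:
            return match.groupdict()

    print(parse_avalon_uri("avalon://hero/usdModel.usd"))
    # {'asset': 'hero', 'subset': 'usdModel', 'ext': 'usd'}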
|
@ -2,13 +2,20 @@ import pyblish.api
|
|||
|
||||
|
||||
class CollectOutputSOPPath(pyblish.api.InstancePlugin):
|
||||
"""Collect the out node's SOP Path value."""
|
||||
"""Collect the out node's SOP/COP Path value."""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
families = ["pointcache",
|
||||
"vdbcache"]
|
||||
families = [
|
||||
"pointcache",
|
||||
"camera",
|
||||
"vdbcache",
|
||||
"imagesequence",
|
||||
"usd",
|
||||
"usdrender",
|
||||
]
|
||||
|
||||
hosts = ["houdini"]
|
||||
label = "Collect Output SOP Path"
|
||||
label = "Collect Output Node Path"
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
|
|
@ -17,12 +24,44 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):
|
|||
node = instance[0]
|
||||
|
||||
# Get sop path
|
||||
if node.type().name() == "alembic":
|
||||
sop_path_parm = "sop_path"
|
||||
node_type = node.type().name()
|
||||
if node_type == "geometry":
|
||||
out_node = node.parm("soppath").evalAsNode()
|
||||
|
||||
elif node_type == "alembic":
|
||||
|
||||
# Alembic can switch between using SOP Path or object
|
||||
if node.parm("use_sop_path").eval():
|
||||
out_node = node.parm("sop_path").evalAsNode()
|
||||
else:
|
||||
root = node.parm("root").eval()
|
||||
objects = node.parm("objects").eval()
|
||||
path = root + "/" + objects
|
||||
out_node = hou.node(path)
|
||||
|
||||
elif node_type == "comp":
|
||||
out_node = node.parm("coppath").evalAsNode()
|
||||
|
||||
elif node_type == "usd" or node_type == "usdrender":
|
||||
out_node = node.parm("loppath").evalAsNode()
|
||||
|
||||
elif node_type == "usd_rop" or node_type == "usdrender_rop":
|
||||
# Inside Solaris e.g. /stage (not in ROP context)
|
||||
# When incoming connection is present it takes it directly
|
||||
inputs = node.inputs()
|
||||
if inputs:
|
||||
out_node = inputs[0]
|
||||
else:
|
||||
out_node = node.parm("loppath").evalAsNode()
|
||||
|
||||
else:
|
||||
sop_path_parm = "soppath"
|
||||
raise ValueError(
|
||||
"ROP node type '%s' is" " not supported." % node_type
|
||||
)
|
||||
|
||||
sop_path = node.parm(sop_path_parm).eval()
|
||||
out_node = hou.node(sop_path)
|
||||
if not out_node:
|
||||
self.log.warning("No output node collected.")
|
||||
return
|
||||
|
||||
self.log.debug("Output node: %s" % out_node.path())
|
||||
instance.data["output_node"] = out_node
|
||||
|
|
|
|||
135
openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
import re
|
||||
import os
|
||||
|
||||
import hou
|
||||
import pyblish.api
|
||||
|
||||
|
||||
def get_top_referenced_parm(parm):
|
||||
|
||||
processed = set() # disallow infinite loop
|
||||
while True:
|
||||
if parm.path() in processed:
|
||||
raise RuntimeError("Parameter references result in cycle.")
|
||||
|
||||
processed.add(parm.path())
|
||||
|
||||
ref = parm.getReferencedParm()
|
||||
if ref.path() == parm.path():
|
||||
# It returns itself when it doesn't reference
|
||||
# another parameter
|
||||
return ref
|
||||
else:
|
||||
parm = ref
|
||||
|
||||
|
||||
def evalParmNoFrame(node, parm, pad_character="#"):
|
||||
|
||||
parameter = node.parm(parm)
|
||||
assert parameter, "Parameter does not exist: %s.%s" % (node, parm)
|
||||
|
||||
# If the parameter has a parameter reference, then get that
|
||||
# parameter instead as otherwise `unexpandedString()` fails.
|
||||
parameter = get_top_referenced_parm(parameter)
|
||||
|
||||
# Substitute out the frame numbering with padded characters
|
||||
try:
|
||||
raw = parameter.unexpandedString()
|
||||
except hou.Error as exc:
|
||||
print("Failed: %s" % parameter)
|
||||
raise RuntimeError(exc)
|
||||
|
||||
def replace(match):
|
||||
padding = 1
|
||||
n = match.group(2)
|
||||
if n and int(n):
|
||||
padding = int(n)
|
||||
return pad_character * padding
|
||||
|
||||
expression = re.sub(r"(\$F([0-9]*))", replace, raw)
|
||||
|
||||
with hou.ScriptEvalContext(parameter):
|
||||
return hou.expandStringAtFrame(expression, 0)
|
||||
|
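To see the $F token handling in isolation, the same substitution run on a made-up unexpanded prefix:

    import re

    raw = "$HIP/render/shot_$F4.exr"  # hypothetical unexpanded string

    def replace(match):
        n = match.group(2)
        padding = int(n) if n and int(n) else 1
        return "#" * padding

    print(re.sub(r"(\$F([0-9]*))", replace, raw))
    # $HIP/render/shot_####.exr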
||||
|
||||
class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
|
||||
"""Collect USD Render Products
|
||||
|
||||
Collects the instance.data["files"] for the render products.
|
||||
|
||||
Provides:
|
||||
instance -> files
|
||||
|
||||
"""
|
||||
|
||||
label = "Redshift ROP Render Products"
|
||||
order = pyblish.api.CollectorOrder + 0.4
|
||||
hosts = ["houdini"]
|
||||
families = ["redshift_rop"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
rop = instance[0]
|
||||
|
||||
# Collect chunkSize
|
||||
chunk_size_parm = rop.parm("chunkSize")
|
||||
if chunk_size_parm:
|
||||
chunk_size = int(chunk_size_parm.eval())
|
||||
instance.data["chunkSize"] = chunk_size
|
||||
self.log.debug("Chunk Size: %s" % chunk_size)
|
||||
|
||||
default_prefix = evalParmNoFrame(rop, "RS_outputFileNamePrefix")
|
||||
beauty_suffix = rop.evalParm("RS_outputBeautyAOVSuffix")
|
||||
render_products = []
|
||||
|
||||
# Default beauty AOV
|
||||
beauty_product = self.get_render_product_name(
|
||||
prefix=default_prefix, suffix=beauty_suffix
|
||||
)
|
||||
render_products.append(beauty_product)
|
||||
|
||||
num_aovs = rop.evalParm("RS_aov")
|
||||
for index in range(num_aovs):
|
||||
i = index + 1
|
||||
|
||||
# Skip disabled AOVs
|
||||
if not rop.evalParm("RS_aovEnable_%s" % i):
|
||||
continue
|
||||
|
||||
aov_suffix = rop.evalParm("RS_aovSuffix_%s" % i)
|
||||
aov_prefix = evalParmNoFrame(rop, "RS_aovCustomPrefix_%s" % i)
|
||||
if not aov_prefix:
|
||||
aov_prefix = default_prefix
|
||||
|
||||
aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
|
||||
render_products.append(aov_product)
|
||||
|
||||
for product in render_products:
|
||||
self.log.debug("Found render product: %s" % product)
|
||||
|
||||
filenames = list(render_products)
|
||||
instance.data["files"] = filenames
|
||||
|
||||
def get_render_product_name(self, prefix, suffix):
|
||||
"""Return the output filename using the AOV prefix and suffix"""
|
||||
|
||||
# When AOV is explicitly defined in prefix we just swap it out
|
||||
# directly with the AOV suffix to embed it.
|
||||
# Note: ${AOV} seems to be evaluated in the parameter as %AOV%
|
||||
has_aov_in_prefix = "%AOV%" in prefix
|
||||
if has_aov_in_prefix:
|
||||
# It seems that when some special separator characters are present
|
||||
# before the %AOV% token that Redshift will secretly remove it if
|
||||
# there is no suffix for the current product, for example:
|
||||
# foo_%AOV% -> foo.exr
|
||||
pattern = "%AOV%" if suffix else "[._-]?%AOV%"
|
||||
product_name = re.sub(pattern, suffix, prefix, flags=re.IGNORECASE)
|
||||
else:
|
||||
if suffix:
|
||||
# Add ".{suffix}" before the extension
|
||||
prefix_base, ext = os.path.splitext(prefix)
|
||||
product_name = prefix_base + "." + suffix + ext
|
||||
else:
|
||||
product_name = prefix
|
||||
|
||||
return product_name
|
||||
|
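And a small standalone sketch of the AOV name substitution; the prefix and suffix values are invented:

    import os
    import re

    prefix = "$HIP/render/shot_%AOV%.exr"
    suffix = "diffuse"

    pattern = "%AOV%" if suffix else "[._-]?%AOV%"
    print(re.sub(pattern, suffix, prefix, flags=re.IGNORECASE))
    # $HIP/render/shot_diffuse.exr

    # Without an AOV token the suffix is injected before the extension
    prefix_base, ext = os.path.splitext("$HIP/render/shot.exr")
    print(prefix_base + "." + suffix + ext)
    # $HIP/render/shot.diffuse.exr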
|
@ -0,0 +1,30 @@
|
|||
import pyblish.api
|
||||
import openpype.api
|
||||
|
||||
import hou
|
||||
from avalon.houdini import lib
|
||||
|
||||
|
||||
class CollectRemotePublishSettings(pyblish.api.ContextPlugin):
|
||||
"""Collect custom settings of the Remote Publish node."""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
families = ["*"]
|
||||
hosts = ["houdini"]
|
||||
targets = ["deadline"]
|
||||
label = "Remote Publish Submission Settings"
|
||||
actions = [openpype.api.RepairAction]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
node = hou.node("/out/REMOTE_PUBLISH")
|
||||
if not node:
|
||||
return
|
||||
|
||||
attributes = lib.read(node)
|
||||
|
||||
# Debug the settings we have collected
|
||||
for key, value in sorted(attributes.items()):
|
||||
self.log.debug("Collected %s: %s" % (key, value))
|
||||
|
||||
context.data.update(attributes)
|
||||
|
|
@ -0,0 +1,133 @@
|
|||
import re
|
||||
import os
|
||||
|
||||
import hou
|
||||
import pxr.UsdRender
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
def get_var_changed(variable=None):
|
||||
"""Return changed variables and operators that use it.
|
||||
|
||||
Note: `varchange` hscript states that it forces a recook of the nodes
|
||||
that use Variables. That was tested in Houdini
|
||||
18.0.391.
|
||||
|
||||
Args:
|
||||
variable (str, Optional): A specific variable to query the operators
|
||||
for. When None is provided it will return all variables that have
|
||||
had recent changes and require a recook. Defaults to None.
|
||||
|
||||
Returns:
|
||||
dict: Variable that changed with the operators that use it.
|
||||
|
||||
"""
|
||||
cmd = "varchange -V"
|
||||
if variable:
|
||||
cmd += " {0}".format(variable)
|
||||
output, _ = hou.hscript(cmd)
|
||||
|
||||
changed = {}
|
||||
for line in output.split("Variable: "):
|
||||
if not line.strip():
|
||||
continue
|
||||
|
||||
split = line.split()
|
||||
var = split[0]
|
||||
operators = split[1:]
|
||||
changed[var] = operators
|
||||
|
||||
return changed
|
||||
|
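A quick illustration of the parsing above, fed with a made-up one-line `varchange -V` output:

    output = "Variable: HIPNAME /out/usd1 /out/usd2 Variable: JOB /obj/geo1"

    changed = {}
    for line in output.split("Variable: "):
        if not line.strip():
            continue
        split = line.split()
        changed[split[0]] = split[1:]

    print(changed)
    # {'HIPNAME': ['/out/usd1', '/out/usd2'], 'JOB': ['/obj/geo1']}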
||||
|
||||
class CollectRenderProducts(pyblish.api.InstancePlugin):
|
||||
"""Collect USD Render Products."""
|
||||
|
||||
label = "Collect Render Products"
|
||||
order = pyblish.api.CollectorOrder + 0.4
|
||||
hosts = ["houdini"]
|
||||
families = ["usdrender"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
node = instance.data.get("output_node")
|
||||
if not node:
|
||||
rop_path = instance[0].path()
|
||||
raise RuntimeError(
|
||||
"No output node found. Make sure to connect an "
|
||||
"input to the USD ROP: %s" % rop_path
|
||||
)
|
||||
|
||||
# Workaround Houdini 18.0.391 bug where $HIPNAME doesn't automatically
|
||||
# update after scene save.
|
||||
if hou.applicationVersion() == (18, 0, 391):
|
||||
self.log.debug(
|
||||
"Checking for recook to workaround " "$HIPNAME refresh bug..."
|
||||
)
|
||||
changed = get_var_changed("HIPNAME").get("HIPNAME")
|
||||
if changed:
|
||||
self.log.debug("Recooking for $HIPNAME refresh bug...")
|
||||
for operator in changed:
|
||||
hou.node(operator).cook(force=True)
|
||||
|
||||
# Make sure to recook any 'cache' nodes in the history chain
|
||||
chain = [node]
|
||||
chain.extend(node.inputAncestors())
|
||||
for input_node in chain:
|
||||
if input_node.type().name() == "cache":
|
||||
input_node.cook(force=True)
|
||||
|
||||
stage = node.stage()
|
||||
|
||||
filenames = []
|
||||
for prim in stage.Traverse():
|
||||
|
||||
if not prim.IsA(pxr.UsdRender.Product):
|
||||
continue
|
||||
|
||||
# Get Render Product Name
|
||||
product = pxr.UsdRender.Product(prim)
|
||||
|
||||
# We force taking it from any random time sample as opposed to
|
||||
# "default" that the USD Api falls back to since that won't return
|
||||
# time sampled values if they were set per time sample.
|
||||
name = product.GetProductNameAttr().Get(time=0)
|
||||
dirname = os.path.dirname(name)
|
||||
basename = os.path.basename(name)
|
||||
|
||||
dollarf_regex = r"(\$F([0-9]?))"
|
||||
frame_regex = r"^(.+\.)([0-9]+)(\.[a-zA-Z]+)$"
|
||||
if re.match(dollarf_regex, basename):
|
||||
# TODO: Confirm this actually is allowed USD stages and HUSK
|
||||
# Substitute $F
|
||||
def replace(match):
|
||||
"""Replace $F4 with padded #."""
|
||||
padding = int(match.group(2)) if match.group(2) else 1
|
||||
return "#" * padding
|
||||
|
||||
filename_base = re.sub(dollarf_regex, replace, basename)
|
||||
filename = os.path.join(dirname, filename_base)
|
||||
else:
|
||||
# Substitute basename.0001.ext
|
||||
def replace(match):
|
||||
prefix, frame, ext = match.groups()
|
||||
padding = "#" * len(frame)
|
||||
return prefix + padding + ext
|
||||
|
||||
filename_base = re.sub(frame_regex, replace, basename)
|
||||
filename = os.path.join(dirname, filename_base)
|
||||
filename = filename.replace("\\", "/")
|
||||
|
||||
assert "#" in filename, (
|
||||
"Couldn't resolve render product name "
|
||||
"with frame number: %s" % name
|
||||
)
|
||||
|
||||
filenames.append(filename)
|
||||
|
||||
prim_path = str(prim.GetPath())
|
||||
self.log.info("Collected %s name: %s" % (prim_path, filename))
|
||||
|
||||
# Filenames for Deadline
|
||||
instance.data["files"] = filenames
|
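For reference, the `basename.0001.ext` branch of the name resolution behaves like this on an invented product name:

    import re

    frame_regex = r"^(.+\.)([0-9]+)(\.[a-zA-Z]+)$"

    def replace(match):
        prefix, frame, ext = match.groups()
        return prefix + "#" * len(frame) + ext

    print(re.sub(frame_regex, replace, "beauty.1001.exr"))
    # beauty.####.exr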
||||
110
openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
Normal file
|
|
@ -0,0 +1,110 @@
|
|||
import pyblish.api
|
||||
|
||||
from avalon import io
|
||||
import openpype.lib.usdlib as usdlib
|
||||
|
||||
|
||||
class CollectUsdBootstrap(pyblish.api.InstancePlugin):
|
||||
"""Collect special Asset/Shot bootstrap instances if those are needed.
|
||||
|
||||
Some specific subsets are intended to be part of the default structure
|
||||
of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset
|
||||
we layer a Model and Shade USD file over each other and expose that in
|
||||
an Asset USD file, ready to use.
|
||||
|
||||
On the first publish of any of the components of an Asset or Shot, the
|
||||
missing pieces are bootstrapped and generated in the pipeline too. This
|
||||
means that on the very first publish of your model the Asset USD file
|
||||
will exist too.
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.35
|
||||
label = "Collect USD Bootstrap"
|
||||
hosts = ["houdini"]
|
||||
families = ["usd", "usd.layered"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# Detect whether the current subset is a subset in a pipeline
|
||||
def get_bootstrap(instance):
|
||||
instance_subset = instance.data["subset"]
|
||||
for name, layers in usdlib.PIPELINE.items():
|
||||
if instance_subset in set(layers):
|
||||
return name # e.g. "asset"
|
||||
break
|
||||
else:
|
||||
return
|
||||
|
||||
bootstrap = get_bootstrap(instance)
|
||||
if bootstrap:
|
||||
self.add_bootstrap(instance, bootstrap)
|
||||
|
||||
# Check if any of the dependencies requires a bootstrap
|
||||
for dependency in instance.data.get("publishDependencies", list()):
|
||||
bootstrap = get_bootstrap(dependency)
|
||||
if bootstrap:
|
||||
self.add_bootstrap(dependency, bootstrap)
|
||||
|
||||
def add_bootstrap(self, instance, bootstrap):
|
||||
|
||||
self.log.debug("Add bootstrap for: %s" % bootstrap)
|
||||
|
||||
asset = io.find_one({"name": instance.data["asset"], "type": "asset"})
|
||||
assert asset, "Asset must exist: %s" % asset
|
||||
|
||||
# Check which are not about to be created and don't exist yet
|
||||
required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap)
|
||||
|
||||
require_all_layers = instance.data.get("requireAllLayers", False)
|
||||
if require_all_layers:
|
||||
# USD files load fine in usdview and Houdini even when layered or
|
||||
# referenced files do not exist. So by default we don't require
|
||||
# the layers to exist.
|
||||
layers = usdlib.PIPELINE.get(bootstrap)
|
||||
if layers:
|
||||
required += list(layers)
|
||||
|
||||
self.log.debug("Checking required bootstrap: %s" % required)
|
||||
for subset in required:
|
||||
if self._subset_exists(instance, subset, asset):
|
||||
continue
|
||||
|
||||
self.log.debug(
|
||||
"Creating {0} USD bootstrap: {1} {2}".format(
|
||||
bootstrap, asset["name"], subset
|
||||
)
|
||||
)
|
||||
|
||||
new = instance.context.create_instance(subset)
|
||||
new.data["subset"] = subset
|
||||
new.data["label"] = "{0} ({1})".format(subset, asset["name"])
|
||||
new.data["family"] = "usd.bootstrap"
|
||||
new.data["comment"] = "Automated bootstrap USD file."
|
||||
new.data["publishFamilies"] = ["usd"]
|
||||
|
||||
# Do not allow the user to toggle this instance
|
||||
new.data["optional"] = False
|
||||
|
||||
# Copy some data from the instance for which we bootstrap
|
||||
for key in ["asset"]:
|
||||
new.data[key] = instance.data[key]
|
||||
|
||||
def _subset_exists(self, instance, subset, asset):
|
||||
"""Return whether subset exists in current context or in database."""
|
||||
# Allow it to be created during this publish session
|
||||
context = instance.context
|
||||
for inst in context:
|
||||
if (
|
||||
inst.data["subset"] == subset
|
||||
and inst.data["asset"] == asset["name"]
|
||||
):
|
||||
return True
|
||||
|
||||
# Or, if they already exist in the database we can
|
||||
# skip them too.
|
||||
return bool(
|
||||
io.find_one(
|
||||
{"name": subset, "type": "subset", "parent": asset["_id"]}
|
||||
)
|
||||
)
|
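For illustration, the bootstrap lookup reduces to a reverse search over the pipeline mapping; the PIPELINE contents below are invented for the example:

    # Hypothetical stand-in for openpype.lib.usdlib.PIPELINE
    PIPELINE = {
        "asset": ["usdModel", "usdShade"],
        "shot": ["usdLayout", "usdFx"],
    }

    def get_bootstrap(subset):
        for name, layers in PIPELINE.items():
            if subset in set(layers):
                return name

    print(get_bootstrap("usdShade"))  # asset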
||||
61
openpype/hosts/houdini/plugins/publish/collect_usd_layers.py
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import openpype.hosts.houdini.api.usd as usdlib
|
||||
|
||||
|
||||
class CollectUsdLayers(pyblish.api.InstancePlugin):
|
||||
"""Collect the USD Layers that have configured save paths."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.35
|
||||
label = "Collect USD Layers"
|
||||
hosts = ["houdini"]
|
||||
families = ["usd"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
output = instance.data.get("output_node")
|
||||
if not output:
|
||||
self.log.debug("No output node found..")
|
||||
return
|
||||
|
||||
rop_node = instance[0]
|
||||
|
||||
save_layers = []
|
||||
for layer in usdlib.get_configured_save_layers(rop_node):
|
||||
|
||||
info = layer.rootPrims.get("HoudiniLayerInfo")
|
||||
save_path = info.customData.get("HoudiniSavePath")
|
||||
creator = info.customData.get("HoudiniCreatorNode")
|
||||
|
||||
self.log.debug("Found configured save path: "
|
||||
"%s -> %s" % (layer, save_path))
|
||||
|
||||
# Log node that configured this save path
|
||||
if creator:
|
||||
self.log.debug("Created by: %s" % creator)
|
||||
|
||||
save_layers.append((layer, save_path))
|
||||
|
||||
# Store on the instance
|
||||
instance.data["usdConfiguredSavePaths"] = save_layers
|
||||
|
||||
# Create configured layer instances so User can disable updating
|
||||
# specific configured layers for publishing.
|
||||
context = instance.context
|
||||
for layer, save_path in save_layers:
|
||||
name = os.path.basename(save_path)
|
||||
label = "{0} -> {1}".format(instance.data["name"], name)
|
||||
layer_inst = context.create_instance(name)
|
||||
|
||||
family = "colorbleed.usdlayer"
|
||||
layer_inst.data["family"] = family
|
||||
layer_inst.data["families"] = [family]
|
||||
layer_inst.data["subset"] = "__stub__"
|
||||
layer_inst.data["label"] = label
|
||||
layer_inst.data["asset"] = instance.data["asset"]
|
||||
layer_inst.append(instance[0]) # include same USD ROP
|
||||
layer_inst.append((layer, save_path)) # include layer data
|
||||
|
||||
# Allow this subset to be grouped into a USD Layer on creation
|
||||
layer_inst.data["subsetGroup"] = "USD Layer"
|
||||
|
|
@ -3,7 +3,7 @@ import hou
|
|||
|
||||
|
||||
class CollectWorksceneFPS(pyblish.api.ContextPlugin):
|
||||
"""Get the FPS of the work scene"""
|
||||
"""Get the FPS of the work scene."""
|
||||
|
||||
label = "Workscene FPS"
|
||||
order = pyblish.api.CollectorOrder
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import os
|
|||
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
from openpype.hosts.houdini.api.lib import render_rop
|
||||
|
||||
|
||||
class ExtractAlembic(openpype.api.Extractor):
|
||||
|
|
@ -13,29 +14,20 @@ class ExtractAlembic(openpype.api.Extractor):
|
|||
|
||||
def process(self, instance):
|
||||
|
||||
import hou
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
# Get the filename from the filename parameter
|
||||
output = ropnode.evalParm("filename")
|
||||
staging_dir = os.path.dirname(output)
|
||||
# instance.data["stagingDir"] = staging_dir
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
|
||||
file_name = os.path.basename(output)
|
||||
|
||||
# We run the render
|
||||
self.log.info("Writing alembic '%s' to '%s'" % (file_name,
|
||||
staging_dir))
|
||||
try:
|
||||
ropnode.render()
|
||||
except hou.Error as exc:
|
||||
# The hou.Error is not inherited from a Python Exception class,
|
||||
# so we explicitly capture the houdini error, otherwise pyblish
|
||||
# will remain hanging.
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
raise RuntimeError("Render failed: {0}".format(exc))
|
||||
|
||||
render_rop(ropnode)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
|
|
|||
35
openpype/hosts/houdini/plugins/publish/extract_composite.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
|
||||
from openpype.hosts.houdini.api.lib import render_rop
|
||||
|
||||
|
||||
class ExtractComposite(openpype.api.Extractor):
|
||||
|
||||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract Composite (Image Sequence)"
|
||||
hosts = ["houdini"]
|
||||
families = ["imagesequence"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
# Get the filename from the copoutput parameter
|
||||
# `.evalParm(parameter)` will make sure all tokens are resolved
|
||||
output = ropnode.evalParm("copoutput")
|
||||
staging_dir = os.path.dirname(output)
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
file_name = os.path.basename(output)
|
||||
|
||||
self.log.info("Writing comp '%s' to '%s'" % (file_name, staging_dir))
|
||||
|
||||
render_rop(ropnode)
|
||||
|
||||
if "files" not in instance.data:
|
||||
instance.data["files"] = []
|
||||
|
||||
frames = instance.data["frames"]
|
||||
instance.data["files"].append(frames)
|
||||
42
openpype/hosts/houdini/plugins/publish/extract_usd.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
from openpype.hosts.houdini.api.lib import render_rop
|
||||
|
||||
|
||||
class ExtractUSD(openpype.api.Extractor):
|
||||
|
||||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract USD"
|
||||
hosts = ["houdini"]
|
||||
families = ["usd",
|
||||
"usdModel",
|
||||
"usdSetDress"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
# Get the filename from the filename parameter
|
||||
output = ropnode.evalParm("lopoutput")
|
||||
staging_dir = os.path.dirname(output)
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
file_name = os.path.basename(output)
|
||||
|
||||
self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir))
|
||||
|
||||
render_rop(ropnode)
|
||||
|
||||
assert os.path.exists(output), "Output does not exist: %s" % output
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'usd',
|
||||
'ext': 'usd',
|
||||
'files': file_name,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
315
openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
Normal file
|
|
@ -0,0 +1,315 @@
|
|||
import os
|
||||
import contextlib
|
||||
import hou
|
||||
import sys
|
||||
from collections import deque
|
||||
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
|
||||
import openpype.hosts.houdini.api.usd as hou_usdlib
|
||||
from openpype.hosts.houdini.api.lib import render_rop
|
||||
|
||||
|
||||
class ExitStack(object):
|
||||
"""Context manager for dynamic management of a stack of exit callbacks.
|
||||
|
||||
For example:
|
||||
|
||||
with ExitStack() as stack:
|
||||
files = [stack.enter_context(open(fname)) for fname in filenames]
|
||||
# All opened files will automatically be closed at the end of
|
||||
# the with statement, even if attempts to open files later
|
||||
# in the list raise an exception
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._exit_callbacks = deque()
|
||||
|
||||
def pop_all(self):
|
||||
"""Preserve the context stack by transferring it to a new instance"""
|
||||
new_stack = type(self)()
|
||||
new_stack._exit_callbacks = self._exit_callbacks
|
||||
self._exit_callbacks = deque()
|
||||
return new_stack
|
||||
|
||||
def _push_cm_exit(self, cm, cm_exit):
|
||||
"""Helper to correctly register callbacks to __exit__ methods"""
|
||||
|
||||
def _exit_wrapper(*exc_details):
|
||||
return cm_exit(cm, *exc_details)
|
||||
|
||||
_exit_wrapper.__self__ = cm
|
||||
self.push(_exit_wrapper)
|
||||
|
||||
def push(self, exit):
|
||||
"""Registers a callback with the standard __exit__ method signature.
|
||||
|
||||
Can suppress exceptions the same way __exit__ methods can.
|
||||
|
||||
Also accepts any object with an __exit__ method (registering a call
|
||||
to the method instead of the object itself)
|
||||
|
||||
"""
|
||||
# We use an unbound method rather than a bound method to follow
|
||||
# the standard lookup behaviour for special methods
|
||||
_cb_type = type(exit)
|
||||
try:
|
||||
exit_method = _cb_type.__exit__
|
||||
except AttributeError:
|
||||
# Not a context manager, so assume its a callable
|
||||
self._exit_callbacks.append(exit)
|
||||
else:
|
||||
self._push_cm_exit(exit, exit_method)
|
||||
return exit # Allow use as a decorator
|
||||
|
||||
def callback(self, callback, *args, **kwds):
|
||||
"""Registers an arbitrary callback and arguments.
|
||||
|
||||
Cannot suppress exceptions.
|
||||
"""
|
||||
|
||||
def _exit_wrapper(exc_type, exc, tb):
|
||||
callback(*args, **kwds)
|
||||
|
||||
# We changed the signature, so using @wraps is not appropriate, but
|
||||
# setting __wrapped__ may still help with introspection
|
||||
_exit_wrapper.__wrapped__ = callback
|
||||
self.push(_exit_wrapper)
|
||||
return callback # Allow use as a decorator
|
||||
|
||||
def enter_context(self, cm):
|
||||
"""Enters the supplied context manager
|
||||
|
||||
If successful, also pushes its __exit__ method as a callback and
|
||||
returns the result of the __enter__ method.
|
||||
"""
|
||||
# We look up the special methods on the type to match the with
|
||||
# statement
|
||||
_cm_type = type(cm)
|
||||
_exit = _cm_type.__exit__
|
||||
result = _cm_type.__enter__(cm)
|
||||
self._push_cm_exit(cm, _exit)
|
||||
return result
|
||||
|
||||
def close(self):
|
||||
"""Immediately unwind the context stack"""
|
||||
self.__exit__(None, None, None)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_details):
|
||||
# We manipulate the exception state so it behaves as though
|
||||
# we were actually nesting multiple with statements
|
||||
frame_exc = sys.exc_info()[1]
|
||||
|
||||
def _fix_exception_context(new_exc, old_exc):
|
||||
while 1:
|
||||
exc_context = new_exc.__context__
|
||||
if exc_context in (None, frame_exc):
|
||||
break
|
||||
new_exc = exc_context
|
||||
new_exc.__context__ = old_exc
|
||||
|
||||
# Callbacks are invoked in LIFO order to match the behaviour of
|
||||
# nested context managers
|
||||
suppressed_exc = False
|
||||
while self._exit_callbacks:
|
||||
cb = self._exit_callbacks.pop()
|
||||
try:
|
||||
if cb(*exc_details):
|
||||
suppressed_exc = True
|
||||
exc_details = (None, None, None)
|
||||
except Exception:
|
||||
new_exc_details = sys.exc_info()
|
||||
# simulate the stack of exceptions by setting the context
|
||||
_fix_exception_context(new_exc_details[1], exc_details[1])
|
||||
if not self._exit_callbacks:
|
||||
raise
|
||||
exc_details = new_exc_details
|
||||
return suppressed_exc
|
||||
|
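Note this class mirrors contextlib.ExitStack, which has shipped with the standard library since Python 3.3; on a Python 3 interpreter the vendored copy could be replaced by a single import:

    from contextlib import ExitStack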
||||
|
||||
@contextlib.contextmanager
|
||||
def parm_values(overrides):
|
||||
"""Override Parameter values during the context."""
|
||||
|
||||
originals = []
|
||||
try:
|
||||
for parm, value in overrides:
|
||||
originals.append((parm, parm.eval()))
|
||||
parm.set(value)
|
||||
yield
|
||||
finally:
|
||||
for parm, value in originals:
|
||||
# Parameter might not exist anymore so first
|
||||
# check whether it's still valid
|
||||
if hou.parm(parm.path()):
|
||||
parm.set(value)
|
||||
|
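Usage of this context manager looks roughly like the following; the node path and parameter name are hypothetical:

    rop = hou.node("/out/usd1")  # hypothetical USD ROP
    overrides = [(rop.parm("savepattern"), "*.usd")]

    with parm_values(overrides):
        rop.render()  # renders with the temporary values
    # original parameter values are restored on exit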
||||
|
||||
class ExtractUSDLayered(openpype.api.Extractor):
|
||||
|
||||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract Layered USD"
|
||||
hosts = ["houdini"]
|
||||
families = ["usdLayered", "usdShade"]
|
||||
|
||||
# Force Output Processors so it will always save any file
|
||||
# into our unique staging directory with processed Avalon paths
|
||||
output_processors = ["avalon_uri_processor", "stagingdir_processor"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
self.log.info("Extracting: %s" % instance)
|
||||
|
||||
staging_dir = self.staging_dir(instance)
|
||||
fname = instance.data.get("usdFilename")
|
||||
|
||||
# The individual rop nodes are collected as "publishDependencies"
|
||||
dependencies = instance.data["publishDependencies"]
|
||||
ropnodes = [dependency[0] for dependency in dependencies]
|
||||
assert all(
|
||||
node.type().name() in {"usd", "usd_rop"} for node in ropnodes
|
||||
)
|
||||
|
||||
# Main ROP node, either a USD Rop or ROP network with
|
||||
# multiple USD ROPs
|
||||
node = instance[0]
|
||||
|
||||
# Collect any output dependencies that have not been processed yet
|
||||
# during extraction of other instances
|
||||
outputs = [fname]
|
||||
active_dependencies = [
|
||||
dep
|
||||
for dep in dependencies
|
||||
if dep.data.get("publish", True)
|
||||
and not dep.data.get("_isExtracted", False)
|
||||
]
|
||||
for dependency in active_dependencies:
|
||||
outputs.append(dependency.data["usdFilename"])
|
||||
|
||||
pattern = r"*[/\]{0} {0}"
|
||||
save_pattern = " ".join(pattern.format(fname) for fname in outputs)
|
||||
|
||||
# Run a stack of context managers before we start the render to
|
||||
# temporarily adjust USD ROP settings for our publish output.
|
||||
rop_overrides = {
|
||||
# This sets staging directory on the processor to force our
|
||||
# output files to end up in the Staging Directory.
|
||||
"stagingdiroutputprocessor_stagingDir": staging_dir,
|
||||
# Force the Avalon URI Output Processor to refactor paths for
|
||||
# references, payloads and layers to published paths.
|
||||
"avalonurioutputprocessor_use_publish_paths": True,
|
||||
# Only write out specific USD files based on our outputs
|
||||
"savepattern": save_pattern,
|
||||
}
|
||||
overrides = list()
|
||||
with ExitStack() as stack:
|
||||
|
||||
for ropnode in ropnodes:
|
||||
manager = hou_usdlib.outputprocessors(
|
||||
ropnode,
|
||||
processors=self.output_processors,
|
||||
disable_all_others=True,
|
||||
)
|
||||
stack.enter_context(manager)
|
||||
|
||||
# Some of these must be added after we enter the output
|
||||
# processor context manager because those parameters only
|
||||
# exist when the Output Processor is added to the ROP node.
|
||||
for name, value in rop_overrides.items():
|
||||
parm = ropnode.parm(name)
|
||||
assert parm, "Parm not found: %s.%s" % (
|
||||
ropnode.path(),
|
||||
name,
|
||||
)
|
||||
overrides.append((parm, value))
|
||||
|
||||
stack.enter_context(parm_values(overrides))
|
||||
|
||||
# Render the single ROP node or the full ROP network
|
||||
render_rop(node)
|
||||
|
||||
# Assert all output files in the Staging Directory
|
||||
for output_fname in outputs:
|
||||
path = os.path.join(staging_dir, output_fname)
|
||||
assert os.path.exists(path), "Output file must exist: %s" % path
|
||||
|
||||
# Set up the dependency for publish if they have new content
|
||||
# compared to previous publishes
|
||||
for dependency in active_dependencies:
|
||||
dependency_fname = dependency.data["usdFilename"]
|
||||
|
||||
filepath = os.path.join(staging_dir, dependency_fname)
|
||||
similar = self._compare_with_latest_publish(dependency, filepath)
|
||||
if similar:
|
||||
# Deactivate this dependency
|
||||
self.log.debug(
|
||||
"Dependency matches previous publish version,"
|
||||
" deactivating %s for publish" % dependency
|
||||
)
|
||||
dependency.data["publish"] = False
|
||||
else:
|
||||
self.log.debug("Extracted dependency: %s" % dependency)
|
||||
# This dependency should be published
|
||||
dependency.data["files"] = [dependency_fname]
|
||||
dependency.data["stagingDir"] = staging_dir
|
||||
dependency.data["_isExtracted"] = True
|
||||
|
||||
# Store the created files on the instance
|
||||
if "files" not in instance.data:
|
||||
instance.data["files"] = []
|
||||
instance.data["files"].append(fname)
|
||||
|
||||
def _compare_with_latest_publish(self, dependency, new_file):
|
||||
|
||||
from avalon import api, io
|
||||
import filecmp
|
||||
|
||||
_, ext = os.path.splitext(new_file)
|
||||
|
||||
# Compare this dependency with the latest published version
|
||||
# to detect whether we should make this into a new publish
|
||||
# version. If not, skip it.
|
||||
asset = io.find_one(
|
||||
{"name": dependency.data["asset"], "type": "asset"}
|
||||
)
|
||||
subset = io.find_one(
|
||||
{
|
||||
"name": dependency.data["subset"],
|
||||
"type": "subset",
|
||||
"parent": asset["_id"],
|
||||
}
|
||||
)
|
||||
if not subset:
|
||||
# Subset doesn't exist yet. Definitely new file
|
||||
self.log.debug("No existing subset..")
|
||||
return False
|
||||
|
||||
version = io.find_one(
|
||||
{"type": "version", "parent": subset["_id"], },
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
if not version:
|
||||
self.log.debug("No existing version..")
|
||||
return False
|
||||
|
||||
representation = io.find_one(
|
||||
{
|
||||
"name": ext.lstrip("."),
|
||||
"type": "representation",
|
||||
"parent": version["_id"],
|
||||
}
|
||||
)
|
||||
if not representation:
|
||||
self.log.debug("No existing representation..")
|
||||
return False
|
||||
|
||||
old_file = api.get_representation_path(representation)
|
||||
if not os.path.exists(old_file):
|
||||
return False
|
||||
|
||||
return filecmp.cmp(old_file, new_file)
|
||||
|
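The dedupe decision ultimately rests on filecmp.cmp; note that with its default shallow=True, files whose os.stat() signatures match are considered equal without reading their bytes, so a forced byte-for-byte comparison would need shallow=False. A toy call with hypothetical paths:

    import filecmp

    # True when the files compare equal, meaning the new export adds
    # nothing and the dependency can be skipped for publishing
    same = filecmp.cmp("/publish/v001/layer.usd", "/work/staging/layer.usd")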
|
@ -2,6 +2,7 @@ import os
|
|||
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
from openpype.hosts.houdini.api.lib import render_rop
|
||||
|
||||
|
||||
class ExtractVDBCache(openpype.api.Extractor):
|
||||
|
|
@ -13,8 +14,6 @@ class ExtractVDBCache(openpype.api.Extractor):
|
|||
|
||||
def process(self, instance):
|
||||
|
||||
import hou
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
# Get the filename from the filename parameter
|
||||
|
|
@ -25,15 +24,8 @@ class ExtractVDBCache(openpype.api.Extractor):
|
|||
file_name = os.path.basename(sop_output)
|
||||
|
||||
self.log.info("Writing VDB '%s' to '%s'" % (file_name, staging_dir))
|
||||
try:
|
||||
ropnode.render()
|
||||
except hou.Error as exc:
|
||||
# The hou.Error is not inherited from a Python Exception class,
|
||||
# so we explicitly capture the houdini error, otherwise pyblish
|
||||
# will remain hanging.
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
raise RuntimeError("Render failed: {0}".format(exc))
|
||||
|
||||
render_rop(ropnode)
|
||||
|
||||
output = instance.data["frames"]
|
||||
|
||||
|
|
@ -41,9 +33,9 @@ class ExtractVDBCache(openpype.api.Extractor):
|
|||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'mov',
|
||||
'ext': 'mov',
|
||||
'files': output,
|
||||
"name": "vdb",
|
||||
"ext": "vdb",
|
||||
"files": output,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,51 @@
|
|||
import pyblish.api
|
||||
import avalon.api
|
||||
|
||||
from openpype.api import version_up
|
||||
from openpype.action import get_errored_plugins_from_data
|
||||
|
||||
|
||||
class IncrementCurrentFile(pyblish.api.InstancePlugin):
|
||||
"""Increment the current file.
|
||||
|
||||
Saves the current scene with an increased version number.
|
||||
|
||||
"""
|
||||
|
||||
label = "Increment current file"
|
||||
order = pyblish.api.IntegratorOrder + 9.0
|
||||
hosts = ["houdini"]
|
||||
families = ["colorbleed.usdrender", "redshift_rop"]
|
||||
targets = ["local"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# This should be a ContextPlugin, but this is a workaround
|
||||
# for a bug in pyblish to run once for a family: issue #250
|
||||
context = instance.context
|
||||
key = "__hasRun{}".format(self.__class__.__name__)
|
||||
if context.data.get(key, False):
|
||||
return
|
||||
else:
|
||||
context.data[key] = True
|
||||
|
||||
context = instance.context
|
||||
errored_plugins = get_errored_plugins_from_data(context)
|
||||
if any(
|
||||
plugin.__name__ == "HoudiniSubmitPublishDeadline"
|
||||
for plugin in errored_plugins
|
||||
):
|
||||
raise RuntimeError(
|
||||
"Skipping incrementing current file because "
|
||||
"submission to deadline failed."
|
||||
)
|
||||
|
||||
# Filename must not have changed since collecting
|
||||
host = avalon.api.registered_host()
|
||||
current_file = host.current_file()
|
||||
assert (
|
||||
context.data["currentFile"] == current_file
|
||||
), "Collected filename from current scene name."
|
||||
|
||||
new_filepath = version_up(current_file)
|
||||
host.save(new_filepath)
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
import pyblish.api
|
||||
|
||||
import hou
|
||||
from openpype.api import version_up
|
||||
from openpype.action import get_errored_plugins_from_data
|
||||
|
||||
|
||||
class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin):
|
||||
"""Increment the current file.
|
||||
|
||||
Saves the current scene with an increased version number.
|
||||
|
||||
"""
|
||||
|
||||
label = "Increment current file"
|
||||
order = pyblish.api.IntegratorOrder + 9.0
|
||||
hosts = ["houdini"]
|
||||
targets = ["deadline"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
errored_plugins = get_errored_plugins_from_data(context)
|
||||
if any(
|
||||
plugin.__name__ == "HoudiniSubmitPublishDeadline"
|
||||
for plugin in errored_plugins
|
||||
):
|
||||
raise RuntimeError(
|
||||
"Skipping incrementing current file because "
|
||||
"submission to deadline failed."
|
||||
)
|
||||
|
||||
current_filepath = context.data["currentFile"]
|
||||
new_filepath = version_up(current_filepath)
|
||||
|
||||
hou.hipFile.save(file_name=new_filepath, save_to_recent_files=True)
|
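version_up derives the next version from the file name; the behaviour sketched below is an assumption based on how it is used here:

    from openpype.api import version_up

    # Assumed behaviour: bump the version token in the file name
    version_up("/proj/shots/sh010_v003.hip")
    # -> "/proj/shots/sh010_v004.hip"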
||||
37
openpype/hosts/houdini/plugins/publish/save_scene.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
import pyblish.api
|
||||
import avalon.api
|
||||
|
||||
|
||||
class SaveCurrentScene(pyblish.api.InstancePlugin):
|
||||
"""Save current scene"""
|
||||
|
||||
label = "Save current file"
|
||||
order = pyblish.api.IntegratorOrder - 0.49
|
||||
hosts = ["houdini"]
|
||||
families = ["usdrender",
|
||||
"redshift_rop"]
|
||||
targets = ["local"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# This should be a ContextPlugin, but this is a workaround
|
||||
# for a bug in pyblish to run once for a family: issue #250
|
||||
context = instance.context
|
||||
key = "__hasRun{}".format(self.__class__.__name__)
|
||||
if context.data.get(key, False):
|
||||
return
|
||||
else:
|
||||
context.data[key] = True
|
||||
|
||||
# Filename must not have changed since collecting
|
||||
host = avalon.api.registered_host()
|
||||
current_file = host.current_file()
|
||||
assert context.data['currentFile'] == current_file, (
|
||||
"Collected filename from current scene name."
|
||||
)
|
||||
|
||||
if host.has_unsaved_changes():
|
||||
self.log.info("Saving current file..")
|
||||
host.save_file(current_file)
|
||||
else:
|
||||
self.log.debug("No unsaved changes, skipping file save..")
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class SaveCurrentSceneDeadline(pyblish.api.ContextPlugin):
|
||||
"""Save current scene"""
|
||||
|
||||
label = "Save current file"
|
||||
order = pyblish.api.IntegratorOrder - 0.49
|
||||
hosts = ["houdini"]
|
||||
targets = ["deadline"]
|
||||
|
||||
def process(self, context):
|
||||
import hou
|
||||
|
||||
assert (
|
||||
context.data["currentFile"] == hou.hipFile.path()
|
||||
), "Collected filename from current scene name."
|
||||
|
||||
if hou.hipFile.hasUnsavedChanges():
|
||||
self.log.info("Saving current file..")
|
||||
hou.hipFile.save(save_to_recent_files=True)
|
||||
else:
|
||||
self.log.debug("No unsaved changes, skipping file save..")
|
||||
|
|
@ -3,7 +3,7 @@ import openpype.api
|
|||
|
||||
|
||||
class ValidateVDBInputNode(pyblish.api.InstancePlugin):
|
||||
"""Validate that the node connected to the output node is of type VDB
|
||||
"""Validate that the node connected to the output node is of type VDB.
|
||||
|
||||
Regardless of the amount of VDBs create the output will need to have an
|
||||
equal amount of VDBs, points, primitives and vertices
|
||||
|
|
@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
|
|||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError("Node connected to the output node is not"
|
||||
"of type VDB!")
|
||||
raise RuntimeError(
|
||||
"Node connected to the output node is not" "of type VDB!"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
|
|
|||
|
|
@ -0,0 +1,132 @@
|
|||
import pyblish.api
|
||||
import openpype.api
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
|
||||
"""Validate Alembic ROP Primitive to Detail attribute is consistent.
|
||||
|
||||
The Alembic ROP crashes Houdini whenever an attribute in the "Primitive to
|
||||
Detail" parameter exists on only a part of the primitives that belong to
|
||||
the same hierarchy path. Whenever it encounters inconsistent values,
|
||||
specifically where some are empty as opposed to others then Houdini
|
||||
crashes. (Tested in Houdini 17.5.229)
|
||||
|
||||
"""
|
||||
|
||||
order = openpype.api.ValidateContentsOrder + 0.1
|
||||
families = ["pointcache"]
|
||||
hosts = ["houdini"]
|
||||
label = "Validate Primitive to Detail (Abc)"
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError(
|
||||
"Primitives found with inconsistent primitive "
|
||||
"to detail attributes. See log."
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
output = instance.data["output_node"]
|
||||
|
||||
rop = instance[0]
|
||||
pattern = rop.parm("prim_to_detail_pattern").eval().strip()
|
||||
if not pattern:
|
||||
cls.log.debug(
|
||||
"Alembic ROP has no 'Primitive to Detail' pattern. "
|
||||
"Validation is ignored.."
|
||||
)
|
||||
return
|
||||
|
||||
build_from_path = rop.parm("build_from_path").eval()
|
||||
if not build_from_path:
|
||||
cls.log.debug(
|
||||
"Alembic ROP has 'Build from Path' disabled. "
|
||||
"Validation is ignored.."
|
||||
)
|
||||
return
|
||||
|
||||
path_attr = rop.parm("path_attrib").eval()
|
||||
if not path_attr:
|
||||
cls.log.error(
|
||||
"The Alembic ROP node has no Path Attribute"
|
||||
"value set, but 'Build Hierarchy from Attribute'"
|
||||
"is enabled."
|
||||
)
|
||||
return [rop.path()]
|
||||
|
||||
# Let's assume each attribute is explicitly named for now and has no
|
||||
# wildcards for Primitive to Detail. This simplifies the check.
|
||||
cls.log.debug("Checking Primitive to Detail pattern: %s" % pattern)
|
||||
cls.log.debug("Checking with path attribute: %s" % path_attr)
|
||||
|
||||
# Check if the primitive attribute exists
|
||||
frame = instance.data.get("startFrame", 0)
|
||||
geo = output.geometryAtFrame(frame)
|
||||
|
||||
# If there are no primitives on the start frame then it might be
|
||||
# something that is emitted over time. As such we can't actually
|
||||
# validate whether the attributes exist, because they won't exist
|
||||
# yet. In that case, just warn the user and allow it.
|
||||
if len(geo.iterPrims()) == 0:
|
||||
cls.log.warning(
|
||||
"No primitives found on current frame. Validation"
|
||||
" for Primitive to Detail will be skipped."
|
||||
)
|
||||
return
|
||||
|
||||
attrib = geo.findPrimAttrib(path_attr)
|
||||
if not attrib:
|
||||
cls.log.info(
|
||||
"Geometry Primitives are missing "
|
||||
"path attribute: `%s`" % path_attr
|
||||
)
|
||||
return [output.path()]
|
||||
|
||||
# Ensure at least a single string value is present
|
||||
if not attrib.strings():
|
||||
cls.log.info(
|
||||
"Primitive path attribute has no "
|
||||
"string values: %s" % path_attr
|
||||
)
|
||||
return [output.path()]
|
||||
|
||||
paths = None
|
||||
for attr in pattern.split(" "):
|
||||
if not attr.strip():
|
||||
# Ignore empty values
|
||||
continue
|
||||
|
||||
# Check if the primitive attribute exists
|
||||
attrib = geo.findPrimAttrib(attr)
|
||||
if not attrib:
|
||||
# It is allowed to not have the attribute at all
|
||||
continue
|
||||
|
||||
# The issue can only happen if at least one string attribute is
|
||||
# present. So we ignore cases with no values whatsoever.
|
||||
if not attrib.strings():
|
||||
continue
|
||||
|
||||
check = defaultdict(set)
|
||||
values = geo.primStringAttribValues(attr)
|
||||
if paths is None:
|
||||
paths = geo.primStringAttribValues(path_attr)
|
||||
|
||||
for path, value in zip(paths, values):
|
||||
check[path].add(value)
|
||||
|
||||
for path, values in check.items():
|
||||
# Whenever a single path has multiple values for the
|
||||
# Primitive to Detail attribute then we consider it
|
||||
# inconsistent and invalidate the ROP node's content.
|
||||
if len(values) > 1:
|
||||
cls.log.warning(
|
||||
"Path has multiple values: %s (path: %s)"
|
||||
% (list(values), path)
|
||||
)
|
||||
return [output.path()]
|
||||
|
|
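
The core of the validator above is grouping Primitive-to-Detail values per hierarchy path and flagging any path that carries more than one distinct value. A standalone sketch of that invariant on plain lists, outside Houdini:

from collections import defaultdict

def inconsistent_paths(paths, values):
    """Return hierarchy paths whose attribute values disagree."""
    check = defaultdict(set)
    for path, value in zip(paths, values):
        check[path].add(value)
    return [path for path, vals in check.items() if len(vals) > 1]

# "/geo/a" carries both "x" and "" for the same attribute -> invalid
print(inconsistent_paths(["/geo/a", "/geo/a", "/geo/b"], ["x", "", "y"]))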

@@ -0,0 +1,37 @@
import pyblish.api
import openpype.api


class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin):
    """Validate Face Sets are disabled for extraction to pointcache.

    When groups are saved as Face Sets with the Alembic these show up
    as shadingEngine connections in Maya - however, with animated groups
    these connections in Maya won't work as expected; they won't update
    per frame. Additionally, it can break shader assignments in some cases
    where it requires to first break this connection to allow a shader to
    be assigned.

    It is allowed to include Face Sets, so only an issue is logged to
    identify that it could introduce issues down the pipeline.

    """

    order = openpype.api.ValidateContentsOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Alembic ROP Face Sets"

    def process(self, instance):

        rop = instance[0]
        facesets = rop.parm("facesets").eval()

        # 0 = No Face Sets
        # 1 = Save Non-Empty Groups as Face Sets
        # 2 = Save All Groups As Face Sets
        if facesets != 0:
            self.log.warning(
                "Alembic ROP saves 'Face Sets' for Geometry. "
                "Are you sure you want this?"
            )

@@ -1,9 +1,9 @@
import pyblish.api
import openpype.api
import colorbleed.api


class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output is correct
    """Validate that the node connected to the output is correct.

    The connected node cannot be of the following types for Alembic:
        - VDB

@@ -11,7 +11,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):

    """

    order = openpype.api.ValidateContentsOrder + 0.1
    order = colorbleed.api.ValidateContentsOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Input Node (Abc)"

@@ -19,19 +19,35 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Node connected to the output node incorrect")
            raise RuntimeError(
                "Primitive types found that are not supported "
                "for Alembic output."
            )

    @classmethod
    def get_invalid(cls, instance):

        invalid_nodes = ["VDB", "Volume"]
        invalid_prim_types = ["VDB", "Volume"]
        node = instance.data["output_node"]

        prims = node.geometry().prims()
        if not hasattr(node, "geometry"):
            # In the case someone has explicitly set an Object
            # node instead of a SOP node in Geometry context
            # then for now we ignore - this allows us to also
            # export object transforms.
            cls.log.warning("No geometry output node found, skipping check..")
            return

        for prim in prims:
            prim_type = prim.type().name()
            if prim_type in invalid_nodes:
                cls.log.error("Found a primitive which is of type '%s' !"
                              % prim_type)
                return [instance]
        frame = instance.data.get("startFrame", 0)
        geo = node.geometryAtFrame(frame)

        invalid = False
        for prim_type in invalid_prim_types:
            if geo.countPrimType(prim_type) > 0:
                cls.log.error(
                    "Found a primitive which is of type '%s' !" % prim_type
                )
                invalid = True

        if invalid:
            return [instance]

@@ -29,8 +29,9 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Output settings do no match for '%s'" %
                               instance)
            raise RuntimeError(
                "Output settings do not match for '%s'" % instance
            )

    @classmethod
    def get_invalid(cls, instance):

@@ -18,12 +18,17 @@ class ValidateBypassed(pyblish.api.InstancePlugin):

    def process(self, instance):

        if len(instance) == 0:
            # Ignore instances without any nodes
            # e.g. in memory bootstrap instances
            return

        invalid = self.get_invalid(instance)
        if invalid:
            rop = invalid[0]
            raise RuntimeError(
                "ROP node %s is set to bypass, publishing cannot continue.." %
                rop.path()
                "ROP node %s is set to bypass, publishing cannot continue.."
                % rop.path()
            )

    @classmethod

@@ -6,9 +6,9 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):
    """Validate Camera ROP settings."""

    order = openpype.api.ValidateContentsOrder
    families = ['camera']
    hosts = ['houdini']
    label = 'Camera ROP'
    families = ["camera"]
    hosts = ["houdini"]
    label = "Camera ROP"

    def process(self, instance):

@@ -16,8 +16,10 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):

        node = instance[0]
        if node.parm("use_sop_path").eval():
            raise RuntimeError("Alembic ROP for Camera export should not be "
                               "set to 'Use Sop Path'. Please disable.")
            raise RuntimeError(
                "Alembic ROP for Camera export should not be "
                "set to 'Use Sop Path'. Please disable."
            )

        # Get the root and objects parameter of the Alembic ROP node
        root = node.parm("root").eval()

@@ -34,8 +36,8 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):
        if not camera:
            raise ValueError("Camera path does not exist: %s" % path)

        if not camera.type().name() == "cam":
            raise ValueError("Object set in Alembic ROP is not a camera: "
                             "%s (type: %s)" % (camera, camera.type().name()))

        if camera.type().name() != "cam":
            raise ValueError(
                "Object set in Alembic ROP is not a camera: "
                "%s (type: %s)" % (camera, camera.type().name())
            )

@@ -0,0 +1,60 @@
import pyblish.api


class ValidateCopOutputNode(pyblish.api.InstancePlugin):
    """Validate the instance COP Output Node.

    This will ensure:
        - The COP Path is set.
        - The COP Path refers to an existing object.
        - The COP Path node is a COP node.

    """

    order = pyblish.api.ValidatorOrder
    families = ["imagesequence"]
    hosts = ["houdini"]
    label = "Validate COP Output Node"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou

        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            cls.log.error(
                "COP Output node in '%s' does not exist. "
                "Ensure a valid COP output path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a COP node.
        if not isinstance(output_node, hou.CopNode):
            cls.log.error(
                "Output node %s is not a COP node. "
                "COP Path must point to a COP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

        # For the sake of completeness also assert the category type
        # is Cop2 to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Cop2", (
            "Output node %s is not of category Cop2. This is a bug.."
            % output_node.path()
        )

@@ -0,0 +1,59 @@
import os
import pyblish.api

from openpype.hosts.houdini.api import lib


class ValidateFileExtension(pyblish.api.InstancePlugin):
    """Validate the output file extension fits the output family.

    File extensions:
        - Pointcache must be .abc
        - Camera must be .abc
        - VDB must be .vdb

    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "camera", "vdbcache"]
    hosts = ["houdini"]
    label = "Output File Extension"

    family_extensions = {
        "pointcache": ".abc",
        "camera": ".abc",
        "vdbcache": ".vdb",
    }

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "ROP node has incorrect file extension: %s" % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        # Get ROP node from instance
        node = instance[0]

        # Create lookup for current family in instance
        families = []
        family = instance.data.get("family", None)
        if family:
            families.append(family)
        families = set(families)

        # Perform extension check
        output = lib.get_output_parameter(node).eval()
        _, output_extension = os.path.splitext(output)

        for family in families:
            extension = cls.family_extensions.get(family, None)
            if extension is None:
                raise RuntimeError("Unsupported family: %s" % family)

            if output_extension != extension:
                return [node.path()]
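
A quick sanity check of the lookup logic above, runnable outside Houdini (the ROP node and `lib.get_output_parameter` are replaced by a plain file path here):

import os

family_extensions = {"pointcache": ".abc", "camera": ".abc", "vdbcache": ".vdb"}

def is_valid(output_path, family):
    _, ext = os.path.splitext(output_path)
    return ext == family_extensions[family]

print(is_valid("/tmp/cache/my_vdb_cache.0001.vdb", "vdbcache"))   # True
print(is_valid("/tmp/cache/my_vdb_cache.0001.bgeo", "vdbcache"))  # False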

@@ -0,0 +1,51 @@
import pyblish.api

from openpype.hosts.houdini.api import lib


class ValidateFrameToken(pyblish.api.InstancePlugin):
    """Validate if the unexpanded string contains the frame ('$F') token.

    This validator will *only* check the output parameter of the node if
    the Valid Frame Range is not set to 'Render Current Frame'.

    Rules:
        If you render out a frame range it is mandatory to have the
        frame token - '$F4' or similar - to ensure that each frame gets
        written. If this is not the case you will overwrite the same file
        every time a frame is written out.

    Examples:
        Good: 'my_vdb_cache.$F4.vdb'
        Bad: 'my_vdb_cache.vdb'

    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Frame Token"
    families = ["vdbcache"]

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output settings do not match for '%s'" % instance
            )

    @classmethod
    def get_invalid(cls, instance):

        node = instance[0]

        # Check trange parm, 0 means Render Current Frame
        frame_range = node.evalParm("trange")
        if frame_range == 0:
            return []

        output_parm = lib.get_output_parameter(node)
        unexpanded_str = output_parm.unexpandedString()

        if "$F" not in unexpanded_str:
            cls.log.error("No frame token found in '%s'" % node.path())
            return [instance]
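
The reason for checking the unexpanded string rather than the evaluated one is that Houdini expands `$F4` to the current frame number on eval, which would hide the problem. A sketch of the same check on plain strings:

def has_frame_token(unexpanded_path):
    # The raw parameter value still contains '$F' tokens,
    # e.g. "my_vdb_cache.$F4.vdb"; the evaluated value would
    # already read "my_vdb_cache.0001.vdb".
    return "$F" in unexpanded_path

print(has_frame_token("my_vdb_cache.$F4.vdb"))  # True
print(has_frame_token("my_vdb_cache.vdb"))      # False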

@@ -0,0 +1,30 @@
import pyblish.api


class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin):
    """Validate the Houdini instance runs a Commercial license.

    When extracting USD files from a non-commercial Houdini license, even with
    a Houdini Indie license, the resulting files will get "scrambled" with
    a license protection and get a special .usdnc or .usdlc suffix.

    This currently breaks the Subset/representation pipeline so we disallow
    any publish with those licenses. Only the commercial license is valid.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usd"]
    hosts = ["houdini"]
    label = "Houdini Commercial License"

    def process(self, instance):

        import hou

        license = hou.licenseCategory()
        if license != hou.licenseCategoryType.Commercial:
            raise RuntimeError(
                "USD Publishing requires a full Commercial "
                "license. You are on: %s" % license
            )
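
As a usage note, the same check can be run interactively in the Houdini Python shell to see what the current session reports (this assumes a running Houdini where the `hou` module is available):

import hou

# Prints e.g. licenseCategoryType.Commercial, .Indie or .Apprentice
print(hou.licenseCategory())
print(hou.licenseCategory() == hou.licenseCategoryType.Commercial)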

@@ -6,18 +6,18 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
    """Validate Create Intermediate Directories is enabled on ROP node."""

    order = openpype.api.ValidateContentsOrder
    families = ['pointcache',
                'camera',
                'vdbcache']
    hosts = ['houdini']
    label = 'Create Intermediate Directories Checked'
    families = ["pointcache", "camera", "vdbcache"]
    hosts = ["houdini"]
    label = "Create Intermediate Directories Checked"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Found ROP node with Create Intermediate "
                               "Directories turned off: %s" % invalid)
            raise RuntimeError(
                "Found ROP node with Create Intermediate "
                "Directories turned off: %s" % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

openpype/hosts/houdini/plugins/publish/validate_no_errors.py (new file, 65 lines)

@@ -0,0 +1,65 @@
import pyblish.api
import openpype.api
import hou


def cook_in_range(node, start, end):
    current = hou.intFrame()
    if start <= current <= end:
        # Allow cooking current frame since we're in frame range
        node.cook(force=False)
    else:
        node.cook(force=False, frame_range=(start, start))


def get_errors(node):
    """Get cooking errors.

    If the node already has errors, check whether it needs to recook.
    If so, then recook first to see if that solves it.

    """
    if node.errors() and node.needsToCook():
        node.cook()

    return node.errors()


class ValidateNoErrors(pyblish.api.InstancePlugin):
    """Validate the Instance has no current cooking errors."""

    order = openpype.api.ValidateContentsOrder
    hosts = ["houdini"]
    label = "Validate no errors"

    def process(self, instance):

        validate_nodes = []

        if len(instance) > 0:
            validate_nodes.append(instance[0])
        output_node = instance.data.get("output_node")
        if output_node:
            validate_nodes.append(output_node)

        for node in validate_nodes:
            self.log.debug("Validating for errors: %s" % node.path())
            errors = get_errors(node)

            if errors:
                # If there are current errors, then try an unforced cook
                # to see whether the error will disappear.
                self.log.debug(
                    "Recooking to revalidate error "
                    "is up to date for: %s" % node.path()
                )
                current_frame = hou.intFrame()
                start = instance.data.get("frameStart", current_frame)
                end = instance.data.get("frameEnd", current_frame)
                cook_in_range(node, start=start, end=end)

                # Check for errors again after the forced recook
                errors = get_errors(node)
                if errors:
                    self.log.error(errors)
                    raise RuntimeError("Node has errors: %s" % node.path())
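
The retry flow above is: read errors, recook once unforced, re-read errors, and only fail when they persist. A minimal standalone sketch of that pattern with a stand-in node object (purely hypothetical, for illustration):

class FakeNode:
    """Stand-in mimicking the tiny part of hou.Node used above."""
    def __init__(self, errors):
        self._errors = list(errors)
    def errors(self):
        return tuple(self._errors)
    def needsToCook(self):
        return bool(self._errors)
    def cook(self, **kwargs):
        self._errors = []  # pretend the recook resolved the errors

node = FakeNode(errors=["Unable to read file"])
if node.errors() and node.needsToCook():
    node.cook()
print(node.errors())  # () -> the stale error disappeared after recook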

@@ -1,50 +0,0 @@
import pyblish.api
import openpype.api


class ValidatOutputNodeExists(pyblish.api.InstancePlugin):
    """Validate if node attribute Create intermediate Directories is turned on

    Rules:
        * The node must have Create intermediate Directories turned on to
          ensure the output file will be created

    """

    order = openpype.api.ValidateContentsOrder
    families = ["*"]
    hosts = ['houdini']
    label = "Output Node Exists"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Could not find output node(s)!")

    @classmethod
    def get_invalid(cls, instance):

        import hou

        result = set()

        node = instance[0]
        if node.type().name() == "alembic":
            soppath_parm = "sop_path"
        else:
            # Fall back to geometry node
            soppath_parm = "soppath"

        sop_path = node.parm(soppath_parm).eval()
        output_node = hou.node(sop_path)

        if output_node is None:
            cls.log.error("Node at '%s' does not exist" % sop_path)
            result.add(node.path())

        # Added cam as this is a legit output type (cameras can't
        if output_node.type().name() not in ["output", "cam"]:
            cls.log.error("SOP Path does not end path at output node")
            result.add(node.path())

        return result

@@ -14,8 +14,7 @@ class ValidateOutputNode(pyblish.api.InstancePlugin):
    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache",
                "vdbcache"]
    families = ["pointcache", "vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node"

@@ -23,8 +22,10 @@ class ValidateOutputNode(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Output node(s) `%s` are incorrect. "
                               "See plug-in log for details." % invalid)
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

@@ -35,39 +36,42 @@ class ValidateOutputNode(pyblish.api.InstancePlugin):

        if output_node is None:
            node = instance[0]
            cls.log.error("SOP Output node in '%s' does not exist. "
                          "Ensure a valid SOP output path is set."
                          % node.path())
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a Sop node.
        if not isinstance(output_node, hou.SopNode):
            cls.log.error("Output node %s is not a SOP node. "
                          "SOP Path must point to a SOP node, "
                          "instead found category type: %s" % (
                              output_node.path(),
                              output_node.type().category().name()
                          )
                          )
            cls.log.error(
                "Output node %s is not a SOP node. "
                "SOP Path must point to a SOP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

        # For the sake of completeness also assert the category type
        # is Sop to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Sop", (
            "Output node %s is not of category Sop. This is a bug.." %
            output_node.path()
            "Output node %s is not of category Sop. This is a bug.."
            % output_node.path()
        )

        # Check if output node has incoming connections
        if not output_node.inputConnections():
            cls.log.error("Output node `%s` has no incoming connections"
                          % output_node.path())
            cls.log.error(
                "Output node `%s` has no incoming connections"
                % output_node.path()
            )
            return [output_node.path()]

        # Ensure the output node has at least Geometry data
        if not output_node.geometry():
            cls.log.error("Output node `%s` has no geometry data."
                          % output_node.path())
            cls.log.error(
                "Output node `%s` has no geometry data." % output_node.path()
            )
            return [output_node.path()]

@@ -19,8 +19,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("See log for details. "
                               "Invalid nodes: {0}".format(invalid))
            raise RuntimeError(
                "See log for details. Invalid nodes: {0}".format(invalid)
            )

    @classmethod
    def get_invalid(cls, instance):

@@ -28,48 +29,68 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
        import hou

        output = instance.data["output_node"]
        prims = output.geometry().prims()

        rop = instance[0]
        build_from_path = rop.parm("build_from_path").eval()
        if not build_from_path:
            cls.log.debug("Alembic ROP has 'Build from Path' disabled. "
                          "Validation is ignored..")
            cls.log.debug(
                "Alembic ROP has 'Build from Path' disabled. "
                "Validation is ignored.."
            )
            return

        path_attr = rop.parm("path_attrib").eval()
        if not path_attr:
            cls.log.error("The Alembic ROP node has no Path Attribute "
                          "value set, but 'Build Hierarchy from Attribute' "
                          "is enabled.")
            cls.log.error(
                "The Alembic ROP node has no Path Attribute "
                "value set, but 'Build Hierarchy from Attribute' "
                "is enabled."
            )
            return [rop.path()]

        cls.log.debug("Checking for attribute: %s" % path_attr)

        missing_attr = []
        invalid_attr = []
        for prim in prims:
        # Check if the primitive attribute exists
        frame = instance.data.get("startFrame", 0)
        geo = output.geometryAtFrame(frame)

            try:
                path = prim.stringAttribValue(path_attr)
            except hou.OperationFailed:
                # Attribute does not exist.
                missing_attr.append(prim)
                continue
        # If there are no primitives on the current frame then we can't
        # check whether the path names are correct. So we'll just issue a
        # warning that the check can't be done consistently and skip
        # validation.
        if len(geo.iterPrims()) == 0:
            cls.log.warning(
                "No primitives found on current frame. Validation"
                " for primitive hierarchy paths will be skipped,"
                " thus can't be validated."
            )
            return

            if not path:
                # Empty path value is invalid.
                invalid_attr.append(prim)
                continue

        if missing_attr:
            cls.log.info("Prims are missing attribute `%s`" % path_attr)

        if invalid_attr:
            cls.log.info("Prims have no value for attribute `%s` "
                         "(%s of %s prims)" % (path_attr,
                                               len(invalid_attr),
                                               len(prims)))

        if missing_attr or invalid_attr:
        # Check if there are any values for the primitives
        attrib = geo.findPrimAttrib(path_attr)
        if not attrib:
            cls.log.info(
                "Geometry Primitives are missing "
                "path attribute: `%s`" % path_attr
            )
            return [output.path()]

        # Ensure at least a single string value is present
        if not attrib.strings():
            cls.log.info(
                "Primitive path attribute has no "
                "string values: %s" % path_attr
            )
            return [output.path()]

        paths = geo.primStringAttribValues(path_attr)
        # Ensure all primitives are set to a valid path
        # Collect all invalid primitive numbers
        invalid_prims = [i for i, path in enumerate(paths) if not path]
        if invalid_prims:
            num_prims = len(geo.iterPrims())  # faster than len(geo.prims())
            cls.log.info(
                "Prims have no value for attribute `%s` "
                "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims)
            )
            return [output.path()]
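
The rewritten check boils down to: fetch all path values at once and flag indices holding empty strings. A standalone sketch of that list-comprehension logic:

paths = ["/geo/body", "", "/geo/head", ""]

# Indices of primitives whose path attribute is empty -> invalid
invalid_prims = [i for i, path in enumerate(paths) if not path]
print(invalid_prims)  # [1, 3]
print("%s of %s prims invalid" % (len(invalid_prims), len(paths)))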

@@ -0,0 +1,43 @@
import pyblish.api
import openpype.api

from openpype.hosts.houdini.api import lib

import hou


class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin):
    """Validate the remote publish out node exists for Deadline to trigger."""

    order = pyblish.api.ValidatorOrder - 0.4
    families = ["*"]
    hosts = ["houdini"]
    targets = ["deadline"]
    label = "Remote Publish ROP node"
    actions = [openpype.api.RepairContextAction]

    def process(self, context):

        cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()"

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")

        # We ensure it's a shell node and that it has the pre-render script
        # set correctly. Plus the shell script it will trigger should be
        # completely empty (doing nothing)
        assert node.type().name() == "shell", "Must be shell ROP node"
        assert node.parm("command").eval() == "", "Must have no command"
        assert not node.parm("shellexec").eval(), "Must not execute in shell"
        assert (
            node.parm("prerender").eval() == cmd
        ), "REMOTE_PUBLISH node does not have correct prerender script."
        assert (
            node.parm("lprerender").eval() == "python"
        ), "REMOTE_PUBLISH node prerender script type not set to 'python'"

    @classmethod
    def repair(cls, context):
        """(Re)create the node if it fails to pass validation."""
        lib.create_remote_publish_node(force=True)
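
For reference, a node satisfying these assertions could be built roughly like this. This is a sketch only; the actual creation lives in `lib.create_remote_publish_node`, whose implementation is not shown in this diff:

import hou

out = hou.node("/out")
node = out.node("REMOTE_PUBLISH") or out.createNode(
    "shell", node_name="REMOTE_PUBLISH"
)
node.parm("command").set("")           # shell command stays empty
node.parm("shellexec").set(False)      # do not execute in a shell
node.parm("lprerender").set("python")  # pre-render script language
node.parm("prerender").set(
    "import colorbleed.lib; colorbleed.lib.publish_remote()"
)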

@@ -0,0 +1,35 @@
import pyblish.api
import openpype.api

import hou


class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):
    """Validate the remote publish node is *not* bypassed."""

    order = pyblish.api.ValidatorOrder - 0.39
    families = ["*"]
    hosts = ["houdini"]
    targets = ["deadline"]
    label = "Remote Publish ROP enabled"
    actions = [openpype.api.RepairContextAction]

    def process(self, context):

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")

        if node.isBypassed():
            raise RuntimeError("REMOTE_PUBLISH must not be bypassed.")

    @classmethod
    def repair(cls, context):
        """Disable the bypass flag on the node if it fails to pass validation."""

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")

        cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH")
        node.bypass(False)

@@ -0,0 +1,80 @@
import pyblish.api


class ValidateSopOutputNode(pyblish.api.InstancePlugin):
    """Validate the instance SOP Output Node.

    This will ensure:
        - The SOP Path is set.
        - The SOP Path refers to an existing object.
        - The SOP Path node is a SOP node.
        - The SOP Path node has at least one input connection (has an input)
        - The SOP Path has geometry data.

    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou

        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a Sop node.
        if not isinstance(output_node, hou.SopNode):
            cls.log.error(
                "Output node %s is not a SOP node. "
                "SOP Path must point to a SOP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

        # For the sake of completeness also assert the category type
        # is Sop to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Sop", (
            "Output node %s is not of category Sop. This is a bug.."
            % output_node.path()
        )

        # Ensure the node is cooked and succeeds to cook so we can correctly
        # check for its geometry data.
        if output_node.needsToCook():
            cls.log.debug("Cooking node: %s" % output_node.path())
            try:
                output_node.cook()
            except hou.Error as exc:
                cls.log.error("Cook failed: %s" % exc)
                cls.log.error(output_node.errors()[0])
                return [output_node.path()]

        # Ensure the output node has at least Geometry data
        if not output_node.geometry():
            cls.log.error(
                "Output node `%s` has no geometry data." % output_node.path()
            )
            return [output_node.path()]

@@ -0,0 +1,50 @@
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib


class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
    """Validate USD loaded paths have no backslashes.

    This is a crucial validation for HUSK USD rendering as Houdini's
    USD Render ROP will fail to write out a .usd file for rendering that
    correctly preserves the backslashes, e.g. it will incorrectly convert a
    '\t' to a TAB character disallowing HUSK to find those specific files.

    This validation is redundant for usdModel since that flattens the model
    before write. As such it will never have any used layers with a path.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdSetDress", "usdShade", "usd", "usdrender"]
    hosts = ["houdini"]
    label = "USD Layer path backslashes"
    optional = True

    def process(self, instance):

        rop = instance[0]
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for layer in stage.GetUsedLayers():
            references = layer.externalReferences

            for ref in references:

                # Ignore anonymous layers
                if ref.startswith("anon:"):
                    continue

                # If any backslashes in the path consider it invalid
                if "\\" in ref:
                    self.log.error("Found invalid path: %s" % ref)
                    invalid.append(layer)

        if invalid:
            raise RuntimeError(
                "Loaded layers have backslashes. "
                "This is invalid for HUSK USD rendering."
            )
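
The usual fix when this validator trips is to normalize paths to forward slashes before they are authored into the layer; a small sketch:

def normalize_usd_path(path):
    # "C:\textures\tile.exr" contains "\t", which a USD write can
    # re-interpret as a TAB character; forward slashes are safe.
    return path.replace("\\", "/")

print(normalize_usd_path("C:\\textures\\tile.exr"))  # C:/textures/tile.exr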

@@ -0,0 +1,76 @@
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib

from pxr import UsdShade, UsdRender, UsdLux


def fullname(o):
    """Get fully qualified class name"""
    module = o.__module__
    if module is None or module == str.__module__:
        return o.__name__
    return module + "." + o.__name__


class ValidateUsdModel(pyblish.api.InstancePlugin):
    """Validate USD Model.

    Disallow Shaders, Render settings, products and vars and Lux lights.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdModel"]
    hosts = ["houdini"]
    label = "Validate USD Model"
    optional = True

    disallowed = [
        UsdShade.Shader,
        UsdRender.Settings,
        UsdRender.Product,
        UsdRender.Var,
        UsdLux.Light,
    ]

    def process(self, instance):

        rop = instance[0]
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for prim in stage.Traverse():

            for klass in self.disallowed:
                if klass(prim):
                    # Get full class name without pxr. prefix
                    name = fullname(klass).split("pxr.", 1)[-1]
                    path = str(prim.GetPath())
                    self.log.warning("Disallowed %s: %s" % (name, path))

                    invalid.append(prim)

        if invalid:
            prim_paths = sorted([str(prim.GetPath()) for prim in invalid])
            raise RuntimeError("Found invalid primitives: %s" % prim_paths)


class ValidateUsdShade(ValidateUsdModel):
    """Validate usdShade.

    Disallow Render settings, products, vars and Lux lights.

    """

    families = ["usdShade"]
    label = "Validate USD Shade"

    disallowed = [
        UsdRender.Settings,
        UsdRender.Product,
        UsdRender.Var,
        UsdLux.Light,
    ]
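
The `fullname` helper just builds a dotted module path, which is then stripped of the `pxr.` prefix for readable log output. A self-contained illustration with standard-library types:

from collections import OrderedDict

def fullname(o):
    module = o.__module__
    if module is None or module == str.__module__:
        return o.__name__
    return module + "." + o.__name__

print(fullname(OrderedDict))  # collections.OrderedDict
print(fullname(str))          # str (builtin, no module prefix)
print("pxr.UsdShade.Shader".split("pxr.", 1)[-1])  # UsdShade.Shader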

@@ -0,0 +1,52 @@
import pyblish.api


class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
    """Validate the instance USD LOPs Output Node.

    This will ensure:
        - The LOP Path is set.
        - The LOP Path refers to an existing object.
        - The LOP Path node is a LOP node.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usd"]
    hosts = ["houdini"]
    label = "Validate Output Node (USD)"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou

        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            cls.log.error(
                "USD node '%s' LOP path does not exist. "
                "Ensure a valid LOP path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a LOP node.
        if not isinstance(output_node, hou.LopNode):
            cls.log.error(
                "Output node %s is not a LOP node. "
                "LOP Path must point to a LOP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

@@ -0,0 +1,31 @@
import pyblish.api

import os


class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin):
    """Validate USD Render Product names are correctly set absolute paths."""

    order = pyblish.api.ValidatorOrder
    families = ["usdrender"]
    hosts = ["houdini"]
    label = "Validate USD Render Product Names"
    optional = True

    def process(self, instance):

        invalid = []
        for filepath in instance.data["files"]:

            if not filepath:
                invalid.append("Detected empty output filepath.")

            if not os.path.isabs(filepath):
                invalid.append(
                    "Output file path is not an absolute path: %s" % filepath
                )

        if invalid:
            for message in invalid:
                self.log.error(message)
            raise RuntimeError("USD Render Paths are invalid.")

@@ -0,0 +1,54 @@
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib


class ValidateUsdSetDress(pyblish.api.InstancePlugin):
    """Validate USD Set Dress.

    Must only have references or payloads. May not generate new mesh or
    flattened meshes.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdSetDress"]
    hosts = ["houdini"]
    label = "Validate USD Set Dress"
    optional = True

    def process(self, instance):

        from pxr import UsdGeom

        rop = instance[0]
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for node in stage.Traverse():

            if UsdGeom.Mesh(node):
                # This solely checks whether there is any USD involved
                # in this Prim's Stack and doesn't accurately tell us
                # whether it was generated locally or not.
                # TODO: More accurately track whether the Prim was created
                #       in the local scene
                stack = node.GetPrimStack()
                for sdf in stack:
                    path = sdf.layer.realPath
                    if path:
                        break
                else:
                    prim_path = node.GetPath()
                    self.log.error(
                        "%s is not referenced geometry." % prim_path
                    )
                    invalid.append(node)

        if invalid:
            raise RuntimeError(
                "SetDress contains local geometry. "
                "This is not allowed, it must be an assembly "
                "of referenced assets."
            )

@@ -0,0 +1,41 @@
import re

import pyblish.api
import openpype.api

from avalon import io


class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
    """Validate the usdModel subset this usdShade subset refers to exists."""

    order = openpype.api.ValidateContentsOrder
    hosts = ["houdini"]
    families = ["usdShade"]
    label = "USD Shade model exists"

    def process(self, instance):

        asset = instance.data["asset"]
        subset = instance.data["subset"]

        # Assume shading variation starts after a dot separator
        shade_subset = subset.split(".", 1)[0]
        model_subset = re.sub("^usdShade", "usdModel", shade_subset)

        asset_doc = io.find_one({"name": asset, "type": "asset"})
        if not asset_doc:
            raise RuntimeError("Asset does not exist: %s" % asset)

        subset_doc = io.find_one(
            {
                "name": model_subset,
                "type": "subset",
                "parent": asset_doc["_id"],
            }
        )
        if not subset_doc:
            raise RuntimeError(
                "USD Model subset not found: "
                "%s (%s)" % (model_subset, asset)
            )
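
The subset-name mapping above is pure string work and easy to verify in isolation (the subset name below is made up):

import re

subset = "usdShadeMain.red"

# Drop the shading variation after the dot, then swap the prefix
shade_subset = subset.split(".", 1)[0]  # "usdShadeMain"
model_subset = re.sub("^usdShade", "usdModel", shade_subset)
print(model_subset)  # "usdModelMain"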

@@ -0,0 +1,63 @@
import pyblish.api
import openpype.api

import hou


class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
    """Validate USD Shading Workspace is correct version.

    There have been some issues with outdated/erroneous Shading Workspaces
    so this is to confirm everything is set as it should.

    """

    order = openpype.api.ValidateContentsOrder
    hosts = ["houdini"]
    families = ["usdShade"]
    label = "USD Shade Workspace"

    def process(self, instance):

        rop = instance[0]
        workspace = rop.parent()

        definition = workspace.type().definition()
        name = definition.nodeType().name()
        library = definition.libraryFilePath()

        all_definitions = hou.hda.definitionsInFile(library)
        node_type, version = name.rsplit(":", 1)
        version = float(version)

        highest = version
        for other_definition in all_definitions:
            other_name = other_definition.nodeType().name()
            other_node_type, other_version = other_name.rsplit(":", 1)
            other_version = float(other_version)

            if node_type != other_node_type:
                continue

            # Get highest version
            highest = max(highest, other_version)

        if version != highest:
            raise RuntimeError(
                "Shading Workspace is not the latest version."
                " Found %s. Latest is %s." % (version, highest)
            )

        # There were some issues with the editable node not having the right
        # configured path. So for now let's assure that is correct too.
        value = (
            'avalon://`chs("../asset_name")`/'
            'usdShade`chs("../model_variantname1")`.usd'
        )
        rop_value = rop.parm("lopoutput").rawValue()
        if rop_value != value:
            raise RuntimeError(
                "Shading Workspace has invalid 'lopoutput'"
                " parameter value. The Shading Workspace"
                " needs to be reset to its default values."
            )
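
The version scan above relies on HDA type names ending in `:<version>`. A standalone sketch of picking the highest version from such names (the type names here are hypothetical):

names = ["studio::usd_shade_workspace:1.0",
         "studio::usd_shade_workspace:1.2",
         "studio::other_tool:9.9"]

current = "studio::usd_shade_workspace:1.0"
node_type, version = current.rsplit(":", 1)
highest = float(version)
for name in names:
    other_type, other_version = name.rsplit(":", 1)
    if other_type == node_type:
        highest = max(highest, float(other_version))
print(highest)  # 1.2 -> the current 1.0 workspace is outdated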

@@ -3,7 +3,7 @@ import openpype.api


class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output node is of type VDB
    """Validate that the node connected to the output node is of type VDB.

    Regardless of the amount of VDBs created, the output will need to have an
    equal amount of VDBs, points, primitives and vertices

@@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Node connected to the output node is not"
                               "of type VDB!")
            raise RuntimeError(
                "Node connected to the output node is not of type VDB!"
            )

    @classmethod
    def get_invalid(cls, instance):

@@ -0,0 +1,73 @@
import pyblish.api
import openpype.api
import hou


class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output node is of type VDB.

    Regardless of the amount of VDBs created, the output will need to have an
    equal amount of VDBs, points, primitives and vertices

    A VDB is an inherited type of Prim, holds the following data:
        - Primitives: 1
        - Points: 1
        - Vertices: 1
        - VDBs: 1

    """

    order = openpype.api.ValidateContentsOrder + 0.1
    families = ["vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node (VDB)"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Node connected to the output node is not of type VDB!"
            )

    @classmethod
    def get_invalid(cls, instance):

        node = instance.data["output_node"]
        if node is None:
            cls.log.error(
                "SOP path is not correctly set on "
                "ROP node '%s'." % instance[0].path()
            )
            return [instance]

        frame = instance.data.get("frameStart", 0)
        geometry = node.geometryAtFrame(frame)
        if geometry is None:
            # No geometry data on this node, maybe the node hasn't cooked?
            cls.log.error(
                "SOP node has no geometry data. "
                "Is it cooked? %s" % node.path()
            )
            return [node]

        prims = geometry.prims()
        nr_of_prims = len(prims)

        # All primitives must be hou.VDB
        invalid_prim = False
        for prim in prims:
            if not isinstance(prim, hou.VDB):
                cls.log.error("Found non-VDB primitive: %s" % prim)
                invalid_prim = True
        if invalid_prim:
            return [instance]

        nr_of_points = len(geometry.points())
        if nr_of_points != nr_of_prims:
            cls.log.error("The number of primitives and points do not match")
            return [instance]

        for prim in prims:
            if prim.numVertices() != 1:
                cls.log.error("Found primitive with more than 1 vertex!")
                return [instance]
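
The invariant this enforces (one point and one vertex per VDB primitive, equal counts overall) can be expressed compactly on plain counts:

def vdb_counts_consistent(num_prims, num_points, vertices_per_prim):
    # Every VDB primitive carries exactly one point and one vertex,
    # so all counts must line up one-to-one.
    return (
        num_prims == num_points
        and all(v == 1 for v in vertices_per_prim)
    )

print(vdb_counts_consistent(3, 3, [1, 1, 1]))  # True
print(vdb_counts_consistent(3, 2, [1, 1, 2]))  # False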

@@ -1,5 +1,4 @@
from avalon import api, houdini
import hou


def main():

openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py (vendored, new file, 1 line)

@@ -0,0 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py (vendored, new file, 168 lines)

@@ -0,0 +1,168 @@
import hou
import husdoutputprocessors.base as base
import os
import re
import logging

import colorbleed.usdlib as usdlib


def _get_project_publish_template():
    """Return publish template from database for current project"""
    from avalon import io
    project = io.find_one({"type": "project"},
                          projection={"config.template.publish": True})
    return project["config"]["template"]["publish"]


class AvalonURIOutputProcessor(base.OutputProcessorBase):
    """Process Avalon URIs into their full path equivalents."""

    _parameters = None
    _param_prefix = 'avalonurioutputprocessor_'
    _parms = {
        "use_publish_paths": _param_prefix + "use_publish_paths"
    }

    def __init__(self):
        """There is only one object of each output processor class that is
        ever created in a Houdini session. Therefore be very careful
        about what data gets put in this object.
        """
        self._template = None
        self._use_publish_paths = False
        self._cache = dict()

    def displayName(self):
        return 'Avalon URI Output Processor'

    def parameters(self):

        if not self._parameters:
            parameters = hou.ParmTemplateGroup()
            use_publish_path = hou.ToggleParmTemplate(
                name=self._parms["use_publish_paths"],
                label='Resolve Reference paths to publish paths',
                default_value=False,
                help=("When enabled any paths for Layers, References or "
                      "Payloads are resolved to published master versions.\n"
                      "This is usually only used by the publishing pipeline, "
                      "but can be used for testing too."))
            parameters.append(use_publish_path)
            self._parameters = parameters.asDialogScript()

        return self._parameters

    def beginSave(self, config_node, t):
        self._template = _get_project_publish_template()

        parm = self._parms["use_publish_paths"]
        self._use_publish_paths = config_node.parm(parm).evalAtTime(t)
        self._cache.clear()

    def endSave(self):
        self._template = None
        self._use_publish_paths = None
        self._cache.clear()

    def processAsset(self,
                     asset_path,
                     asset_path_for_save,
                     referencing_layer_path,
                     asset_is_layer,
                     for_save):
        """
        Args:
            asset_path (str): The incoming file path you want to alter or not.
            asset_path_for_save (bool): Whether the current path is a
                referenced path in the USD file. When True, return the path
                you want inside USD file.
            referencing_layer_path (str): ???
            asset_is_layer (bool): Whether this asset is a USD layer file.
                If this is False, the asset is something else (for example,
                a texture or volume file).
            for_save (bool): Whether the asset path is for a file to be saved
                out. If so, then return actual written filepath.

        Returns:
            The refactored asset path.

        """

        # Retrieve from cache if this query occurred before (optimization)
        cache_key = (asset_path, asset_path_for_save, asset_is_layer, for_save)
        if cache_key in self._cache:
            return self._cache[cache_key]

        relative_template = "{asset}_{subset}.{ext}"
        uri_data = usdlib.parse_avalon_uri(asset_path)
        if uri_data:

            if for_save:
                # Set save output path to a relative path so other
                # processors can potentially manage it easily?
                path = relative_template.format(**uri_data)

                print("Avalon URI Resolver: %s -> %s" % (asset_path, path))
                self._cache[cache_key] = path
                return path

            if self._use_publish_paths:
                # Resolve to an Avalon published asset for embedded paths
                path = self._get_usd_master_path(**uri_data)
            else:
                path = relative_template.format(**uri_data)

            print("Avalon URI Resolver: %s -> %s" % (asset_path, path))
            self._cache[cache_key] = path
            return path

        self._cache[cache_key] = asset_path
        return asset_path

    def _get_usd_master_path(self,
                             asset,
                             subset,
                             ext):
        """Get the filepath for a .usd file of a subset.

        This will return the path to an unversioned master file generated by
        `usd_master_file.py`.

        """

        from avalon import api, io

        PROJECT = api.Session["AVALON_PROJECT"]
        asset_doc = io.find_one({"name": asset,
                                 "type": "asset"})
        if not asset_doc:
            raise RuntimeError("Invalid asset name: '%s'" % asset)

        root = api.registered_root()
        path = self._template.format(**{
            "root": root,
            "project": PROJECT,
            "silo": asset_doc["silo"],
            "asset": asset_doc["name"],
            "subset": subset,
            "representation": ext,
            "version": 0  # stub version zero
        })

        # Remove the version folder
        subset_folder = os.path.dirname(os.path.dirname(path))
        master_folder = os.path.join(subset_folder, "master")
        fname = "{0}.{1}".format(subset, ext)

        return os.path.join(master_folder, fname).replace("\\", "/")


output_processor = AvalonURIOutputProcessor()


def usdOutputProcessor():
    return output_processor

openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py (vendored, new file, 90 lines)

@@ -0,0 +1,90 @@
import hou
import husdoutputprocessors.base as base
import os


class StagingDirOutputProcessor(base.OutputProcessorBase):
    """Output all USD ROP file nodes into the Staging Directory.

    Ignore any folders and paths set in the Configured Layers
    and USD ROP node, just take the filename and save into a
    single directory.

    """
    theParameters = None
    parameter_prefix = "stagingdiroutputprocessor_"
    stagingdir_parm_name = parameter_prefix + "stagingDir"

    def __init__(self):
        self.staging_dir = None

    def displayName(self):
        return 'StagingDir Output Processor'

    def parameters(self):
        if not self.theParameters:
            parameters = hou.ParmTemplateGroup()
            rootdirparm = hou.StringParmTemplate(
                self.stagingdir_parm_name,
                'Staging Directory', 1,
                string_type=hou.stringParmType.FileReference,
                file_type=hou.fileType.Directory
            )
            parameters.append(rootdirparm)
            self.theParameters = parameters.asDialogScript()
        return self.theParameters

    def beginSave(self, config_node, t):

        # Use the Root Directory parameter if it is set.
        root_dir_parm = config_node.parm(self.stagingdir_parm_name)
        if root_dir_parm:
            self.staging_dir = root_dir_parm.evalAtTime(t)

        if not self.staging_dir:
            out_file_parm = config_node.parm('lopoutput')
            if out_file_parm:
                self.staging_dir = out_file_parm.evalAtTime(t)
            if self.staging_dir:
                (self.staging_dir, filename) = os.path.split(self.staging_dir)

    def endSave(self):
        self.staging_dir = None

    def processAsset(self, asset_path,
                     asset_path_for_save,
                     referencing_layer_path,
                     asset_is_layer,
                     for_save):
        """
        Args:
            asset_path (str): The incoming file path you want to alter or not.
            asset_path_for_save (bool): Whether the current path is a
                referenced path in the USD file. When True, return the path
                you want inside USD file.
            referencing_layer_path (str): ???
            asset_is_layer (bool): Whether this asset is a USD layer file.
                If this is False, the asset is something else (for example,
                a texture or volume file).
            for_save (bool): Whether the asset path is for a file to be saved
                out. If so, then return actual written filepath.

        Returns:
            The refactored asset path.

        """

        # Treat save paths as being relative to the output path.
        if for_save and self.staging_dir:
            # Whenever we're processing a Save Path make sure to
            # resolve it to the Staging Directory
            filename = os.path.basename(asset_path)
            return os.path.join(self.staging_dir, filename)

        return asset_path


output_processor = StagingDirOutputProcessor()


def usdOutputProcessor():
    return output_processor
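
The effect of this processor is easy to see in isolation: any save path gets collapsed to its basename inside one staging directory, while reference paths pass through untouched. A sketch of the path handling (paths are made up):

import os

staging_dir = "/tmp/staging"

def redirect_to_staging(asset_path, for_save):
    if for_save and staging_dir:
        # Keep only the filename, re-root it under the staging directory
        return os.path.join(staging_dir, os.path.basename(asset_path))
    return asset_path

print(redirect_to_staging("/proj/shots/sh010/usd/layout.usd", True))
# -> /tmp/staging/layout.usd
print(redirect_to_staging("/proj/shots/sh010/usd/layout.usd", False))
# -> unchanged reference path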

openpype/lib/usdlib.py (new file, 350 lines)
@ -0,0 +1,350 @@
|
|||
import os
import re
import logging

try:
    from pxr import Usd, UsdGeom, Sdf, Kind
except ImportError:
    # Allow to fall back on Multiverse 6.3.0+ pxr usd library
    from mvpxr import Usd, UsdGeom, Sdf, Kind

from avalon import io, api

log = logging.getLogger(__name__)


# The predefined steps order used for bootstrapping USD Shots and Assets.
# These are ordered from strongest to weakest opinions, like in USD.
PIPELINE = {
    "shot": [
        "usdLighting",
        "usdFx",
        "usdSimulation",
        "usdAnimation",
        "usdLayout",
    ],
    "asset": ["usdShade", "usdModel"],
}


def create_asset(
    filepath, asset_name, reference_layers, kind=Kind.Tokens.component
):
    """
    Creates an asset file that consists of a top level layer and sublayers
    for shading and geometry.

    Args:
        filepath (str): Filepath where the asset.usd file will be saved.
        reference_layers (list): USD files to reference in the asset.
            Note that the bottom layer (first file, like a model) would
            be last in the list. The strongest layer will be the first
            index.
        asset_name (str): The name for the asset identifier and default prim.
        kind (pxr.Kind): A USD Kind for the root asset.

    """
    # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example

    log.info("Creating asset at %s", filepath)

    # Make the layer ascii - good for readability, plus the file is small
    root_layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"})
    stage = Usd.Stage.Open(root_layer)

    # Define a prim for the asset and make it the default for the stage.
    asset_prim = UsdGeom.Xform.Define(stage, "/%s" % asset_name).GetPrim()
    stage.SetDefaultPrim(asset_prim)

    # Let viewing applications know how to orient a free camera properly
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)

    # Usually we will "loft up" the kind authored into the exported geometry
    # layer rather than re-stamping here; we'll leave that for a later
    # tutorial, and just be explicit here.
    model = Usd.ModelAPI(asset_prim)
    if kind:
        model.SetKind(kind)

    model.SetAssetName(asset_name)
    model.SetAssetIdentifier("%s/%s.usd" % (asset_name, asset_name))

    # Add references to the asset prim
    references = asset_prim.GetReferences()
    for reference_filepath in reference_layers:
        references.AddReference(reference_filepath)

    stage.GetRootLayer().Save()
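As a hedged usage sketch of create_asset (file and layer names are invented; the pxr Python bindings and write access to the target folder are assumed):

# Build "hero.usd" with the shading layer as the strongest opinion,
# matching the ordering described in the docstring above.
create_asset(
    "hero.usd",
    asset_name="hero",
    reference_layers=["hero_usdShade.usd", "hero_usdModel.usd"],
)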
def create_shot(filepath, layers, create_layers=False):
    """Create a shot with separate layers for departments.

    Args:
        filepath (str): Filepath where the shot.usd file will be saved.
        layers (list): When provided, these paths are added verbatim to the
            stage's subLayerPaths. When a provided layer path does not exist
            it is generated using Sdf.Layer.CreateNew.
        create_layers (bool): Whether to create the stub layers on disk if
            they do not exist yet.

    Returns:
        str: The saved shot file path.

    """
    # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example

    stage = Usd.Stage.CreateNew(filepath)
    log.info("Creating shot at %s", filepath)

    for layer_path in layers:
        if create_layers and not os.path.exists(layer_path):
            # We use the Sdf API here to quickly create layers. Also, we're
            # using it as a way to author the subLayerPaths as there is no
            # way to do that directly in the Usd API.
            layer_folder = os.path.dirname(layer_path)
            if not os.path.exists(layer_folder):
                os.makedirs(layer_folder)

            Sdf.Layer.CreateNew(layer_path)

        stage.GetRootLayer().subLayerPaths.append(layer_path)

    # Let viewing applications know how to orient a free camera properly
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)
    stage.GetRootLayer().Save()

    return filepath
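And a similar assumed example for create_shot (paths are illustrative only):

# Compose a shot from department layers, strongest first; stub layers
# are created on disk because create_layers=True.
create_shot(
    "sh010.usd",
    layers=["sh010_usdAnimation.usd", "sh010_usdLayout.usd"],
    create_layers=True,
)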
def create_model(filename, asset, variant_subsets):
    """Create a USD Model file.

    For each of the variation paths it will payload the path and set its
    relevant variation name.

    """
    asset_doc = io.find_one({"name": asset, "type": "asset"})
    assert asset_doc, "Asset not found: %s" % asset

    variants = []
    for subset in variant_subsets:
        prefix = "usdModel"
        if subset.startswith(prefix):
            # Strip off the "usdModel" prefix
            variant = subset[len(prefix):]
        else:
            raise ValueError(
                "Model subsets must start with usdModel: %s" % subset
            )

        path = get_usd_master_path(
            asset=asset_doc, subset=subset, representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename,
        variants=variants,
        variantset="model",
        variant_prim="/root",
        reference_prim="/root/geo",
        as_payload=True,
    )

    UsdGeom.SetStageMetersPerUnit(stage, 1)
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)

    # modelAPI = Usd.ModelAPI(root_prim)
    # modelAPI.SetKind(Kind.Tokens.component)

    # See http://openusd.org/docs/api/class_usd_model_a_p_i.html#details
    # for more on assetInfo
    # modelAPI.SetAssetName(asset)
    # modelAPI.SetAssetIdentifier(asset)

    stage.GetRootLayer().Save()


def create_shade(filename, asset, variant_subsets):
    """Create a master USD shade file for an asset.

    For each available model variation this should generate a reference
    to a `usdShade_{modelVariant}` subset.

    """
    asset_doc = io.find_one({"name": asset, "type": "asset"})
    assert asset_doc, "Asset not found: %s" % asset

    variants = []
    for subset in variant_subsets:
        prefix = "usdModel"
        if subset.startswith(prefix):
            # Strip off the "usdModel" prefix
            variant = subset[len(prefix):]
        else:
            raise ValueError(
                "Model subsets must start with usdModel: %s" % subset
            )

        shade_subset = re.sub("^usdModel", "usdShade", subset)
        path = get_usd_master_path(
            asset=asset_doc, subset=shade_subset, representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename, variants=variants, variantset="model", variant_prim="/root"
    )

    stage.GetRootLayer().Save()


def create_shade_variation(filename, asset, model_variant, shade_variants):
    """Create the master Shade file for a specific model variant.

    This should reference all shade variants for the specific model variant.

    """
    asset_doc = io.find_one({"name": asset, "type": "asset"})
    assert asset_doc, "Asset not found: %s" % asset

    variants = []
    for variant in shade_variants:
        subset = "usdShade_{model}_{shade}".format(
            model=model_variant, shade=variant
        )
        path = get_usd_master_path(
            asset=asset_doc, subset=subset, representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename, variants=variants, variantset="shade", variant_prim="/root"
    )

    stage.GetRootLayer().Save()
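The prefix handling above implies a naming convention for the variant subsets. A hypothetical call (asset and subset names invented; an avalon database connection with a matching asset document is assumed):

# Each subset becomes a "model" variant named after whatever follows
# the "usdModel" prefix, e.g. "usdModelMain" -> variant "Main".
create_model(
    "hero_model.usd",
    asset="hero",
    variant_subsets=["usdModelMain", "usdModelDamaged"],
)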
def _create_variants_file(
    filename,
    variants,
    variantset,
    default_variant=None,
    variant_prim="/root",
    reference_prim=None,
    set_default_variant=True,
    as_payload=False,
    skip_variant_on_single_file=True,
):
    root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"})
    stage = Usd.Stage.Open(root_layer)

    root_prim = stage.DefinePrim(variant_prim)
    stage.SetDefaultPrim(root_prim)

    def _reference(path):
        """Reference/Payload path depending on function arguments"""
        if reference_prim:
            prim = stage.DefinePrim(reference_prim)
        else:
            prim = root_prim

        if as_payload:
            # Payload
            prim.GetPayloads().AddPayload(Sdf.Payload(path))
        else:
            # Reference
            prim.GetReferences().AddReference(Sdf.Reference(path))

    assert variants, "Must have variants, got: %s" % variants

    log.info(filename)

    if skip_variant_on_single_file and len(variants) == 1:
        # Reference directly, no variants
        variant_path = variants[0][1]
        _reference(variant_path)

        log.info("Non-variants..")
        log.info("Path: %s" % variant_path)

    else:
        # Variants
        append = Usd.ListPositionBackOfAppendList
        variant_set = root_prim.GetVariantSets().AddVariantSet(
            variantset, append
        )

        for variant, variant_path in variants:

            if default_variant is None:
                default_variant = variant

            variant_set.AddVariant(variant, append)
            variant_set.SetVariantSelection(variant)
            with variant_set.GetVariantEditContext():
                _reference(variant_path)

            log.info("Variants..")
            log.info("Variant: %s" % variant)
            log.info("Path: %s" % variant_path)

        if set_default_variant:
            variant_set.SetVariantSelection(default_variant)

    return stage
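The variants argument is a list of (variant_name, filepath) pairs. A direct call might look like this sketch (names invented; note that with a single pair and skip_variant_on_single_file=True the file is referenced without any variant set at all):

_create_variants_file(
    "hero_variants.usd",
    variants=[("Main", "hero_main.usd"), ("Damaged", "hero_damaged.usd")],
    variantset="model",
    variant_prim="/root",
)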
def get_usd_master_path(asset, subset, representation):
    """Get the filepath for a .usd file of a subset.

    This will return the path to an unversioned master file generated by
    `usd_master_file.py`.

    """
    project = io.find_one(
        {"type": "project"}, projection={"config.template.publish": True}
    )
    template = project["config"]["template"]["publish"]

    if isinstance(asset, dict) and "silo" in asset and "name" in asset:
        # Allow explicitly passing an asset document
        asset_doc = asset
    else:
        asset_doc = io.find_one({"name": asset, "type": "asset"})

    path = template.format(
        **{
            "root": api.registered_root(),
            "project": api.Session["AVALON_PROJECT"],
            "silo": asset_doc["silo"],
            "asset": asset_doc["name"],
            "subset": subset,
            "representation": representation,
            "version": 0,  # stub version zero
        }
    )

    # Remove the version folder
    subset_folder = os.path.dirname(os.path.dirname(path))
    master_folder = os.path.join(subset_folder, "master")
    fname = "{0}.{1}".format(subset, representation)

    return os.path.join(master_folder, fname).replace("\\", "/")
def parse_avalon_uri(uri):
    # URI Pattern: avalon://{asset}/{subset}.{ext}
    pattern = r"avalon://(?P<asset>[^/.]*)/(?P<subset>[^/]*)\.(?P<ext>.*)"
    if uri.startswith("avalon://"):
        match = re.match(pattern, uri)
        if match:
            return match.groupdict()
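For example, a URI in that scheme parses into its named components (value invented to match the pattern above):

print(parse_avalon_uri("avalon://hero/usdShadeMain.usd"))
# {'asset': 'hero', 'subset': 'usdShadeMain', 'ext': 'usd'}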

@@ -0,0 +1,153 @@
import os
import json

import hou

from avalon import api, io
from avalon.vendor import requests

import pyblish.api


class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
    """Submit Houdini scene to perform a local publish in Deadline.

    Publishing in Deadline can be helpful for scenes that publish very
    slowly. This way it can process in the background on another machine
    without the artist having to wait for the publish to finish on their
    local machine.

    Submission is done through the Deadline Web Service as
    supplied via the environment variable AVALON_DEADLINE.

    """

    label = "Submit Scene to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["houdini"]
    families = ["*"]
    targets = ["deadline"]

    def process(self, context):

        # Ensure no errors so far
        assert all(
            result["success"] for result in context.data["results"]
        ), "Errors found, aborting integration.."

        # Deadline connection
        AVALON_DEADLINE = api.Session.get(
            "AVALON_DEADLINE", "http://localhost:8082"
        )
        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

        # Note that the `publish` data member might change in the future.
        # See: https://github.com/pyblish/pyblish-base/issues/307
        actives = [i for i in context if i.data["publish"]]
        instance_names = sorted(instance.name for instance in actives)

        if not instance_names:
            self.log.warning(
                "No active instances found. Skipping submission.."
            )
            return

        scene = context.data["currentFile"]
        scenename = os.path.basename(scene)

        # Get project code
        project = io.find_one({"type": "project"})
        code = project["data"].get("code", project["name"])

        job_name = "{scene} [PUBLISH]".format(scene=scenename)
        batch_name = "{code} - {scene}".format(code=code, scene=scenename)
        deadline_user = "roy"  # todo: get deadline user dynamically

        # Get only major.minor version of Houdini, ignore patch version
        version = hou.applicationVersionString()
        version = ".".join(version.split(".")[:2])

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "Houdini",
                "Pool": "houdini",  # todo: remove hardcoded pool
                "BatchName": batch_name,
                "Comment": context.data.get("comment", ""),
                "Priority": 50,
                "Frames": "1-1",  # Always trigger a single frame
                "IsFrameDependent": False,
                "Name": job_name,
                "UserName": deadline_user,
                # "Comment": instance.context.data.get("comment", ""),
                # "InitialStatus": state
            },
            "PluginInfo": {
                "Build": None,  # Don't force build
                "IgnoreInputs": True,
                # Inputs
                "SceneFile": scene,
                "OutputDriver": "/out/REMOTE_PUBLISH",
                # Mandatory for Deadline
                "Version": version,
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        # Process submission per individual instance if the submission
        # is set to publish each instance as a separate job. Else submit
        # a single job to process all instances.
        per_instance = context.data.get("separateJobPerInstance", False)
        if per_instance:
            # Submit a job per instance
            job_name = payload["JobInfo"]["Name"]
            for instance in instance_names:
                # Clarify job name per submission (include instance name)
                payload["JobInfo"]["Name"] = job_name + " - %s" % instance
                self.submit_job(
                    payload, instances=[instance], deadline=AVALON_DEADLINE
                )
        else:
            # Submit a single job
            self.submit_job(
                payload, instances=instance_names, deadline=AVALON_DEADLINE
            )

    def submit_job(self, payload, instances, deadline):

        # Ensure we operate on a copy; a shallow copy is fine.
        payload = payload.copy()

        # Include critical environment variables with submission + api.Session
        keys = [
            # Submit along the current Avalon tool setup that we launched
            # this application with so the render slave can build its own
            # similar environment using it, e.g. "houdini17.5;pluginx2.3"
            "AVALON_TOOLS",
        ]

        environment = dict(
            {key: os.environ[key] for key in keys if key in os.environ},
            **api.Session
        )
        environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances)

        payload["JobInfo"].update(
            {
                "EnvironmentKeyValue%d"
                % index: "{key}={value}".format(
                    key=key, value=environment[key]
                )
                for index, key in enumerate(environment)
            }
        )

        # Submit
        self.log.info("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(deadline)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)
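The EnvironmentKeyValue expansion above is a plain dict comprehension over the merged environment; a self-contained sketch of the same idiom (sample keys and values invented):

environment = {"AVALON_TOOLS": "houdini17.5", "PYBLISH_ACTIVE_INSTANCES": "usdDefault"}
job_info_env = {
    "EnvironmentKeyValue%d" % index: "{key}={value}".format(
        key=key, value=environment[key]
    )
    for index, key in enumerate(environment)
}
# -> {"EnvironmentKeyValue0": "AVALON_TOOLS=houdini17.5",
#     "EnvironmentKeyValue1": "PYBLISH_ACTIVE_INSTANCES=usdDefault"}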

@@ -0,0 +1,158 @@
import os
import json
import getpass

from avalon import api
from avalon.vendor import requests

import pyblish.api

import hou


class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
    """Submit Solaris USD Render ROPs to Deadline.

    Renders are submitted to a Deadline Web Service as
    supplied via the environment variable AVALON_DEADLINE.

    Target "local":
        Even though this does *not* render locally this is seen as
        a 'local' submission as it is the regular way of submitting
        a Houdini render locally.

    """

    label = "Submit Render to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["houdini"]
    families = ["usdrender",
                "redshift_rop"]
    targets = ["local"]

    def process(self, instance):

        context = instance.context
        code = context.data["code"]
        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        deadline_user = context.data.get("deadlineUser", getpass.getuser())
        jobname = "%s - %s" % (filename, instance.name)

        # Support code prefix label for batch name
        batch_name = filename
        if code:
            batch_name = "{0} - {1}".format(code, batch_name)

        # Output driver to render
        driver = instance[0]

        # StartFrame to EndFrame by byFrameStep
        frames = "{start}-{end}x{step}".format(
            start=int(instance.data["startFrame"]),
            end=int(instance.data["endFrame"]),
            step=int(instance.data["byFrameStep"]),
        )

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": batch_name,

                # Job name, as seen in Monitor
                "Name": jobname,

                # Arbitrary username, for visualisation in Monitor
                "UserName": deadline_user,

                "Plugin": "Houdini",
                "Pool": "houdini_redshift",  # todo: remove hardcoded pool
                "Frames": frames,

                "ChunkSize": instance.data.get("chunkSize", 10),

                "Comment": comment
            },
            "PluginInfo": {
                # Input
                "SceneFile": filepath,
                "OutputDriver": driver.path(),

                # Mandatory for Deadline
                # Houdini version without patch number
                "Version": hou.applicationVersionString().rsplit(".", 1)[0],

                "IgnoreInputs": True
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Include critical environment variables with submission + api.Session
        keys = [
            # Submit along the current Avalon tool setup that we launched
            # this application with so the render slave can build its own
            # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9"
            "AVALON_TOOLS",
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        # Include OutputFilename entries
        # The first entry also enables double-click to preview rendered
        # frames from Deadline Monitor
        output_data = {}
        for i, filepath in enumerate(instance.data["files"]):
            dirname = os.path.dirname(filepath)
            fname = os.path.basename(filepath)
            output_data["OutputDirectory%d" % i] = dirname.replace("\\", "/")
            output_data["OutputFilename%d" % i] = fname

            # For now ensure the destination folder exists, otherwise HUSK
            # will fail to render the output image. This is supposedly fixed
            # in newer production builds of Houdini.
            # TODO Remove this workaround with Houdini 18.0.391+
            if not os.path.exists(dirname):
                self.log.info("Ensuring output directory exists: %s" %
                              dirname)
                os.makedirs(dirname)

        payload["JobInfo"].update(output_data)

        self.submit(instance, payload)

    def submit(self, instance, payload):

        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
                                          "http://localhost:8082")
        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

        plugin = payload["JobInfo"]["Plugin"]
        self.log.info("Using Render Plugin : {}".format(plugin))

        self.log.info("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(AVALON_DEADLINE)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Store output dir for unified publisher (filesequence)
        output_dir = os.path.dirname(instance.data["files"][0])
        instance.data["outputDir"] = output_dir
        instance.data["deadlineSubmissionJob"] = response.json()
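The Frames value uses Deadline's "start-endxstep" range syntax; for instance (frame values assumed):

# startFrame=1001, endFrame=1100, byFrameStep=2 renders every second frame.
frames = "{start}-{end}x{step}".format(start=1001, end=1100, step=2)
print(frames)  # 1001-1100x2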
1
openpype/modules/ftrack/python2_vendor/arrow
Submodule

@@ -0,0 +1 @@
Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0

@@ -0,0 +1 @@
Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e

@@ -1,4 +1,46 @@
{
    "create": {
        "CreateAlembicCamera": {
            "enabled": true,
            "defaults": []
        },
        "CreateCompositeSequence": {
            "enabled": true,
            "defaults": []
        },
        "CreatePointCache": {
            "enabled": true,
            "defaults": []
        },
        "CreateRedshiftROP": {
            "enabled": true,
            "defaults": []
        },
        "CreateRemotePublish": {
            "enabled": true,
            "defaults": []
        },
        "CreateVDBCache": {
            "enabled": true,
            "defaults": []
        },
        "CreateUSD": {
            "enabled": false,
            "defaults": []
        },
        "CreateUSDModel": {
            "enabled": false,
            "defaults": []
        },
        "USDCreateShadingWorkspace": {
            "enabled": false,
            "defaults": []
        },
        "CreateUSDRender": {
            "enabled": false,
            "defaults": []
        }
    },
    "publish": {
        "ValidateContainers": {
            "enabled": true,

@@ -5,6 +5,10 @@
"label": "Houdini",
|
||||
"is_file": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_houdini_create"
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
|
|
|
|||
|
|

@@ -0,0 +1,54 @@
{
    "type": "dict",
    "collapsible": true,
    "key": "create",
    "label": "Creator plugins",
    "children": [
        {
            "type": "schema_template",
            "name": "template_create_plugin",
            "template_data": [
                {
                    "key": "CreateAlembicCamera",
                    "label": "Create Alembic Camera"
                },
                {
                    "key": "CreateCompositeSequence",
                    "label": "Create Composite (Image Sequence)"
                },
                {
                    "key": "CreatePointCache",
                    "label": "Create Point Cache"
                },
                {
                    "key": "CreateRedshiftROP",
                    "label": "Create Redshift ROP"
                },
                {
                    "key": "CreateRemotePublish",
                    "label": "Create Remote Publish"
                },
                {
                    "key": "CreateVDBCache",
                    "label": "Create VDB Cache"
                },
                {
                    "key": "CreateUSD",
                    "label": "Create USD"
                },
                {
                    "key": "CreateUSDModel",
                    "label": "Create USD Model"
                },
                {
                    "key": "USDCreateShadingWorkspace",
                    "label": "Create USD Shading Workspace"
                },
                {
                    "key": "CreateUSDRender",
                    "label": "Create USD Render"
                }
            ]
        }
    ]
}
@@ -1 +1 @@
Subproject commit 82d5b8137eea3b49d4781a4af51d7f375bb9f628
Subproject commit 52e24a9993e5223b0a719786e77a4b87e936e556
78
website/docs/artist_hosts_houdini.md
Normal file

@@ -0,0 +1,78 @@
---
id: artist_hosts_houdini
title: Houdini
sidebar_label: Houdini
---

## OpenPype global tools

- [Work Files](artist_tools.md#workfiles)
- [Create](artist_tools.md#creator)
- [Load](artist_tools.md#loader)
- [Manage (Inventory)](artist_tools.md#inventory)
- [Publish](artist_tools.md#publisher)
- [Library Loader](artist_tools.md#library-loader)

## Publishing Alembic Cameras
You can publish a baked camera in Alembic format. Select your camera and go to **OpenPype -> Create** and select **Camera (abc)**.
This will create an Alembic ROP in **out** with the path and frame range already set. This node will have the name you've
assigned in the **Creator** menu. For example, if you name the subset `Default`, the output Alembic Driver will be named
`cameraDefault`. After that, you can **OpenPype -> Publish** and after some validations your camera will be published
to an `abc` file.

## Publishing Composites - Image Sequences
You can publish an image sequence directly from Houdini. You can use any `cop` network you have and publish the image
sequence generated from it. For example, I've created a simple **cop** graph to generate some noise:


If I want to publish it, I'll select the node I like - in this case `radialblur1` - and go to **OpenPype -> Create** and
select **Composite (Image Sequence)**. This will create the `/out/imagesequenceNoise` Composite ROP (I've named my subset
*Noise*) with the frame range set. When you hit **Publish** it will render the image sequence from the selected node.

## Publishing Point Caches (alembic)
Publishing point caches in Alembic format is pretty straightforward, but by default it enforces better compatibility
with other DCCs, so the exported data needs to be prepared in a certain way. You need to add a `path` attribute so objects
in the alembic are better structured. When using an alembic round trip in Houdini (loading alembics, modifying them and
then publishing the modifications), `path` is automatically resolved by the alembic nodes.

In this example, I've created this node graph on **sop** level, and I want to publish it as a point cache.



*Note: `connectivity` will add an index for each primitive and `primitivewrangle1` will add the `path` attribute, so each
primitive (`sphere1` and `sphere2`) gets a path the way Maya expects it - `strange_GRP/strange0_GEO/strange0_GEOShape`. How
you handle the `path` attribute is up to you; this is just an example.*

Now select the `output0` node, go to **OpenPype -> Create** and select **Point Cache**. It will create the
Alembic ROP `/out/pointcacheStrange`.


## Redshift
:::note Work in progress
This part of the documentation is still work in progress.
:::

## USD (experimental support)
### Publishing USD
You can publish your Solaris Stage as a USD file.


This is a very simple test stage. I've selected the `output` **lop** node and went to **OpenPype -> Create**, where I
selected **USD**. This created the `/out/usdDefault` USD ROP node.

### Publishing USD render

USD Render works in a similar manner as the USD file, except it will create a **USD Render** ROP node in **out** and will
publish the images produced by it. If you have a node selected in the Solaris Stage, it will be added as the **lop path**
on the ROP.

## Publishing VDB

Publishing VDB files works as with other data types. In this example, I've created a simple PyroFX explosion from a
sphere. In `pyro_import` I've converted the volume to VDB:



I've selected `vdb1` and went to **OpenPype -> Create** and selected **VDB Cache**. This will create a
Geometry ROP in `/out` and set its paths to output VDB files. During the publishing process
the whole DOP network is cooked.
BIN
website/docs/assets/houdini_imagesequence_cop.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 19 KiB

BIN
website/docs/assets/houdini_pointcache_path.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 79 KiB

BIN
website/docs/assets/houdini_usd_stage.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 744 KiB

BIN
website/docs/assets/houdini_vdb_setup.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 28 KiB

@@ -22,6 +22,7 @@ module.exports = {
        "artist_hosts_maya",
        "artist_hosts_blender",
        "artist_hosts_harmony",
        "artist_hosts_houdini",
        "artist_hosts_aftereffects",
        "artist_hosts_resolve",
        "artist_hosts_photoshop",