removed houdini addon

This commit is contained in:
Jakub Trllo 2024-07-02 16:41:45 +02:00
parent 2e732629ed
commit cc13a565c1
165 changed files with 0 additions and 17002 deletions

View file

@ -1,13 +0,0 @@
# Package entry point for the Houdini addon: re-export the addon class,
# its version string and the host directory constant.
from .version import __version__
from .addon import (
    HoudiniAddon,
    HOUDINI_HOST_DIR,
)

# Public API of this package.
__all__ = (
    "__version__",
    "HoudiniAddon",
    "HOUDINI_HOST_DIR",
)

View file

@ -1,54 +0,0 @@
import os
from ayon_core.addon import AYONAddon, IHostAddon
from .version import __version__
# Root directory of the Houdini integration package.
HOUDINI_HOST_DIR = os.path.dirname(os.path.abspath(__file__))


class HoudiniAddon(AYONAddon, IHostAddon):
    """AYON addon exposing Houdini as a host application."""

    name = "houdini"
    version = __version__
    host_name = "houdini"

    def add_implementation_envs(self, env, _app):
        """Prepend the integration's startup dir to Houdini search paths.

        Updates ``HOUDINI_PATH`` and ``HOUDINI_MENU_PATH`` in ``env`` so the
        integration's startup scripts and menus are found first, keeping any
        pre-existing entries (normalized, de-duplicated) after it.

        Args:
            env (dict): Mutable launch environment to update in place.
            _app: Application entity (unused).
        """
        startup_path = os.path.join(HOUDINI_HOST_DIR, "startup")

        def _prepend_startup(var_name):
            # Startup path first, then unique normalized existing entries.
            paths = [startup_path]
            for entry in (env.get(var_name) or "").split(os.pathsep):
                if not entry:
                    continue
                normalized = os.path.normpath(entry)
                if normalized not in paths:
                    paths.append(normalized)
            # Trailing ampersand kept from original implementation
            # (presumably tells Houdini to also use its default paths).
            paths.append("&")
            env[var_name] = os.pathsep.join(paths)

        _prepend_startup("HOUDINI_PATH")
        _prepend_startup("HOUDINI_MENU_PATH")

    def get_launch_hook_paths(self, app):
        """Return launch-hook directories, but only for Houdini launches."""
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(HOUDINI_HOST_DIR, "hooks")
        ]

    def get_workfile_extensions(self):
        """Return the Houdini workfile extensions (commercial/LC/NC)."""
        return [".hip", ".hiplc", ".hipnc"]

View file

@ -1,28 +0,0 @@
from .pipeline import (
HoudiniHost,
ls,
containerise
)
from .lib import (
lsattr,
lsattrs,
read,
maintained_selection
)
# Public API of the `ayon_houdini.api` package.
__all__ = [
    "HoudiniHost",
    "ls",
    "containerise",

    # Utility functions
    "lsattr",
    "lsattrs",
    "read",
    "maintained_selection"
]

View file

@ -1,83 +0,0 @@
import pyblish.api
import hou
from ayon_core.pipeline.publish import get_errored_instances_from_context
class SelectInvalidAction(pyblish.api.Action):
    """Select invalid nodes in Houdini when a plug-in failed.

    Assumes the failing plug-in exposes a static ``get_invalid()`` method
    that returns the offending nodes for an instance.
    """

    label = "Select invalid"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(
            context, plugin=plugin
        )

        # Collect the invalid nodes of every errored instance.
        self.log.info("Finding invalid nodes..")
        invalid = []
        for instance in errored_instances:
            nodes = plugin.get_invalid(instance)
            if not nodes:
                continue
            if isinstance(nodes, (list, tuple)):
                invalid.extend(nodes)
            else:
                self.log.warning("Plug-in returned to be invalid, "
                                 "but has no selectable nodes.")

        hou.clearAllSelected()
        if not invalid:
            self.log.info("No invalid nodes found.")
            return

        self.log.info("Selecting invalid nodes: {}".format(
            ", ".join(node.path() for node in invalid)
        ))
        for node in invalid:
            node.setSelected(True)
            node.setCurrent(True)
class SelectROPAction(pyblish.api.Action):
    """Select ROP.

    Selects the ROP nodes associated with the errored instances.
    """

    label = "Select ROP"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "mdi.cursor-default-click"

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(context, plugin)

        # Resolve each errored instance to its ROP node, if it has one.
        self.log.info("Finding ROP nodes..")
        rop_nodes = []
        for instance in errored_instances:
            node_path = instance.data.get("instance_node")
            if not node_path:
                continue
            rop = hou.node(node_path)
            if rop:
                rop_nodes.append(rop)

        hou.clearAllSelected()
        if not rop_nodes:
            self.log.info("No ROP nodes found.")
            return

        self.log.info("Selecting ROP nodes: {}".format(
            ", ".join(node.path() for node in rop_nodes)
        ))
        for rop in rop_nodes:
            rop.setSelected(True)
            rop.setCurrent(True)

View file

@ -1,69 +0,0 @@
import attr
import hou
from ayon_houdini.api.lib import get_color_management_preferences
from ayon_core.pipeline.colorspace import get_display_view_colorspace_name
@attr.s
class LayerMetadata(object):
    """Data class for Render Layer metadata."""
    # Start/end frame of the render range (filled from the Houdini playbar
    # by `ARenderProduct._get_layer_data`).
    frameStart = attr.ib()
    frameEnd = attr.ib()
@attr.s
class RenderProduct(object):
    """Getting Colorspace as
    Specific Render Product Parameter for submitting
    publish job.
    """
    colorspace = attr.ib()  # colorspace
    view = attr.ib()  # OCIO view name
    productName = attr.ib(default=None)  # product name, may stay None
class ARenderProduct(object):
    """Collects render layer metadata and its render products.

    On construction reads the frame range from the current playbar and
    attaches colorspace-based render products to the layer data.
    """

    def __init__(self):
        """Constructor."""
        # Initialize
        self.layer_data = self._get_layer_data()
        self.layer_data.products = self.get_colorspace_data()

    def _get_layer_data(self):
        # Frame range is taken from the session's playbar range.
        return LayerMetadata(
            frameStart=int(hou.playbar.frameRange()[0]),
            frameEnd=int(hou.playbar.frameRange()[1]),
        )

    def get_colorspace_data(self):
        """To be implemented by renderer class.

        This should return a list of RenderProducts.

        Returns:
            list: List of RenderProduct
        """
        data = get_color_management_preferences()
        colorspace_data = [
            RenderProduct(
                colorspace=data["display"],
                view=data["view"],
                productName=""
            )
        ]
        return colorspace_data
def get_default_display_view_colorspace():
    """Return the colorspace of the default (display, view) pair.

    It's used for 'ociocolorspace' parm in OpenGL Node.
    """
    preferences = get_color_management_preferences()
    config_path = preferences["config"]
    display = preferences["display"]
    view = preferences["view"]
    return get_display_view_colorspace_name(
        config_path=config_path,
        display=display,
        view=view,
    )

View file

@ -1,244 +0,0 @@
"""Library to register OpenPype Creators for Houdini TAB node search menu.
This can be used to install custom houdini tools for the TAB search
menu which will trigger a publish instance to be created interactively.
The Creators are automatically registered on launch of Houdini through the
Houdini integration's `host.install()` method.
"""
import contextlib
import tempfile
import logging
import os
import ayon_api
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.create import CreateContext
from ayon_core.resources import get_ayon_icon_filepath
import hou
import stateutils
import soptoolutils
import loptoolutils
import cop2toolutils
log = logging.getLogger(__name__)

# Maps a Houdini node-type category to the tool helper that creates a node
# of that category in the network editor.
CATEGORY_GENERIC_TOOL = {
    hou.sopNodeTypeCategory(): soptoolutils.genericTool,
    hou.cop2NodeTypeCategory(): cop2toolutils.genericTool,
    hou.lopNodeTypeCategory(): loptoolutils.genericTool
}

# Python snippet stored on each generated shelf tool; on use it triggers the
# interactive create for the formatted creator identifier.
CREATE_SCRIPT = """
from ayon_houdini.api.creator_node_shelves import create_interactive
create_interactive("{identifier}", **kwargs)
"""
def create_interactive(creator_identifier, **kwargs):
    """Create a Creator using its identifier interactively.

    This is used by the generated shelf tools as callback when a user selects
    the creator from the node tab search menu.

    The `kwargs` should be what Houdini passes to the tool create scripts
    context. For more information see:
    https://www.sidefx.com/docs/houdini/hom/tool_script.html#arguments

    Args:
        creator_identifier (str): The creator identifier of the Creator plugin
            to create.

    Return:
        list: The created instances.
    """
    host = registered_host()
    context = CreateContext(host)
    creator = context.manual_creators.get(creator_identifier)
    if not creator:
        raise RuntimeError("Invalid creator identifier: {}".format(
            creator_identifier)
        )

    # Prompt the artist for the variant name.
    # TODO Use Qt instead
    result, variant = hou.ui.readInput(
        "Define variant name",
        buttons=("Ok", "Cancel"),
        initial_contents=creator.get_default_variant(),
        title="Define variant",
        help="Set the variant for the publish instance",
        close_choice=1
    )

    if result == 1:
        # User interrupted
        return
    variant = variant.strip()
    if not variant:
        raise RuntimeError("Empty variant value entered.")

    # TODO: Once more elaborate unique create behavior should exist per
    #   Creator instead of per network editor area then we should move this
    #   from here to a method on the Creators for which this could be the
    #   default implementation.
    pane = stateutils.activePane(kwargs)
    if isinstance(pane, hou.NetworkEditor):
        pwd = pane.pwd()
        project_name = context.get_current_project_name()
        folder_path = context.get_current_folder_path()
        task_name = context.get_current_task_name()
        folder_entity = ayon_api.get_folder_by_path(
            project_name, folder_path
        )
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], task_name
        )
        # Resolve the product name the creator would generate so the
        # convenience output null can be named after it.
        product_name = creator.get_product_name(
            project_name=context.get_current_project_name(),
            folder_entity=folder_entity,
            task_entity=task_entity,
            variant=variant,
            host_name=context.host_name,
        )

        tool_fn = CATEGORY_GENERIC_TOOL.get(pwd.childTypeCategory())
        if tool_fn is not None:
            # Create a null node at the interaction position as a visual
            # output anchor for the new publish instance.
            out_null = tool_fn(kwargs, "null")
            out_null.setName("OUT_{}".format(product_name), unique_name=True)

    # Snapshot current instances so the newly created ones can be diffed.
    before = context.instances_by_id.copy()

    # Create the instance
    context.create(
        creator_identifier=creator_identifier,
        variant=variant,
        pre_create_data={"use_selection": True}
    )

    # For convenience we set the new node as current since that's much more
    # familiar to the artist when creating a node interactively
    # TODO Allow to disable auto-select in studio settings or user preferences
    after = context.instances_by_id
    new = set(after) - set(before)
    if new:
        # Select the new instance
        for instance_id in new:
            instance = after[instance_id]
            node = hou.node(instance.get("instance_node"))
            node.setCurrent(True)

    return list(new)
@contextlib.contextmanager
def shelves_change_block():
    """Write shelf changes at the end of the context.

    Batches all `hou.shelves` edits made inside the context into a single
    write of the shelf files.
    """
    hou.shelves.beginChangeBlock()
    try:
        yield
    finally:
        # Always end the change block, even when the body raised.
        hou.shelves.endChangeBlock()
def install():
    """Install the Creator plug-ins to show in Houdini's TAB node search menu.

    This function is re-entrant and can be called again to reinstall and
    update the node definitions. For example during development it can be
    useful to call it manually:
        >>> from ayon_houdini.api.creator_node_shelves import install
        >>> install()

    Returns:
        list: List of `hou.Tool` instances
    """
    host = registered_host()

    # Store the filepath on the host
    # TODO: Define a less hacky static shelf path for current houdini session
    filepath_attr = "_creator_node_shelf_filepath"
    filepath = getattr(host, filepath_attr, None)
    if filepath is None:
        # First install in this session: generate a temporary .shelf file
        # that lives for the duration of the Houdini session.
        f = tempfile.NamedTemporaryFile(prefix="houdini_creator_nodes_",
                                        suffix=".shelf",
                                        delete=False)
        f.close()
        filepath = f.name
        setattr(host, filepath_attr, filepath)
    elif os.path.exists(filepath):
        # Remove any existing shelf file so that we can completey regenerate
        # and update the tools file if creator identifiers change
        os.remove(filepath)

    icon = get_ayon_icon_filepath()
    tab_menu_label = os.environ.get("AYON_MENU_LABEL") or "AYON"

    # Create context only to get creator plugins, so we don't reset and only
    # populate what we need to retrieve the list of creator plugins
    create_context = CreateContext(host, reset=False)
    create_context.reset_current_context()
    create_context._reset_creator_plugins()

    log.debug("Writing OpenPype Creator nodes to shelf: {}".format(filepath))
    tools = []
    with shelves_change_block():
        for identifier, creator in create_context.manual_creators.items():

            # Allow the creator plug-in itself to override the categories
            # for where they are shown with `Creator.get_network_categories()`
            if not hasattr(creator, "get_network_categories"):
                # Fix: the identifier was never formatted into the message,
                # so the log printed a literal "{}" placeholder.
                log.debug("Creator {} has no `get_network_categories` method "
                          "and will not be added to TAB search."
                          .format(identifier))
                continue

            network_categories = creator.get_network_categories()
            if not network_categories:
                continue

            key = "ayon_create.{}".format(identifier)
            log.debug(f"Registering {key}")
            script = CREATE_SCRIPT.format(identifier=identifier)
            data = {
                "script": script,
                "language": hou.scriptLanguage.Python,
                "icon": icon,
                "help": "Create Ayon publish instance for {}".format(
                    creator.label
                ),
                "help_url": None,
                "network_categories": network_categories,
                "viewer_categories": [],
                "cop_viewer_categories": [],
                "network_op_type": None,
                "viewer_op_type": None,
                "locations": [tab_menu_label]
            }
            label = "Create {}".format(creator.label)
            tool = hou.shelves.tool(key)
            if tool:
                # Update the existing tool definition in place.
                tool.setData(**data)
                tool.setLabel(label)
            else:
                tool = hou.shelves.newTool(
                    file_path=filepath,
                    name=key,
                    label=label,
                    **data
                )

            tools.append(tool)

    # Ensure the shelf is reloaded
    hou.shelves.loadFile(filepath)

    return tools

View file

@ -1,593 +0,0 @@
"""Helper functions for load HDA"""
import os
import contextlib
import uuid
from typing import List
import ayon_api
from ayon_api import (
get_project,
get_representation_by_id,
get_versions,
get_folder_by_path,
get_product_by_name,
get_version_by_name,
get_representation_by_name
)
from ayon_core.pipeline.load import (
get_representation_context,
get_representation_path_from_context
)
from ayon_core.pipeline.context_tools import (
get_current_project_name,
get_current_folder_path
)
from ayon_core.tools.utils import SimpleFoldersWidget
from ayon_core.style import load_stylesheet
from ayon_houdini.api import lib
from qtpy import QtCore, QtWidgets
import hou
def is_valid_uuid(value) -> bool:
    """Return whether value is a valid UUID.

    Args:
        value: Value to test, usually a representation id string.

    Returns:
        bool: True when the value parses as a UUID, False otherwise —
            including non-string inputs such as None.
    """
    try:
        uuid.UUID(value)
    except (ValueError, TypeError, AttributeError):
        # ValueError: malformed UUID string.
        # TypeError/AttributeError: value is not a string at all (e.g. None
        # or an int); callers only branch on the boolean, so treat these as
        # "not a valid UUID" instead of raising.
        return False
    return True
@contextlib.contextmanager
def _unlocked_parm(parm):
"""Unlock parm during context; will always lock after"""
try:
parm.lock(False)
yield
finally:
parm.lock(True)
def get_available_versions(node):
    """Return the versions list for node.

    The versions are sorted with the latest version first and oldest lower
    version last.

    Args:
        node (hou.Node): Node to query selected products' versions for.

    Returns:
        list[int]: Version numbers for the product
    """
    project_name = node.evalParm("project_name") or get_current_project_name()
    folder_path = node.evalParm("folder_path")
    product_name = node.evalParm("product_name")

    # All three values are required to resolve the product.
    if not (project_name and folder_path and product_name):
        return []

    folder_entity = get_folder_by_path(
        project_name,
        folder_path,
        fields={"id"})
    if not folder_entity:
        return []

    product_entity = get_product_by_name(
        project_name,
        product_name=product_name,
        folder_id=folder_entity["id"],
        fields={"id"})
    if not product_entity:
        return []

    # TODO: Support hero versions
    versions = get_versions(
        project_name,
        product_ids={product_entity["id"]},
        fields={"version"},
        hero=False)
    version_numbers = [entity["version"] for entity in versions]
    # Flip so the latest version comes first.
    return version_numbers[::-1]
def update_info(node, context):
    """Update project, folder, product, version, representation name parms.

    Only parms whose *evaluated* value differs from the representation
    context are written, so a user expression that still resolves to the
    correct value is preserved untouched.

    Arguments:
        node (hou.Node): Node to update
        context (dict): Context of representation
    """
    # TODO: Avoid 'duplicate' taking over the expression if originally
    #   it was $OS and by duplicating, e.g. the `folder` does not exist
    #   anymore since it is now `hero1` instead of `hero`
    # TODO: Support hero versions
    desired = {
        "project_name": context["project"]["name"],
        "folder_path": context["folder"]["path"],
        "product_name": context["product"]["name"],
        "version": str(context["version"]["version"]),
        "representation_name": context["representation"]["name"],
    }
    changed = {
        key: value
        for key, value in desired.items()
        if node.evalParm(key) != value
    }
    changed["load_message"] = ""  # clear any warnings/errors

    # Note that these never trigger any parm callbacks since we do not
    # trigger the `parm.pressButton` and programmatically setting values
    # in Houdini does not trigger callbacks automatically
    node.setParms(changed)
def _get_thumbnail(project_name: str, version_id: str, thumbnail_dir: str):
    """Return a cached thumbnail path for a version, downloading if needed.

    Args:
        project_name (str): Project the version belongs to.
        version_id (str): Version entity id to get the thumbnail for.
        thumbnail_dir (str): Cache directory; may contain Houdini variables
            (expanded via `hou.text.expandString`).

    Returns:
        Optional[str]: Unexpanded path to the cached thumbnail file, or
            None when the server has no thumbnail for the version.
    """
    folder = hou.text.expandString(thumbnail_dir)
    path = os.path.join(folder, "{}_thumbnail.jpg".format(version_id))
    expanded_path = hou.text.expandString(path)
    # Re-use an existing cached file.
    if os.path.isfile(expanded_path):
        return path

    # Try and create a thumbnail cache file
    data = ayon_api.get_thumbnail(project_name,
                                  entity_type="version",
                                  entity_id=version_id)
    if data:
        thumbnail_dir_expanded = hou.text.expandString(thumbnail_dir)
        os.makedirs(thumbnail_dir_expanded, exist_ok=True)
        with open(expanded_path, "wb") as f:
            f.write(data.content)

        return path
def set_representation(node, representation_id: str):
    """Resolve the representation and apply it to the node.

    Sets the (locked) `file` parm to the representation's resolved path,
    updates the context parms and optionally the thumbnail. An empty id
    clears the node; unresolvable ids are ignored silently.

    Args:
        node (hou.Node): HDA node to update.
        representation_id (str): Representation id to resolve.
    """
    file_parm = node.parm("file")
    if not representation_id:
        # Clear filepath and thumbnail
        with _unlocked_parm(file_parm):
            file_parm.set("")
        set_node_thumbnail(node, None)
        return

    project_name = (
        node.evalParm("project_name")
        or get_current_project_name()
    )

    # Ignore invalid representation ids silently
    # TODO remove - added for backwards compatibility with OpenPype scenes
    if not is_valid_uuid(representation_id):
        return

    repre_entity = get_representation_by_id(project_name, representation_id)
    if not repre_entity:
        return

    context = get_representation_context(project_name, repre_entity)
    update_info(node, context)
    path = get_representation_path_from_context(context)
    # Load fails on UNC paths with backslashes and also
    # fails to resolve @sourcename var with backslashed
    # paths correctly. So we force forward slashes
    path = path.replace("\\", "/")
    with _unlocked_parm(file_parm):
        file_parm.set(path)

    if node.evalParm("show_thumbnail"):
        # Update thumbnail
        # TODO: Cache thumbnail path as well
        version_id = repre_entity["versionId"]
        thumbnail_dir = node.evalParm("thumbnail_cache_dir")
        thumbnail_path = _get_thumbnail(
            project_name, version_id, thumbnail_dir
        )
        set_node_thumbnail(node, thumbnail_path)
def set_node_thumbnail(node, thumbnail):
    """Set or clear the thumbnail image attached to `node`.

    Args:
        node (hou.Node): Node to attach the thumbnail to.
        thumbnail: Path to the thumbnail image, or None to remove the
            node's current thumbnail.
    """
    if thumbnail is None:
        lib.set_node_thumbnail(node, None)
        # Fix: without this early return the code below ran as well and
        # called `lib.set_node_thumbnail(node, None, rect)` a second time,
        # needlessly recomputing a rect right after clearing the thumbnail.
        return

    rect = compute_thumbnail_rect(node)
    lib.set_node_thumbnail(node, thumbnail, rect)
def compute_thumbnail_rect(node):
    """Compute thumbnail bounding rect based on thumbnail parms."""
    x_offset = node.evalParm("thumbnail_offsetx")
    y_offset = node.evalParm("thumbnail_offsety")
    width = node.evalParm("thumbnail_size")

    # todo: compute height from aspect of actual image file.
    height = width * 0.5625  # for now assume a 16:9 aspect ratio

    center = 0.5
    left = x_offset + center - width * .5
    right = x_offset + center + width * .5
    return hou.BoundingRect(left, y_offset, right, y_offset + height)
def on_thumbnail_show_changed(node):
    """Callback on thumbnail show parm changed"""
    if not node.evalParm("show_thumbnail"):
        # Thumbnails disabled: drop all attached thumbnail images.
        lib.remove_all_thumbnails(node)
        return
    # For now, update all
    on_representation_id_changed(node)
def on_thumbnail_size_changed(node):
    """Callback on thumbnail offset or size parms changed"""
    image = lib.get_node_thumbnail(node)
    if not image:
        return
    # Re-fit the existing thumbnail to the new rect.
    image.setRect(compute_thumbnail_rect(node))
    lib.set_node_thumbnail(node, image)
def on_representation_id_changed(node):
    """Callback on representation id changed

    Re-resolves the representation and updates the node's file path,
    context parms and (optionally) its thumbnail.

    Args:
        node (hou.Node): Node to update.
    """
    repre_id = node.evalParm("representation")
    set_representation(node, repre_id)
def on_representation_parms_changed(node):
    """Re-resolve the representation id from the context parms.

    Usually used as callback to the project, folder, product, version and
    representation parms which on change - would result in a different
    representation id to be resolved.

    Args:
        node (hou.Node): Node to update.
    """
    project_name = node.evalParm("project_name") or get_current_project_name()
    representation_id = get_representation_id(
        project_name=project_name,
        folder_path=node.evalParm("folder_path"),
        product_name=node.evalParm("product_name"),
        version=node.evalParm("version"),
        representation_name=node.evalParm("representation_name"),
        load_message_parm=node.parm("load_message")
    )
    if representation_id is None:
        representation_id = ""
    else:
        representation_id = str(representation_id)

    # Only write (and trigger the load callback) on an actual change.
    if node.evalParm("representation") != representation_id:
        node.parm("representation").set(representation_id)
        node.parm("representation").pressButton()  # trigger callback
def get_representation_id(
        project_name,
        folder_path,
        product_name,
        version,
        representation_name,
        load_message_parm,
):
    """Get representation id.

    Resolves the entity chain project -> folder -> product -> version ->
    representation; the first missing link is reported to
    `load_message_parm` and None is returned.

    Args:
        project_name (str): Project name
        folder_path (str): Folder name
        product_name (str): Product name
        version (str): Version name as string
        representation_name (str): Representation name
        load_message_parm (hou.Parm): A string message parm to report
            any error messages to.

    Returns:
        Optional[str]: Representation id or None if not found.
    """
    if not all([
        project_name, folder_path, product_name, version, representation_name
    ]):
        # Report exactly which of the required values were empty.
        labels = {
            "project": project_name,
            "folder": folder_path,
            "product": product_name,
            "version": version,
            "representation": representation_name
        }
        missing = ", ".join(key for key, value in labels.items() if not value)
        load_message_parm.set(f"Load info incomplete. Found empty: {missing}")
        return

    try:
        version = int(version.strip())
    except ValueError:
        load_message_parm.set(f"Invalid version format: '{version}'\n"
                              "Make sure to set a valid version number.")
        return

    folder_entity = get_folder_by_path(project_name,
                                       folder_path=folder_path,
                                       fields={"id"})
    if not folder_entity:
        # This may be due to the project not existing - so let's validate
        # that first
        if not get_project(project_name):
            load_message_parm.set(f"Project not found: '{project_name}'")
            return
        load_message_parm.set(f"Folder not found: '{folder_path}'")
        return
    product_entity = get_product_by_name(
        project_name,
        product_name=product_name,
        folder_id=folder_entity["id"],
        fields={"id"})
    if not product_entity:
        load_message_parm.set(f"Product not found: '{product_name}'")
        return
    version_entity = get_version_by_name(
        project_name,
        version,
        product_id=product_entity["id"],
        fields={"id"})
    if not version_entity:
        load_message_parm.set(f"Version not found: '{version}'")
        return
    representation_entity = get_representation_by_name(
        project_name,
        representation_name,
        version_id=version_entity["id"],
        fields={"id"})
    if not representation_entity:
        load_message_parm.set(
            f"Representation not found: '{representation_name}'.")
        return
    return representation_entity["id"]
def setup_flag_changed_callback(node):
    """Register flag changed callback (for thumbnail brightness)

    Args:
        node (hou.Node): Node to watch for flag changes.
    """
    node.addEventCallback(
        (hou.nodeEventType.FlagChanged,),
        on_flag_changed
    )
def on_flag_changed(node, **kwargs):
    """On node flag changed callback.

    Updates the brightness of attached thumbnails
    """
    # Showing thumbnail is disabled so can return early since
    # there should be no thumbnail to update.
    if not node.evalParm('show_thumbnail'):
        return

    # Update node thumbnails brightness with the
    # bypass state of the node.
    parent = node.parent()
    images = lib.get_background_images(parent)
    if not images:
        return

    # Dim bypassed nodes to 30% brightness.
    brightness = 0.3 if node.isBypassed() else 1.0
    node_path = node.path()
    updated = False
    for image in images:
        if image.relativeToPath() != node_path:
            continue
        image.setBrightness(brightness)
        updated = True

    if updated:
        lib.set_background_images(parent, images)
def keep_background_images_linked(node, old_name):
    """Reconnect background images to node from old name.

    Used as callback on node name changes to keep thumbnails linked.
    """
    from ayon_houdini.api.lib import (
        get_background_images,
        set_background_images
    )

    parent = node.parent()
    images = get_background_images(parent)
    if not images:
        return

    previous_path = f"{parent.path()}/{old_name}"
    relinked = False
    for image in images:
        if image.relativeToPath() == previous_path:
            # Re-anchor the image to the node's new path.
            image.setRelativeToPath(node.path())
            relinked = True

    if relinked:
        set_background_images(parent, images)
class SelectFolderPathDialog(QtWidgets.QDialog):
    """Simple dialog to allow a user to select project and asset."""

    def __init__(self, parent=None):
        super(SelectFolderPathDialog, self).__init__(parent)
        self.setWindowTitle("Set project and folder path")
        self.setStyleSheet(load_stylesheet())

        # Project picker, name filter and the folders tree.
        project_widget = QtWidgets.QComboBox()
        project_widget.addItems(self.get_projects())
        filter_widget = QtWidgets.QLineEdit()
        filter_widget.setPlaceholderText("Folder name filter...")
        folder_widget = SimpleFoldersWidget(parent=self)
        accept_button = QtWidgets.QPushButton("Accept")

        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.addWidget(project_widget, 0)
        main_layout.addWidget(filter_widget, 0)
        main_layout.addWidget(folder_widget, 1)  # folders take the stretch
        main_layout.addWidget(accept_button, 0)

        self.project_widget = project_widget
        self.folder_widget = folder_widget

        # Wire interactions; double-clicking a folder accepts the dialog.
        project_widget.currentTextChanged.connect(self.on_project_changed)
        filter_widget.textChanged.connect(folder_widget.set_name_filter)
        folder_widget.double_clicked.connect(self.accept)
        accept_button.clicked.connect(self.accept)

    def get_selected_folder_path(self) -> str:
        # Folder path currently selected in the folders widget.
        return self.folder_widget.get_selected_folder_path()

    def get_selected_project_name(self) -> str:
        # Project name currently selected in the combobox.
        return self.project_widget.currentText()

    def get_projects(self) -> List[str]:
        """Return the names of all projects available on the server."""
        projects = ayon_api.get_projects(fields=["name"])
        return [p["name"] for p in projects]

    def on_project_changed(self, project_name: str):
        """Refresh the folders widget for the newly selected project."""
        self.folder_widget.set_project_name(project_name)

    def set_project_name(self, project_name: str):
        """Pre-select `project_name` in the dialog, if it exists."""
        self.project_widget.setCurrentText(project_name)

        if self.project_widget.currentText() != project_name:
            # Project does not exist
            return

        # Force the set of widget because even though a callback exist on the
        # project widget it may have been initialized to that value and hence
        # detect no change.
        self.folder_widget.set_project_name(project_name)
def select_folder_path(node):
    """Show dialog to select folder path.

    When triggered it opens a dialog that shows the available
    folder paths within a given project.

    Note:
        This function should be refactored.
        It currently shows the available
        folder paths within the current project only.

    Args:
        node (hou.OpNode): The HDA node.
    """
    main_window = lib.get_main_window()
    project_name = node.evalParm("project_name")
    folder_path = node.evalParm("folder_path")

    dialog = SelectFolderPathDialog(parent=main_window)
    dialog.set_project_name(project_name)
    if folder_path:
        # We add a small delay to the setting of the selected folder
        # because the folder widget's set project logic itself also runs
        # with a bit of a delay, and unfortunately otherwise the project
        # has not been selected yet and thus selection does not work.
        def _select_folder_path():
            dialog.folder_widget.set_selected_folder_path(folder_path)
        QtCore.QTimer.singleShot(100, _select_folder_path)

    dialog.setStyleSheet(load_stylesheet())

    result = dialog.exec_()
    if result != QtWidgets.QDialog.Accepted:
        return

    # Set project
    selected_project_name = dialog.get_selected_project_name()
    if selected_project_name == get_current_project_name():
        # Prefer the dynamic context variable when it matches the current
        # project, so the HDA keeps following context changes.
        selected_project_name = '$AYON_PROJECT_NAME'

    project_parm = node.parm("project_name")
    project_parm.set(selected_project_name)
    project_parm.pressButton()  # allow any callbacks to trigger

    # Set folder path
    selected_folder_path = dialog.get_selected_folder_path()
    if not selected_folder_path:
        # Do nothing if user accepted with nothing selected
        return

    if selected_folder_path == get_current_folder_path():
        # Same rationale as for the project name above.
        selected_folder_path = '$AYON_FOLDER_PATH'

    folder_parm = node.parm("folder_path")
    folder_parm.set(selected_folder_path)
    folder_parm.pressButton()  # allow any callbacks to trigger
def get_available_products(node):
    """Return products menu items

    It gets a list of available products of the specified product types
    within the specified folder path with in the specified project.
    Users can specify those in the HDA parameters.

    Args:
        node (hou.OpNode): The HDA node.

    Returns:
        list[str]: Product names for Products menu.
    """
    project_name = node.evalParm("project_name")
    folder_path = node.evalParm("folder_path")
    product_type = node.evalParm("product_type")

    folder_entity = ayon_api.get_folder_by_path(
        project_name, folder_path, fields={"id"})
    if not folder_entity:
        return []

    product_entities = ayon_api.get_products(
        project_name,
        folder_ids=[folder_entity["id"]],
        product_types=[product_type]
    )
    return [entity["name"] for entity in product_entities]
def set_to_latest_version(node):
    """Callback on product name change

    Refresh version parameter value by setting its value to
    the latest version of the selected product.

    Args:
        node (hou.OpNode): The HDA node.
    """
    versions = get_available_versions(node)
    if not versions:
        return
    # First entry is the latest version.
    node.parm("version").set(str(versions[0]))

File diff suppressed because it is too large Load diff

View file

@ -1,449 +0,0 @@
# -*- coding: utf-8 -*-
"""Pipeline tools for OpenPype Houdini integration."""
import os
import json
import logging
import hou # noqa
from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
from ayon_core.tools.utils import host_tools
import pyblish.api
from ayon_core.pipeline import (
register_creator_plugin_path,
register_loader_plugin_path,
register_inventory_action_path,
AVALON_CONTAINER_ID,
AYON_CONTAINER_ID,
)
from ayon_core.pipeline.load import any_outdated_containers
from ayon_houdini import HOUDINI_HOST_DIR
from ayon_houdini.api import lib, shelves, creator_node_shelves
from ayon_core.lib import (
register_event_callback,
emit_event,
env_value_to_bool,
)
from .lib import JSON_PREFIX
log = logging.getLogger("ayon_houdini")

# Parent node holding all loaded (containerised) subnets.
AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS"
# Hidden subnet storing publish context metadata.
CONTEXT_CONTAINER = "/obj/OpenPypeContext"
# `hou.ui` only exists when Houdini runs with a UI (not hython/batch).
IS_HEADLESS = not hasattr(hou, "ui")

# Plug-in discovery directories of this integration.
PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")

# Track whether the workfile tool is about to save
_about_to_save = False
class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
name = "houdini"
def __init__(self):
super(HoudiniHost, self).__init__()
self._op_events = {}
self._has_been_setup = False
def install(self):
pyblish.api.register_host("houdini")
pyblish.api.register_host("hython")
pyblish.api.register_host("hpython")
pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
log.info("Installing callbacks ... ")
# register_event_callback("init", on_init)
self._register_callbacks()
register_event_callback("workfile.save.before", before_workfile_save)
register_event_callback("before.save", before_save)
register_event_callback("save", on_save)
register_event_callback("open", on_open)
register_event_callback("new", on_new)
register_event_callback("taskChanged", on_task_changed)
self._has_been_setup = True
# Set folder settings for the empty scene directly after launch of
# Houdini so it initializes into the correct scene FPS,
# Frame Range, etc.
# TODO: make sure this doesn't trigger when
# opening with last workfile.
_set_context_settings()
if not IS_HEADLESS:
import hdefereval # noqa, hdefereval is only available in ui mode
# Defer generation of shelves due to issue on Windows where shelf
# initialization during start up delays Houdini UI by minutes
# making it extremely slow to launch.
hdefereval.executeDeferred(shelves.generate_shelves)
hdefereval.executeDeferred(creator_node_shelves.install)
if env_value_to_bool("AYON_WORKFILE_TOOL_ON_START"):
hdefereval.executeDeferred(lambda: host_tools.show_workfiles(parent=hou.qt.mainWindow()))
def workfile_has_unsaved_changes(self):
return hou.hipFile.hasUnsavedChanges()
def get_workfile_extensions(self):
return [".hip", ".hiplc", ".hipnc"]
def save_workfile(self, dst_path=None):
# Force forwards slashes to avoid segfault
if dst_path:
dst_path = dst_path.replace("\\", "/")
hou.hipFile.save(file_name=dst_path,
save_to_recent_files=True)
return dst_path
def open_workfile(self, filepath):
# Force forwards slashes to avoid segfault
filepath = filepath.replace("\\", "/")
hou.hipFile.load(filepath,
suppress_save_prompt=True,
ignore_load_warnings=False)
return filepath
def get_current_workfile(self):
current_filepath = hou.hipFile.path()
if (os.path.basename(current_filepath) == "untitled.hip" and
not os.path.exists(current_filepath)):
# By default a new scene in houdini is saved in the current
# working directory as "untitled.hip" so we need to capture
# that and consider it 'not saved' when it's in that state.
return None
return current_filepath
def get_containers(self):
    """Return loaded container data from the scene (see module `ls`)."""
    return ls()
def _register_callbacks(self):
    """(Re)register the hip-file event callback.

    Any previously registered callbacks tracked in `self._op_events` are
    removed first so the callback is never registered twice.
    """
    for event in self._op_events.copy().values():
        if event is None:
            continue
        try:
            hou.hipFile.removeEventCallback(event)
        except RuntimeError as e:
            # Callback may already be gone; log and continue
            log.info(e)

    self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback(
        on_file_event_callback
    )
@staticmethod
def create_context_node():
    """Create the subnet node that stores context metadata.

    Returns:
        hou.Node: context node
    """
    context_node = hou.node("/obj").createNode(
        "subnet",
        node_name="OpenPypeContext",
        run_init_scripts=False,
        load_contents=False
    )
    context_node.moveToGoodPosition()

    # Mark the node as pipeline-generated and keep it out of the
    # artist's way (hidden, display flag off).
    context_node.setBuiltExplicitly(False)
    context_node.setCreatorState("OpenPype")
    context_node.setComment("OpenPype node to hold context metadata")
    context_node.setColor(hou.Color((0.081, 0.798, 0.810)))
    context_node.setDisplayFlag(False)
    context_node.hide(True)

    return context_node
def update_context_data(self, data, changes):
    """Imprint context `data` onto the context node, creating it if needed."""
    context_node = hou.node(CONTEXT_CONTAINER)
    if context_node is None:
        context_node = self.create_context_node()
    lib.imprint(context_node, data, update=True)
def get_context_data(self):
    """Read context data from the context node, creating it if needed."""
    context_node = hou.node(CONTEXT_CONTAINER)
    if context_node is None:
        context_node = self.create_context_node()
    return lib.read(context_node)
def save_file(self, dst_path=None):
    """Save the current scene, optionally to `dst_path`.

    Args:
        dst_path (Optional[str]): Destination file path. When None the
            scene is saved to its current location (matching
            `hou.hipFile.save` semantics).
    """
    # Force forwards slashes to avoid segfault.
    # Bugfix: guard against the default `dst_path=None`, which previously
    # raised AttributeError on `.replace` - mirrors `save_workfile`.
    if dst_path:
        dst_path = dst_path.replace("\\", "/")
    hou.hipFile.save(file_name=dst_path,
                     save_to_recent_files=True)
def on_file_event_callback(event):
    """Translate Houdini hip-file events into pipeline events."""
    event_types = hou.hipFileEventType
    if event == event_types.AfterLoad:
        emit_event("open")
    elif event == event_types.AfterSave:
        emit_event("save")
    elif event == event_types.BeforeSave:
        emit_event("before.save")
    elif event == event_types.AfterClear:
        emit_event("new")
def containerise(name,
                 namespace,
                 nodes,
                 context,
                 loader=None,
                 suffix=""):
    """Bundle `nodes` into a subnet and imprint it with metadata

    Containerisation enables a tracking of version, author and origin
    for loaded assets.

    Arguments:
        name (str): Name of resulting assembly
        namespace (str): Namespace under which to host container
        nodes (list): Long names of nodes to containerise.
            NOTE(review): this argument is currently unused - the node
            named `name` under /obj is used instead; confirm intent.
        context (dict): Asset information
        loader (str, optional): Name of loader used to produce this container.
        suffix (str, optional): Suffix of container, defaults to `_CON`.

    Returns:
        container (str): Name of container assembly
    """
    # Get AVALON_CONTAINERS subnet
    subnet = get_or_create_avalon_container()

    # Create proper container name
    container_name = "{}_{}".format(name, suffix or "CON")
    # Assumes a node named `name` already exists under /obj
    # (created by the loader before calling this function).
    container = hou.node("/obj/{}".format(name))
    container.setName(container_name, unique_name=True)

    data = {
        "schema": "openpype:container-2.0",
        "id": AVALON_CONTAINER_ID,
        "name": name,
        "namespace": namespace,
        "loader": str(loader),
        "representation": context["representation"]["id"],
    }

    lib.imprint(container, data)

    # "Parent" the container under the container network
    hou.moveNodesTo([container], subnet)

    subnet.node(container_name).moveToGoodPosition()

    return container
def parse_container(container):
    """Return the container node's full container data.

    Args:
        container (hou.Node): A container node.

    Returns:
        dict: The container schema data for this container node, or an
            empty dict when a required parm is missing.
    """
    # Read only relevant parms
    # TODO: Clean up this hack replacing `lib.read(container)`
    data = {}
    for key in ("name", "namespace", "loader", "representation", "id"):
        parm = container.parm(key)
        if not parm:
            # Missing a required parm: not a valid container
            return {}
        value = parm.eval()
        # Values may be json encoded with a known prefix; decode those
        if isinstance(value, str) and value.startswith(JSON_PREFIX):
            try:
                value = json.loads(value[len(JSON_PREFIX):])
            except json.JSONDecodeError:
                # not a json
                pass
        data[key] = value

    # Backwards compatibility pre-schemas for containers
    data.setdefault("schema", "openpype:container-1.0")

    # Append transient data
    data["objectName"] = container.path()
    data["node"] = container

    return data
def ls():
    """Yield parsed container data for every container in the scene.

    Supports the current AYON id, the Avalon id and the legacy
    mindbender id for backwards compatibility.

    Yields:
        dict: Container data (see `parse_container`).
    """
    containers = []
    for identifier in (
        AYON_CONTAINER_ID,
        AVALON_CONTAINER_ID,
        "pyblish.mindbender.container"
    ):
        containers += lib.lsattr("id", identifier)

    for container in sorted(containers,
                            # Hou 19+ Python 3 hou.ObjNode are not
                            # sortable due to not supporting greater
                            # than comparisons
                            key=lambda node: node.path()):
        yield parse_container(container)
def before_workfile_save(event):
    """Flag that a workfile save was initiated (consumed by `on_task_changed`)."""
    global _about_to_save
    _about_to_save = True
def before_save():
    """Validate scene FPS right before the file is saved."""
    return lib.validate_fps()
def on_save():
    """Callback run after the scene was saved."""
    log.info("Running callback on save..")

    # update houdini vars
    lib.update_houdini_vars_context_dialog()

    # We are now starting the actual save directly
    global _about_to_save
    _about_to_save = False
def on_task_changed():
    """Prompt a context reset when the task changes mid save-as."""
    global _about_to_save
    if not IS_HEADLESS and _about_to_save:
        # Let's prompt the user to update the context settings or not
        lib.prompt_reset_context()
def _show_outdated_content_popup():
    """Show a popup warning about outdated containers in the scene."""
    # Get main window
    parent = lib.get_main_window()
    if parent is None:
        log.info("Skipping outdated content pop-up "
                 "because Houdini window can't be found.")
        return

    from ayon_core.tools.utils import SimplePopup

    # Show outdated pop-up
    def _on_show_inventory():
        # Clicking the popup opens the scene inventory tool
        from ayon_core.tools.utils import host_tools
        host_tools.show_scene_inventory(parent=parent)

    dialog = SimplePopup(parent=parent)
    dialog.setWindowTitle("Houdini scene has outdated content")
    dialog.set_message("There are outdated containers in "
                       "your Houdini scene.")
    dialog.on_clicked.connect(_on_show_inventory)
    dialog.show()
def on_open():
    """Callback run after a scene was opened.

    Updates Houdini context vars, validates FPS and warns about
    outdated containers.
    """
    if not hou.isUIAvailable():
        log.debug("Batch mode detected, ignoring `on_open` callbacks..")
        return

    log.info("Running callback on open..")

    # update houdini vars
    lib.update_houdini_vars_context_dialog()

    # Validate FPS after update_task_from_path to
    # ensure it is using correct FPS for the folder
    lib.validate_fps()

    if any_outdated_containers():
        parent = lib.get_main_window()
        if parent is None:
            # When opening Houdini with last workfile on launch the UI hasn't
            # initialized yet completely when the `on_open` callback triggers.
            # We defer the dialog popup to wait for the UI to become available.
            # We assume it will open because `hou.isUIAvailable()` returns True
            import hdefereval
            hdefereval.executeDeferred(_show_outdated_content_popup)
        else:
            _show_outdated_content_popup()

        log.warning("Scene has outdated content.")
def on_new():
    """Set project resolution and fps when create a new file"""
    if hou.hipFile.isLoadingHipFile():
        # This event also triggers when Houdini opens a file due to the
        # new event being registered to 'afterClear'. As such we can skip
        # 'new' logic if the user is opening a file anyway
        log.debug("Skipping on new callback due to scene being opened.")
        return

    log.info("Running callback on new..")
    _set_context_settings()

    # It seems that the current frame always gets reset to frame 1 on
    # new scene. So we enforce current frame to be at the start of the playbar
    # with execute deferred
    def _enforce_start_frame():
        start = hou.playbar.playbackRange()[0]
        hou.setFrame(start)

    if hou.isUIAvailable():
        import hdefereval
        hdefereval.executeDeferred(_enforce_start_frame)
    else:
        # Run without execute deferred when no UI is available because
        # without UI `hdefereval` is not available to import
        _enforce_start_frame()
def get_or_create_avalon_container() -> "hou.OpNode":
    """Return the AVALON_CONTAINERS subnet, creating it when absent."""
    existing = hou.node(AVALON_CONTAINERS)
    if existing is not None:
        return existing

    # Create the subnet under the parent path from the constant
    parent_path, name = AVALON_CONTAINERS.rsplit("/", 1)
    return hou.node(parent_path).createNode("subnet", node_name=name)
def _set_context_settings():
    """Apply the project settings from the project definition

    Settings can be overwritten by a folder if the folder.attrib contains
    any information regarding those settings.

    Examples of settings:
        fps
        resolution
        renderer

    Returns:
        None
    """
    lib.reset_framerange()
    lib.update_houdini_vars_context()

View file

@ -1,347 +0,0 @@
# -*- coding: utf-8 -*-
"""Houdini specific Avalon/Pyblish plugin definitions."""
import sys
from abc import (
ABCMeta
)
import six
import hou
import pyblish.api
from ayon_core.pipeline import (
CreatorError,
Creator,
CreatedInstance,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
load,
publish
)
from ayon_core.lib import BoolDef
from .lib import imprint, read, lsattr, add_self_publish_button
SETTINGS_CATEGORY = "houdini"
class HoudiniCreatorBase(object):
    """Shared helpers for Houdini creator plugins."""

    @staticmethod
    def cache_instance_data(shared_data):
        """Cache instances for Creators to shared data.

        Create `houdini_cached_instances` key when needed in shared data and
        fill it with all collected instances from the scene under its
        respective creator identifiers.

        Create `houdini_cached_legacy_instance` key for any legacy instances
        detected in the scene as instances per family.

        Args:
            shared_data (Dict[str, Any]): Shared data.

        Returns:
            Dict[str, Any]: Shared data with the instance caches filled in.
        """
        if shared_data.get("houdini_cached_instances") is None:
            cache = dict()
            cache_legacy = dict()

            # Gather all nodes tagged with an AYON or Avalon instance id
            nodes = []
            for id_type in [AYON_INSTANCE_ID, AVALON_INSTANCE_ID]:
                nodes.extend(lsattr("id", id_type))

            for node in nodes:
                creator_identifier_parm = node.parm("creator_identifier")
                if creator_identifier_parm:
                    # creator instance
                    creator_id = creator_identifier_parm.eval()
                    cache.setdefault(creator_id, []).append(node)
                else:
                    # legacy instance
                    family_parm = node.parm("family")
                    if not family_parm:
                        # must be a broken instance
                        continue
                    family = family_parm.eval()
                    cache_legacy.setdefault(family, []).append(node)

            shared_data["houdini_cached_instances"] = cache
            shared_data["houdini_cached_legacy_instance"] = cache_legacy
        return shared_data

    @staticmethod
    def create_instance_node(
        folder_path,
        node_name,
        parent,
        node_type="geometry",
        pre_create_data=None
    ):
        """Create node representing instance.

        Arguments:
            folder_path (str): Folder path.
            node_name (str): Name of the new node.
            parent (str): Name of the parent node.
            node_type (str, optional): Type of the node.
            pre_create_data (Optional[Dict]): Pre create data.

        Returns:
            hou.Node: Newly created instance node.
        """
        # NOTE(review): `folder_path` and `pre_create_data` are unused here
        # but kept for subclass overrides - confirm before removing.
        parent_node = hou.node(parent)
        instance_node = parent_node.createNode(
            node_type, node_name=node_name)
        instance_node.moveToGoodPosition()
        return instance_node
@six.add_metaclass(ABCMeta)
class HoudiniCreator(Creator, HoudiniCreatorBase):
    """Base class for most of the Houdini creator plugins."""
    # Nodes selected at create time (when "use_selection" is enabled)
    selected_nodes = []
    # Project settings key override; defaults to the class name when None
    settings_name = None
    # When enabled a self-publish button is added to created nodes
    add_publish_button = False

    settings_category = SETTINGS_CATEGORY

    def create(self, product_name, instance_data, pre_create_data):
        """Create an instance node and register it as a CreatedInstance.

        Args:
            product_name (str): Product name, used as the node name.
            instance_data (dict): Instance data to imprint on the node.
            pre_create_data (dict): Options from the pre-create dialog.

        Returns:
            CreatedInstance: The created instance.

        Raises:
            CreatorError: When a `hou.Error` occurred during creation.
        """
        try:
            self.selected_nodes = []

            if pre_create_data.get("use_selection"):
                self.selected_nodes = hou.selectedNodes()

            # Get the node type and remove it from the data, not needed
            node_type = instance_data.pop("node_type", None)
            if node_type is None:
                node_type = "geometry"

            folder_path = instance_data["folderPath"]

            instance_node = self.create_instance_node(
                folder_path,
                product_name,
                "/out",
                node_type,
                pre_create_data
            )
            self.customize_node_look(instance_node)

            # The node's path doubles as the unique instance id
            instance_data["instance_node"] = instance_node.path()
            instance_data["instance_id"] = instance_node.path()
            instance_data["families"] = self.get_publish_families()

            instance = CreatedInstance(
                self.product_type,
                product_name,
                instance_data,
                self)
            self._add_instance_to_context(instance)
            self.imprint(instance_node, instance.data_to_store())

            if self.add_publish_button:
                add_self_publish_button(instance_node)

            return instance
        except hou.Error as er:
            # Re-raise Houdini errors as CreatorError, keeping the
            # original traceback attached.
            six.reraise(
                CreatorError,
                CreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2])

    def lock_parameters(self, node, parameters):
        """Lock list of specified parameters on the node.

        Args:
            node (hou.Node): Houdini node to lock parameters on.
            parameters (list of str): List of parameter names.
        """
        for name in parameters:
            try:
                parm = node.parm(name)
                parm.lock(True)
            except AttributeError:
                # `node.parm(name)` returned None; parameter is missing
                self.log.debug("missing lock pattern {}".format(name))

    def collect_instances(self):
        """Collect existing instances of this creator from the scene."""
        # cache instances if missing
        self.cache_instance_data(self.collection_shared_data)
        for instance in self.collection_shared_data[
                "houdini_cached_instances"].get(self.identifier, []):

            node_data = read(instance)

            # Node paths are always the full node path since that is unique
            # Because it's the node's path it's not written into attributes
            # but explicitly collected
            node_path = instance.path()
            node_data["instance_id"] = node_path
            node_data["instance_node"] = node_path
            node_data["families"] = self.get_publish_families()
            # Restore product name from the prefixed key (see `imprint`)
            if "AYON_productName" in node_data:
                node_data["productName"] = node_data.pop("AYON_productName")

            created_instance = CreatedInstance.from_existing(
                node_data, self
            )
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        """Write changed instance values back onto their nodes."""
        for created_inst, changes in update_list:
            instance_node = hou.node(created_inst.get("instance_node"))
            new_values = {
                key: changes[key].new_value
                for key in changes.changed_keys
            }
            # Update parm templates and values
            self.imprint(
                instance_node,
                new_values,
                update=True
            )

    def imprint(self, node, values, update=False):
        """Store `values` on `node`, excluding node-path derived keys."""
        # Never store instance node and instance id since that data comes
        # from the node's path
        if "productName" in values:
            # Prefixed to avoid clashing with Houdini parm names
            values["AYON_productName"] = values.pop("productName")
        values.pop("instance_node", None)
        values.pop("instance_id", None)
        values.pop("families", None)
        imprint(node, values, update=update)

    def remove_instances(self, instances):
        """Remove specified instance from the scene.

        This is only removing `id` parameter so instance is no longer
        instance, because it might contain valuable data for artist.

        NOTE(review): despite the statement above, the code destroys the
        node entirely - confirm intended behavior.
        """
        for instance in instances:
            instance_node = hou.node(instance.data.get("instance_node"))
            if instance_node:
                instance_node.destroy()

            self._remove_instance_from_context(instance)

    def get_pre_create_attr_defs(self):
        """Attribute definitions shown in the pre-create dialog."""
        return [
            BoolDef("use_selection", label="Use selection")
        ]

    @staticmethod
    def customize_node_look(
            node, color=None,
            shape="chevron_down"):
        """Set custom look for instance nodes.

        Args:
            node (hou.Node): Node to set look.
            color (hou.Color, Optional): Color of the node.
            shape (str, Optional): Shape name of the node.

        Returns:
            None
        """
        if not color:
            color = hou.Color((0.616, 0.871, 0.769))
        node.setUserData('nodeshape', shape)
        node.setColor(color)

    def get_publish_families(self):
        """Return families for the instances of this creator.

        Allow a Creator to define multiple families so that a creator can
        e.g. specify `usd` and `usdrop`.

        There is no need to override this method if you only have the
        primary family defined by the `product_type` property as that will
        always be set.

        Returns:
            List[str]: families for instances of this creator
        """
        return []

    def get_network_categories(self):
        """Return in which network view type this creator should show.

        The node type categories returned here will be used to define where
        the creator will show up in the TAB search for nodes in Houdini's
        Network View.

        This can be overridden in inherited classes to define where that
        particular Creator should be visible in the TAB search.

        Returns:
            list: List of houdini node type categories
        """
        return [hou.ropNodeTypeCategory()]

    def apply_settings(self, project_settings):
        """Method called on initialization of plugin to apply settings."""
        # Apply General Settings
        houdini_general_settings = project_settings["houdini"]["general"]
        self.add_publish_button = houdini_general_settings.get(
            "add_self_publish_button", False)

        # Apply Creator Settings
        settings_name = self.settings_name
        if settings_name is None:
            settings_name = self.__class__.__name__

        settings = project_settings["houdini"]["create"]
        settings = settings.get(settings_name)
        if settings is None:
            self.log.debug(
                "No settings found for {}".format(self.__class__.__name__)
            )
            return

        # Apply every settings key directly as a plugin attribute
        for key, value in settings.items():
            setattr(self, key, value)
class HoudiniLoader(load.LoaderPlugin):
    """Shared base for all Houdini loader plugins."""

    settings_category = SETTINGS_CATEGORY
    hosts = ["houdini"]
class HoudiniInstancePlugin(pyblish.api.InstancePlugin):
    """Shared base for Houdini instance publish plugins."""

    settings_category = SETTINGS_CATEGORY
    hosts = ["houdini"]
class HoudiniContextPlugin(pyblish.api.ContextPlugin):
    """Shared base for Houdini context publish plugins."""

    settings_category = SETTINGS_CATEGORY
    hosts = ["houdini"]
class HoudiniExtractorPlugin(publish.Extractor):
    """Shared base for Houdini extractor publish plugins.

    Note:
        `HoudiniExtractorPlugin` subclasses `publish.Extractor`, which in
        turn subclasses `pyblish.api.InstancePlugin`. Should an extractor
        ever need to run as a context plugin, the `publish.Extractor`
        functionality would have to be incorporated there as well.
    """

    settings_category = SETTINGS_CATEGORY
    hosts = ["houdini"]

View file

@ -1,215 +0,0 @@
import os
import re
import logging
import platform
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import get_current_project_name
from ayon_core.lib import StringTemplate
import hou
from .lib import get_current_context_template_data_with_folder_attrs
log = logging.getLogger("ayon_houdini.shelves")
def generate_shelves():
    """This function generates complete shelves from shelf set to tools
    in Houdini from the project settings houdini shelf definition.
    """
    current_os = platform.system().lower()

    # load configuration of houdini shelves
    project_name = get_current_project_name()
    project_settings = get_project_settings(project_name)
    shelves_configs = project_settings["houdini"]["shelves"]

    if not shelves_configs:
        log.debug("No custom shelves found in project settings.")
        return

    # Get Template data
    template_data = get_current_context_template_data_with_folder_attrs()

    for config in shelves_configs:
        # Each config stores its payload under the key selected by "options"
        selected_option = config["options"]
        shelf_set_config = config[selected_option]

        shelf_set_filepath = shelf_set_config.get('shelf_set_source_path')
        if shelf_set_filepath:
            # A complete shelf-set file was provided for this platform;
            # load it directly instead of building shelves from definitions
            shelf_set_os_filepath = shelf_set_filepath[current_os]
            if shelf_set_os_filepath:
                shelf_set_os_filepath = get_path_using_template_data(
                    shelf_set_os_filepath, template_data
                )
                if not os.path.isfile(shelf_set_os_filepath):
                    log.error("Shelf path doesn't exist - "
                              "{}".format(shelf_set_os_filepath))
                    continue

                hou.shelves.loadFile(shelf_set_os_filepath)
                continue

        shelf_set_name = shelf_set_config.get('shelf_set_name')
        if not shelf_set_name:
            log.warning("No name found in shelf set definition.")
            continue

        shelves_definition = shelf_set_config.get('shelf_definition')
        if not shelves_definition:
            log.debug(
                "No shelf definition found for shelf set named '{}'".format(
                    shelf_set_name
                )
            )
            continue

        shelf_set = get_or_create_shelf_set(shelf_set_name)
        for shelf_definition in shelves_definition:
            shelf_name = shelf_definition.get('shelf_name')
            if not shelf_name:
                log.warning("No name found in shelf definition.")
                continue

            shelf = get_or_create_shelf(shelf_name)

            if not shelf_definition.get('tools_list'):
                log.debug(
                    "No tool definition found for shelf named {}".format(
                        shelf_name
                    )
                )
                continue

            mandatory_attributes = {'label', 'script'}
            for tool_definition in shelf_definition.get('tools_list'):
                # We verify that the name and script attributes of the tool
                # are set
                if not all(
                        tool_definition[key] for key in mandatory_attributes
                ):
                    log.warning(
                        "You need to specify at least the name and the "
                        "script path of the tool.")
                    continue

                tool = get_or_create_tool(
                    tool_definition, shelf, template_data
                )

                if not tool:
                    continue

                # Add the tool to the shelf if not already in it
                if tool not in shelf.tools():
                    shelf.setTools(list(shelf.tools()) + [tool])

            # Add the shelf in the shelf set if not already in it
            if shelf not in shelf_set.shelves():
                shelf_set.setShelves(shelf_set.shelves() + (shelf,))
def get_or_create_shelf_set(shelf_set_label):
    """Return the shelf set with the given label, creating it when missing.

    Arguments:
        shelf_set_label (str): The label of the shelf set

    Returns:
        hou.ShelfSet: The shelf set existing or the new one
    """
    for existing in hou.shelves.shelfSets().values():
        if existing.label() == shelf_set_label:
            return existing

    # Derive an internal name from the label
    return hou.shelves.newShelfSet(
        name=shelf_set_label.replace(' ', '_').lower(),
        label=shelf_set_label
    )
def get_or_create_shelf(shelf_label):
    """Return the shelf with the given label, creating it when missing.

    Arguments:
        shelf_label (str): The label of the shelf

    Returns:
        hou.Shelf: The shelf existing or the new one
    """
    for existing in hou.shelves.shelves().values():
        if existing.label() == shelf_label:
            return existing

    # Derive an internal name from the label
    return hou.shelves.newShelf(
        name=shelf_label.replace(' ', '_').lower(),
        label=shelf_label
    )
def get_or_create_tool(tool_definition, shelf, template_data):
    """Get or create a shelf tool, matched by label within `shelf`.

    An existing tool with the same label is updated in place; otherwise
    a new tool is created.

    Arguments:
        tool_definition (dict): Dict with label, script, icon and help
        shelf (hou.Shelf): The parent shelf of the tool
        template_data (dict): Template data used to resolve the script
            and icon paths.

    Returns:
        Optional[hou.Tool]: The tool updated or the new one; None when
            the definition is invalid or the script path does not exist.
    """
    tool_label = tool_definition.get("label")
    if not tool_label:
        log.warning("Skipped shelf without label")
        return

    # Bugfix: work on a copy so the caller's (settings) dict is not
    # mutated by the icon/script rewrites and the `label` pop below.
    tool_definition = dict(tool_definition)

    script_path = get_path_using_template_data(
        tool_definition["script"], template_data)
    if not script_path or not os.path.exists(script_path):
        log.warning("This path doesn't exist - {}".format(script_path))
        return

    # Bugfix: use `.get` so a definition without an "icon" key does not
    # raise KeyError; only resolve the icon path when one is set.
    icon_path = tool_definition.get("icon")
    if icon_path:
        tool_definition["icon"] = get_path_using_template_data(
            icon_path, template_data)

    # Store the script contents on the tool instead of the path
    with open(script_path) as stream:
        tool_definition["script"] = stream.read()

    existing_tool = next(
        (tool for tool in shelf.tools() if tool.label() == tool_label),
        None
    )
    if existing_tool:
        # `setData` updates an existing tool; label stays as-is
        tool_definition.pop("label", None)
        existing_tool.setData(**tool_definition)
        return existing_tool

    tool_name = re.sub(r"[^\w\d]+", "_", tool_label).lower()
    return hou.shelves.newTool(name=tool_name, **tool_definition)
def get_path_using_template_data(path, template_data):
    """Format `path` with template data and normalize to forward slashes."""
    formatted_path = StringTemplate.format_template(path, template_data)
    return formatted_path.replace("\\", "/")

View file

@ -1,379 +0,0 @@
"""Houdini-specific USD Library functions."""
import contextlib
import logging
import json
import itertools
from typing import List
import hou
from pxr import Usd, Sdf, Tf, Vt, UsdRender
log = logging.getLogger(__name__)
def add_usd_output_processor(ropnode, processor):
    """Add USD Output Processor to USD Rop node.

    Args:
        ropnode (hou.RopNode): The USD Rop node.
        processor (str): The output processor name. This is the basename of
            the python file that contains the Houdini USD Output Processor.
    """
    # Imported lazily; loputils is only available inside Houdini
    import loputils

    loputils.handleOutputProcessorAdd(
        {
            "node": ropnode,
            "parm": ropnode.parm("outputprocessors"),
            "script_value": processor,
        }
    )
def remove_usd_output_processor(ropnode, processor):
    """Removes USD Output Processor from USD Rop node.

    Args:
        ropnode (hou.RopNode): The USD Rop node.
        processor (str): The output processor name. This is the basename of
            the python file that contains the Houdini USD Output Processor.

    Raises:
        RuntimeError: When the processor is not present on the node.
    """
    # Imported lazily; loputils is only available inside Houdini
    import loputils

    remove_parm = ropnode.parm(processor + "_remove")
    if remove_parm is None:
        raise RuntimeError(
            "Output Processor %s does not "
            "exist on %s" % (processor, ropnode.name())
        )

    loputils.handleOutputProcessorRemove(
        {"node": ropnode, "parm": remove_parm}
    )
@contextlib.contextmanager
def outputprocessors(ropnode, processors=tuple(), disable_all_others=True):
    """Context manager to temporarily add Output Processors to USD ROP node.

    On exit, newly added processors are removed and the original enable
    states of pre-existing processors are restored.

    Args:
        ropnode (hou.RopNode): The USD Rop node.
        processors (tuple or list): The processors to add.
        disable_all_others (bool, Optional): Whether to disable all
            output processors currently on the ROP node that are not in the
            `processors` list passed to this function.
    """
    # TODO: Add support for forcing the correct Order of the processors

    # Remember the original enable state of each existing processor so it
    # can be restored afterwards.
    original = []
    prefix = "enableoutputprocessor_"
    processor_parms = ropnode.globParms(prefix + "*")
    for parm in processor_parms:
        original.append((parm, parm.eval()))

    if disable_all_others:
        for parm in processor_parms:
            parm.set(False)

    # Track processors we add so only those get removed on exit
    added = []
    for processor in processors:

        parm = ropnode.parm(prefix + processor)
        if parm:
            # If processor already exists, just enable it
            parm.set(True)

        else:
            # Else add the new processor
            add_usd_output_processor(ropnode, processor)
            added.append(processor)

    try:
        yield
    finally:

        # Remove newly added processors
        for processor in added:
            remove_usd_output_processor(ropnode, processor)

        # Revert to original values
        for parm, value in original:
            if parm:
                parm.set(value)
def get_usd_rop_loppath(node):
    """Return the LOP node a USD (render) ROP operates on, if any."""
    node_type = node.type().name()
    if node_type == "usd":
        # Plain USD ROP: always taken from the `loppath` parm
        return node.parm("loppath").evalAsNode()

    if node_type in {"usd_rop", "usdrender_rop"}:
        # Inside Solaris e.g. /stage (not in ROP context)
        # When incoming connection is present it takes it directly
        inputs = node.inputs()
        if inputs:
            return inputs[0]
        return node.parm("loppath").evalAsNode()
def get_layer_save_path(layer, expand_string=True):
    """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer.

    Args:
        layer (pxr.Sdf.Layer): The Layer to retrieve the save path data from.
        expand_string (bool): Whether to expand any houdini vars in the save
            path before computing the absolute path.

    Returns:
        str or None: Path to save to when data exists.
    """
    hou_layer_info = layer.rootPrims.get("HoudiniLayerInfo")
    if not hou_layer_info:
        return

    save_path = hou_layer_info.customData.get("HoudiniSavePath", None)
    if save_path:
        # Unfortunately this doesn't actually resolve the full absolute path
        if expand_string:
            save_path = hou.text.expandString(save_path)
        return layer.ComputeAbsolutePath(save_path)
def get_referenced_layers(layer):
    """Return SdfLayers for all external references of the current layer

    Args:
        layer (pxr.Sdf.Layer): The layer whose references to collect.

    Returns:
        list: List of pxr.Sdf.Layer that are external references to this layer
    """
    referenced = []
    for identifier in layer.GetExternalReferences():
        found = Sdf.Layer.Find(identifier)
        if not found:
            # A file may not be in memory and is referenced from disk.
            # As such it cannot be found; those layers are ignored.
            continue
        referenced.append(found)

    return referenced
def iter_layer_recursive(layer):
    """Recursively iterate all 'external' referenced layers.

    Uses a worklist traversal; each referenced layer is yielded once even
    when referenced from multiple places. The input layer itself is not
    yielded.

    Args:
        layer (pxr.Sdf.Layer): Layer whose references to iterate.

    Yields:
        pxr.Sdf.Layer: Each (transitively) referenced layer.
    """
    layers = get_referenced_layers(layer)
    traversed = set(layers)  # Avoid recursion to itself (if even possible)
    traverse = list(layers)
    # Note: `traverse` grows while being iterated - this is the worklist
    for layer in traverse:

        # Include children layers (recursion)
        children_layers = get_referenced_layers(layer)
        children_layers = [x for x in children_layers if x not in traversed]
        traverse.extend(children_layers)
        traversed.update(children_layers)

        yield layer
def get_configured_save_layers(usd_rop, strip_above_layer_break=True):
    """Retrieve the layers with configured save paths from a USD ROP.

    Arguments:
        usd_rop (hou.RopNode): USD Rop Node
        strip_above_layer_break (Optional[bool]): Whether to exclude any
            layers that are above layer breaks. This defaults to True.

    Returns:
        List[Sdf.Layer]: The layers with configured save paths.

    Raises:
        RuntimeError: When the ROP's LOP path yields no valid USD stage.
    """

    lop_node = get_usd_rop_loppath(usd_rop)
    stage = lop_node.stage(apply_viewport_overrides=False)
    if not stage:
        raise RuntimeError(
            "No valid USD stage for ROP node: " "%s" % usd_rop.path()
        )
    root_layer = stage.GetRootLayer()

    if strip_above_layer_break:
        layers_above_layer_break = set(lop_node.layersAboveLayerBreak())
    else:
        layers_above_layer_break = set()

    save_layers = []
    for layer in iter_layer_recursive(root_layer):
        if (
            strip_above_layer_break and
            layer.identifier in layers_above_layer_break
        ):
            continue

        save_path = get_layer_save_path(layer)
        if save_path is not None:
            save_layers.append(layer)

    return save_layers
def setup_lop_python_layer(layer, node, savepath=None,
                           apply_file_format_args=True):
    """Set up Sdf.Layer with HoudiniLayerInfo prim for metadata.

    This is the same as `loputils.createPythonLayer` but can be run on top
    of `pxr.Sdf.Layer` instances that are already created in a Python LOP
    node. That's useful if your layer creation itself is built to be DCC
    agnostic, then we just need to run this after per layer to make it
    explicitly stored for houdini.

    By default, Houdini doesn't apply the FileFormatArguments supplied to
    the created layer; however it does support USD's file save suffix
    of `:SDF_FORMAT_ARGS:` to supply them. With `apply_file_format_args` any
    file format args set on the layer's creation will be added to the
    save path through that.

    Note: The `node.addHeldLayer` call will only work from a LOP python node
        whenever `node.editableStage()` or `node.editableLayer()` was called.

    Arguments:
        layer (Sdf.Layer): An existing layer (most likely just created
            in the current runtime)
        node (hou.LopNode): The Python LOP node to attach the layer to so
            it does not get garbage collected/mangled after the downstream.
        savepath (Optional[str]): When provided the HoudiniSaveControl
            will be set to Explicit with HoudiniSavePath to this path.
        apply_file_format_args (Optional[bool]): When enabled any
            FileFormatArgs defined for the layer on creation will be set
            in the HoudiniSavePath so Houdini USD ROP will use them too.

    Returns:
        Sdf.PrimSpec: The created HoudiniLayerInfo prim spec.
    """
    # Add a Houdini Layer Info prim where we can put the save path.
    p = Sdf.CreatePrimInLayer(layer, '/HoudiniLayerInfo')
    p.specifier = Sdf.SpecifierDef
    p.typeName = 'HoudiniLayerInfo'
    if savepath:
        if apply_file_format_args:
            # Encode file format args into the save path identifier
            args = layer.GetFileFormatArguments()
            savepath = Sdf.Layer.CreateIdentifier(savepath, args)

        p.customData['HoudiniSavePath'] = savepath
        p.customData['HoudiniSaveControl'] = 'Explicit'
    # Let everyone know what node created this layer.
    p.customData['HoudiniCreatorNode'] = node.sessionId()
    p.customData['HoudiniEditorNodes'] = Vt.IntArray([node.sessionId()])
    node.addHeldLayer(layer.identifier)

    return p
@contextlib.contextmanager
def remap_paths(rop_node, mapping):
    """Enable the AyonRemapPaths output processor with provided `mapping`.

    Args:
        rop_node (hou.RopNode): The USD ROP node to configure.
        mapping (dict): Source to destination path mapping. When empty the
            context manager is a no-op.
    """
    from ayon_houdini.api.lib import parm_values

    if not mapping:
        # Do nothing
        yield
        return

    # Houdini string parms need to escape backslashes due to the support
    # of expressions - as such we do so on the json data
    value = json.dumps(mapping).replace("\\", "\\\\")
    with outputprocessors(
        rop_node,
        processors=["ayon_remap_paths"],
        disable_all_others=True,
    ):
        with parm_values([
            (rop_node.parm("ayon_remap_paths_remap_json"), value)
        ]):
            yield
def get_usd_render_rop_rendersettings(rop_node, stage=None, logger=None):
    """Return the chosen UsdRender.Settings from the stage (if any).

    Args:
        rop_node (hou.Node): The Houdini USD Render ROP node.
        stage (pxr.Usd.Stage): The USD stage to find the render settings
            in. This is usually the stage from the LOP path the USD Render
            ROP node refers to.
        logger (logging.Logger): Logger to log warnings to if no render
            settings were found in the stage.

    Returns:
        Optional[UsdRender.Settings]: Render Settings.
    """
    if logger is None:
        logger = log

    if stage is None:
        stage = get_usd_rop_loppath(rop_node).stage()

    # An empty parm falls back to Houdini's default location
    path = rop_node.evalParm("rendersettings") or "/Render/rendersettings"

    prim = stage.GetPrimAtPath(path)
    if not prim:
        logger.warning("No render settings primitive found at: %s", path)
        return

    settings = UsdRender.Settings(prim)
    if not settings:
        logger.warning("Prim at %s is not a valid RenderSettings prim.", path)
        return

    return settings
def get_schema_type_names(type_name: str) -> List[str]:
    """Return schema type name for type name and its derived types

    This can be useful for checking whether a `Sdf.PrimSpec`'s type name is of
    a given type or any of its derived types.

    Args:
        type_name (str): The type name, like e.g. 'UsdGeomMesh'

    Returns:
        List[str]: List of schema type names and their derived types.
    """
    registry = Usd.SchemaRegistry
    tf_type = Tf.Type.FindByName(type_name)
    if tf_type == Tf.Type.Unknown:
        # Fall back to resolving through the schema type name
        tf_type = registry.GetTypeFromSchemaTypeName(type_name)
        if tf_type == Tf.Type.Unknown:
            # Type not found
            return []

    names = []
    candidates = itertools.chain([tf_type], tf_type.GetAllDerivedTypes())
    for candidate in candidates:
        schema_name = registry.GetSchemaTypeName(candidate)
        if schema_name:
            names.append(schema_name)
    return names

View file

@ -1,64 +0,0 @@
from ayon_applications import PreLaunchHook, LaunchTypes
class SetDefaultDisplayView(PreLaunchHook):
    """Set default view and default display for houdini via OpenColorIO.

    Houdini's defaultDisplay and defaultView are set by
    setting 'OCIO_ACTIVE_DISPLAYS' and 'OCIO_ACTIVE_VIEWS'
    environment variables respectively.

    More info: https://www.sidefx.com/docs/houdini/io/ocio.html#set-up
    """

    app_groups = {"houdini"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        # This is a cheap way to skip this hook if either global color
        # management or houdini color management was disabled because the
        # OCIO var would be set by the global OCIOEnvHook
        if not self.launch_context.env.get("OCIO"):
            return

        # workfile settings added in '0.2.13'
        imageio_settings = (
            self.data["project_settings"]["houdini"]["imageio"]
        )
        workfile_settings = imageio_settings.get("workfile")
        if not workfile_settings:
            self.log.info("Hook 'SetDefaultDisplayView' requires Houdini "
                          "addon version >= '0.2.13'")
            return

        if not workfile_settings["enabled"]:
            self.log.info(
                "Houdini workfile color management is disabled."
            )
            return

        # 'OCIO_ACTIVE_DISPLAYS', 'OCIO_ACTIVE_VIEWS' are checked
        # as Admins can add them in Ayon env vars or Ayon tools.
        display = workfile_settings["default_display"]
        if display:
            # get 'OCIO_ACTIVE_DISPLAYS' value if exists.
            self._set_context_env("OCIO_ACTIVE_DISPLAYS", display)

        view = workfile_settings["default_view"]
        if view:
            # get 'OCIO_ACTIVE_VIEWS' value if exists.
            self._set_context_env("OCIO_ACTIVE_VIEWS", view)

    def _set_context_env(self, env_var, default_value):
        # Prepend the configured default to whatever is already set,
        # dropping empty parts.
        existing_value = self.launch_context.env.get(env_var, "")
        parts = [default_value, existing_value]
        new_value = ":".join(part for part in parts if part)
        self.log.info(
            "Setting {} environment to: {}"
            .format(env_var, new_value)
        )
        self.launch_context.env[env_var] = new_value

View file

@ -1,18 +0,0 @@
from ayon_applications import PreLaunchHook, LaunchTypes
class SetPath(PreLaunchHook):
    """Set current dir to workdir.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """

    app_groups = {"houdini"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        workdir = self.launch_context.env.get("AYON_WORKDIR", "")
        if workdir:
            # Launch the application with the work directory as cwd.
            self.launch_context.kwargs["cwd"] = workdir
        else:
            self.log.warning("BUG: Workdir is not filled.")

View file

@ -1,78 +0,0 @@
# -*- coding: utf-8 -*-
"""Converter for legacy Houdini products."""
from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin
from ayon_houdini.api.lib import imprint
class HoudiniLegacyConvertor(ProductConvertorPlugin):
    """Find and convert any legacy products in the scene.

    This Converter will find all legacy products in the scene and will
    transform them to the current system. Since the old products doesn't
    retain any information about their original creators, the only mapping
    we can do is based on their families.

    Its limitation is that you can have multiple creators creating product
    name of the same product type and there is no way to handle it. This code
    should nevertheless cover all creators that came with AYON.
    """
    identifier = "io.openpype.creators.houdini.legacy"

    # Maps a legacy product type ("family") to the creator identifier
    # that should take ownership of the converted instance.
    product_type_to_id = {
        "camera": "io.openpype.creators.houdini.camera",
        "ass": "io.openpype.creators.houdini.ass",
        "imagesequence": "io.openpype.creators.houdini.imagesequence",
        "hda": "io.openpype.creators.houdini.hda",
        "pointcache": "io.openpype.creators.houdini.pointcache",
        "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy",
        "redshift_rop": "io.openpype.creators.houdini.redshift_rop",
        "usd": "io.openpype.creators.houdini.usd",
        "usdrender": "io.openpype.creators.houdini.usdrender",
        "vdbcache": "io.openpype.creators.houdini.vdbcache"
    }

    def __init__(self, *args, **kwargs):
        super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs)
        # {product_type: [node, ...]} filled by `find_instances()`.
        self.legacy_instances = {}

    def find_instances(self):
        """Find legacy products in the scene.

        Legacy products are the ones that doesn't have `creator_identifier`
        parameter on them.

        This is using cached entries done in
        :py:meth:`~HoudiniCreatorBase.cache_instance_data()`
        """
        self.legacy_instances = self.collection_shared_data.get(
            "houdini_cached_legacy_instance")
        if not self.legacy_instances:
            return
        # Single UI entry summarizing how many legacy products were found.
        self.add_convertor_item("Found {} incompatible product{}.".format(
            len(self.legacy_instances),
            "s" if len(self.legacy_instances) > 1 else ""
        ))

    def convert(self):
        """Convert all legacy products to current.

        It is enough to add `creator_identifier` and `instance_node`.
        """
        if not self.legacy_instances:
            return
        for product_type, legacy_instances in self.legacy_instances.items():
            # Product types without a known creator mapping are skipped.
            if product_type in self.product_type_to_id:
                for instance in legacy_instances:
                    creator_id = self.product_type_to_id[product_type]
                    data = {
                        "creator_identifier": creator_id,
                        "instance_node": instance.path()
                    }
                    if product_type == "pointcache":
                        # NOTE(review): presumably needed so family-based
                        # plugin filters keep matching alembic caches —
                        # confirm against publish plugins.
                        data["families"] = ["abc"]
                    self.log.info("Converting {} to {}".format(
                        instance.path(), creator_id))
                    imprint(instance, data)

View file

@ -1,57 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating alembic camera products."""
from ayon_houdini.api import plugin
from ayon_core.pipeline import CreatorError
import hou
class CreateAlembicCamera(plugin.HoudiniCreator):
    """Single baked camera from Alembic ROP."""

    identifier = "io.openpype.creators.houdini.camera"
    label = "Camera (Abc)"
    product_type = "camera"
    icon = "camera"

    def create(self, product_name, instance_data, pre_create_data):
        """Create an Alembic ROP instance configured for camera export.

        Args:
            product_name (str): Product name of the publish.
            instance_data (dict): Instance data, mutated in place.
            pre_create_data (dict): Options from the creator UI.

        Raises:
            CreatorError: When more than one node is selected.
        """
        # Fix: removed redundant function-level `import hou` — the module
        # already imports `hou` at the top of the file.
        instance_data.pop("active", None)
        instance_data.update({"node_type": "alembic"})

        instance = super(CreateAlembicCamera, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))
        parms = {
            "filename": hou.text.expandString(
                "$HIP/pyblish/{}.abc".format(product_name)),
            "use_sop_path": False,
        }

        if self.selected_nodes:
            if len(self.selected_nodes) > 1:
                raise CreatorError("More than one item selected.")
            path = self.selected_nodes[0].path()
            # Split the node path into the first root and the remainder
            # So we can set the root and objects parameters correctly
            _, root, remainder = path.split("/", 2)
            parms.update({"root": "/" + root, "objects": remainder})

        instance_node.setParms(parms)

        # Lock the Use Sop Path setting so the
        # user doesn't accidentally enable it.
        to_lock = ["use_sop_path"]
        self.lock_parameters(instance_node, to_lock)

        # Render the full frame range of the ROP.
        instance_node.parm("trange").set(1)

    def get_network_categories(self):
        """Allow creation from ROP and OBJ network panes."""
        return [
            hou.ropNodeTypeCategory(),
            hou.objNodeTypeCategory()
        ]

View file

@ -1,70 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating Arnold ASS files."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef
class CreateArnoldAss(plugin.HoudiniCreator):
    """Arnold .ass Archive"""

    identifier = "io.openpype.creators.houdini.ass"
    label = "Arnold ASS"
    product_type = "ass"
    icon = "magic"

    # Default extension: `.ass` or `.ass.gz`
    # however calling HoudiniCreator.create()
    # will override it by the value in the project settings
    ext = ".ass"

    def create(self, product_name, instance_data, pre_create_data):
        import hou

        instance_data.pop("active", None)
        instance_data.update({"node_type": "arnold"})

        # Carry the farm toggle from pre-create over to the instance.
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        creator_attributes["farm"] = pre_create_data["farm"]

        instance = super(CreateArnoldAss, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        # Hide Properties Tab on Arnold ROP since that's used
        # for rendering instead of .ass Archive Export
        parm_template_group = instance_node.parmTemplateGroup()
        parm_template_group.hideFolder("Properties", True)
        instance_node.setParmTemplateGroup(parm_template_group)

        basename = "{}.$F4{}".format(product_name, self.ext)
        filepath = "{}{}".format(
            hou.text.expandString("$HIP/pyblish/"),
            basename
        )
        instance_node.setParms({
            # Render frame range
            "trange": 1,
            # Arnold ROP settings
            "ar_ass_file": filepath,
            "ar_ass_export_enable": 1
        })

        # Lock any parameters in this list
        self.lock_parameters(
            instance_node,
            ["ar_ass_export_enable", "productType", "id"]
        )

    def get_instance_attr_defs(self):
        return [
            BoolDef("farm",
                    label="Submitting to Farm",
                    default=False)
        ]

    def get_pre_create_attr_defs(self):
        # Use same attributes as for instance attributes
        return (
            super().get_pre_create_attr_defs()
            + self.get_instance_attr_defs()
        )

View file

@ -1,110 +0,0 @@
from ayon_houdini.api import plugin
from ayon_core.lib import EnumDef, BoolDef
class CreateArnoldRop(plugin.HoudiniCreator):
    """Arnold ROP"""

    identifier = "io.openpype.creators.houdini.arnold_rop"
    label = "Arnold ROP"
    product_type = "arnold_rop"
    icon = "magic"

    # Default extension
    ext = "exr"

    # Default render target
    render_target = "farm_split"

    def create(self, product_name, instance_data, pre_create_data):
        """Create an Arnold ROP instance and set its render parms."""
        import hou
        # Transfer settings from pre create to instance
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        for key in ["render_target", "review"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]

        # Remove the active, we are checking the bypass flag of the nodes
        instance_data.pop("active", None)
        instance_data.update({"node_type": "arnold"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 1

        instance = super(CreateArnoldRop, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        # NOTE(review): assumes 'image_format' is always present in
        # pre_create_data (UI default); otherwise `ext` is None — confirm.
        ext = pre_create_data.get("image_format")

        filepath = "{renders_dir}{product_name}/{product_name}.$F4.{ext}".format(
            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
            product_name=product_name,
            ext=ext,
        )
        parms = {
            # Render frame range
            "trange": 1,
            # Arnold ROP settings
            "ar_picture": filepath,
            "ar_exr_half_precision": 1           # half precision
        }

        if pre_create_data.get("render_target") == "farm_split":
            # Split export & render: also write .ass archives to disk.
            ass_filepath = \
                "{export_dir}{product_name}/{product_name}.$F4.ass".format(
                    export_dir=hou.text.expandString("$HIP/pyblish/ass/"),
                    product_name=product_name,
                )
            parms["ar_ass_export_enable"] = 1
            parms["ar_ass_file"] = ass_filepath

        instance_node.setParms(parms)

        # Lock any parameters in this list
        to_lock = ["productType", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_instance_attr_defs(self):
        """get instance attribute definitions.

        Attributes defined in this method are exposed in
        publish tab in the publisher UI.
        """
        render_target_items = {
            "local": "Local machine rendering",
            "local_no_render": "Use existing frames (local)",
            "farm": "Farm Rendering",
            "farm_split": "Farm Rendering - Split export & render jobs",
        }

        return [
            BoolDef("review",
                    label="Review",
                    tooltip="Mark as reviewable",
                    default=True),
            EnumDef("render_target",
                    items=render_target_items,
                    label="Render target",
                    default=self.render_target),
        ]

    def get_pre_create_attr_defs(self):
        """Expose image format choice in the pre-create (creator) UI."""
        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        attrs = [
            EnumDef("image_format",
                    image_format_enum,
                    default=self.ext,
                    label="Image Format Options"),
        ]
        return attrs + self.get_instance_attr_defs()

View file

@ -1,108 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache bgeo files."""
from ayon_houdini.api import plugin
from ayon_core.pipeline import CreatorError
import hou
from ayon_core.lib import EnumDef, BoolDef
class CreateBGEO(plugin.HoudiniCreator):
    """BGEO pointcache creator."""
    identifier = "io.openpype.creators.houdini.bgeo"
    label = "PointCache (Bgeo)"
    product_type = "pointcache"
    icon = "gears"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a Geometry ROP instance writing bgeo files."""
        instance_data.pop("active", None)
        instance_data.update({"node_type": "geometry"})
        # Carry the farm toggle from pre-create over to the instance.
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        creator_attributes["farm"] = pre_create_data["farm"]

        instance = super(CreateBGEO, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        file_path = "{}{}".format(
            hou.text.expandString("$HIP/pyblish/"),
            "{}.$F4.{}".format(
                product_name,
                pre_create_data.get("bgeo_type") or "bgeo.sc")
        )
        parms = {
            "sopoutput": file_path
        }

        # Render the full frame range of the ROP.
        instance_node.parm("trange").set(1)
        if self.selected_nodes:
            # if selection is on SOP level, use it
            if isinstance(self.selected_nodes[0], hou.SopNode):
                parms["soppath"] = self.selected_nodes[0].path()
            else:
                # try to find output node with the lowest index
                outputs = [
                    child for child in self.selected_nodes[0].children()
                    if child.type().name() == "output"
                ]
                if not outputs:
                    # Apply what we have before raising so the user can
                    # fix the SOP path on the created instance manually.
                    instance_node.setParms(parms)
                    raise CreatorError((
                        "Missing output node in SOP level for the selection. "
                        "Please select correct SOP path in created instance."
                    ))
                outputs.sort(key=lambda output: output.evalParm("outputidx"))
                parms["soppath"] = outputs[0].path()

        instance_node.setParms(parms)

    def get_instance_attr_defs(self):
        """Instance attributes shown in the publisher UI."""
        return [
            BoolDef("farm",
                    label="Submitting to Farm",
                    default=False)
        ]

    def get_pre_create_attr_defs(self):
        """Expose bgeo compression choice plus instance attributes."""
        attrs = super().get_pre_create_attr_defs()
        bgeo_enum = [
            {
                "value": "bgeo",
                "label": "uncompressed bgeo (.bgeo)"
            },
            {
                "value": "bgeosc",
                "label": "BLOSC compressed bgeo (.bgeosc)"
            },
            {
                "value": "bgeo.sc",
                "label": "BLOSC compressed bgeo (.bgeo.sc)"
            },
            {
                "value": "bgeo.gz",
                "label": "GZ compressed bgeo (.bgeo.gz)"
            },
            {
                "value": "bgeo.lzma",
                "label": "LZMA compressed bgeo (.bgeo.lzma)"
            },
            {
                "value": "bgeo.bz2",
                "label": "BZip2 compressed bgeo (.bgeo.bz2)"
            }
        ]

        return attrs + [
            EnumDef("bgeo_type", bgeo_enum, label="BGEO Options"),
        ] + self.get_instance_attr_defs()

    def get_network_categories(self):
        """Allow creation from ROP and SOP network panes."""
        return [
            hou.ropNodeTypeCategory(),
            hou.sopNodeTypeCategory()
        ]

View file

@ -1,61 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating composite sequences."""
from ayon_houdini.api import plugin
from ayon_core.pipeline import CreatorError
import hou
class CreateCompositeSequence(plugin.HoudiniCreator):
    """Composite ROP to Image Sequence"""

    identifier = "io.openpype.creators.houdini.imagesequence"
    label = "Composite (Image Sequence)"
    product_type = "imagesequence"
    icon = "gears"

    # Default output extension of the rendered image sequence.
    ext = ".exr"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a Composite ROP instance writing an image sequence.

        Args:
            product_name (str): Product name of the publish.
            instance_data (dict): Instance data, mutated in place.
            pre_create_data (dict): Options from the creator UI.

        Raises:
            CreatorError: When more than one node is selected.
        """
        # Fix: removed redundant function-level `import hou  # noqa` —
        # the module already imports `hou` at the top of the file.
        instance_data.pop("active", None)
        instance_data.update({"node_type": "comp"})

        instance = super(CreateCompositeSequence, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        filepath = "{}{}".format(
            hou.text.expandString("$HIP/pyblish/"),
            "{}.$F4{}".format(product_name, self.ext)
        )
        parms = {
            "trange": 1,
            "copoutput": filepath
        }

        if self.selected_nodes:
            if len(self.selected_nodes) > 1:
                raise CreatorError("More than one item selected.")
            path = self.selected_nodes[0].path()
            parms["coppath"] = path

        instance_node.setParms(parms)

        # Manually set f1 & f2 to $FSTART and $FEND respectively
        # to match other Houdini nodes default.
        instance_node.parm("f1").setExpression("$FSTART")
        instance_node.parm("f2").setExpression("$FEND")

        # Lock any parameters in this list
        # NOTE(review): "prim_to_detail_pattern" is an Alembic ROP parameter,
        # not a Composite ROP one — this looks copy-pasted from the
        # pointcache creator. Kept as-is to preserve behavior; confirm
        # whether it can be removed.
        to_lock = ["prim_to_detail_pattern"]
        self.lock_parameters(instance_node, to_lock)

    def get_network_categories(self):
        """Allow creation from ROP and COP2 network panes."""
        return [
            hou.ropNodeTypeCategory(),
            hou.cop2NodeTypeCategory()
        ]

View file

@ -1,323 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating publishable Houdini Digital Assets."""
import hou
import ayon_api
from ayon_core.pipeline import (
CreatorError,
get_current_project_name
)
from ayon_core.lib import (
get_ayon_username,
BoolDef
)
from ayon_houdini.api import plugin
# region assettools
# logic based on Houdini 19.5.752 `assettools.py` because
# this logic was removed in Houdini 20+
def get_tool_submenus(hda_def):
    """Returns the tab submenu entries of this node.

    Note: A node could be placed in multiple entries at once.

    Arguments:
        hda_def: the HDA Definition by hou.node.type().definition()

    Returns:
        Optional[list[str]]: A list of submenus, or None when the
            definition has no Tools.shelf section, the section cannot be
            parsed, or no submenu entries with text exist.
    """
    import xml.etree.ElementTree as ET

    # Flattened from a deeply nested if/else pyramid to guard clauses;
    # all None-returning branches behave as before.
    if not hda_def.hasSection('Tools.shelf'):
        return None

    ts_section = hda_def.sections()['Tools.shelf'].contents()
    try:
        root = ET.fromstring(ts_section)
    except ET.ParseError:
        return None

    # Robustness fix: a shelf document without any <tool> child used to
    # raise IndexError on `root[0]`; treat it as "no submenus" instead.
    if not len(root):
        return None

    tool = root[0]
    tool_submenus = [
        submenu.text
        for submenu in tool.findall('toolSubmenu')
        if submenu is not None and submenu.text
    ]
    return tool_submenus or None
def set_tool_submenu(hda_def,
                     new_submenu='Digital Assets'):
    """Sets the tab menu entry for a node.

    Arguments:
        hda_def: the HDA Definition by hou.node.type().definition()
        new_submenu (Optional[str]): This will be the new submenu, replacing
            old_submenu entry
    """
    # Node type category name -> TAB menu network context token.
    context_dict = {
        'Shop': 'SHOP',
        'Cop2': 'COP2',
        'Object': 'OBJ',
        'Chop': 'CHOP',
        'Sop': 'SOP',
        'Vop': 'VOP',
        'VopNet': 'VOPNET',
        'Driver': 'ROP',
        'TOP': 'TOP',
        'Top': 'TOP',
        'Lop': 'LOP',
        'Dop': 'DOP'}
    # Node type category name -> tool utility module used in the shelf script.
    utils_dict = {
        'Shop': 'shoptoolutils',
        'Cop2': 'cop2toolutils',
        'Object': 'objecttoolutils',
        'Chop': 'choptoolutils',
        'Sop': 'soptoolutils',
        'Vop': 'voptoolutils',
        'VopNet': 'vopnettoolutils',
        'Driver': 'drivertoolutils',
        'TOP': 'toptoolutils',
        'Top': 'toptoolutils',
        'Lop': 'loptoolutils',
        'Dop': 'doptoolutils'}

    if hda_def.hasSection('Tools.shelf'):
        # Fix: get_tool_submenus() can return None (unparsable or empty
        # Tools.shelf), which used to raise TypeError on `None[0]`.
        # Fall back to the default submenu entry in that case.
        submenus = get_tool_submenus(hda_def)
        old_submenu = submenus[0] if submenus else 'Digital Assets'
    else:
        # Add default tools shelf section
        content = """<?xml version="1.0" encoding="UTF-8"?>
<shelfDocument>
  <!-- This file contains definitions of shelves, toolbars, and tools.
 It should not be hand-edited when it is being used by the application.
 Note, that two definitions of the same element are not allowed in
 a single file. -->
  <tool name="$HDA_DEFAULT_TOOL" label="$HDA_LABEL" icon="$HDA_ICON">
    <toolMenuContext name="viewer">
      <contextNetType>SOP</contextNetType>
    </toolMenuContext>
    <toolMenuContext name="network">
      <contextOpType>$HDA_TABLE_AND_NAME</contextOpType>
    </toolMenuContext>
    <toolSubmenu>Digital Assets</toolSubmenu>
    <script scriptType="python"><![CDATA[import soptoolutils
soptoolutils.genericTool(kwargs, \'$HDA_NAME\')]]></script>
  </tool>
</shelfDocument>
        """
        # Adjust the SOP template to the definition's actual category.
        nodetype_category_name = hda_def.nodeType().category().name()
        context = context_dict[nodetype_category_name]
        util = utils_dict[nodetype_category_name]
        content = content.replace(
            "<contextNetType>SOP</contextNetType>",
            f"<contextNetType>{context}</contextNetType>")
        content = content.replace('soptoolutils', util)
        hda_def.addSection('Tools.shelf', content)
        old_submenu = 'Digital Assets'

    # Replace submenu
    tools = hda_def.sections()["Tools.shelf"]
    content = tools.contents()
    content = content.replace(
        f"<toolSubmenu>{old_submenu}</toolSubmenu>",
        f"<toolSubmenu>{new_submenu}</toolSubmenu>"
    )
    hda_def.addSection('Tools.shelf', content)
# endregion
class CreateHDA(plugin.HoudiniCreator):
    """Publish Houdini Digital Asset file."""

    identifier = "io.openpype.creators.houdini.hda"
    label = "Houdini Digital Asset (Hda)"
    product_type = "hda"
    icon = "gears"
    # Do not restore the user's node selection after creation.
    maintain_selection = False

    def _check_existing(self, folder_path, product_name):
        # type: (str, str) -> bool
        """Check if existing product name versions already exists."""
        # Get all products of the current folder
        project_name = self.project_name
        folder_entity = ayon_api.get_folder_by_path(
            project_name, folder_path, fields={"id"}
        )
        product_entities = ayon_api.get_products(
            project_name, folder_ids={folder_entity["id"]}, fields={"name"}
        )
        # Compare case-insensitively against all product names of the folder.
        existing_product_names_low = {
            product_entity["name"].lower()
            for product_entity in product_entities
        }
        return product_name.lower() in existing_product_names_low

    def create_instance_node(
        self,
        folder_path,
        node_name,
        parent,
        node_type="geometry",
        pre_create_data=None
    ):
        """Create (or reuse) a subnet and turn it into an HDA instance node.

        Depending on selection: reuses a selected subnet, collapses a
        selection into a new subnet, or creates an empty subnet in the
        active network editor (falling back to /obj).

        Raises:
            CreatorError: When a digital asset cannot be created from the
                node, or the product already exists with a different HDA
                definition.
        """
        if pre_create_data is None:
            pre_create_data = {}
        if self.selected_nodes:
            # if we have `use selection` enabled, and we have some
            # selected nodes ...
            if self.selected_nodes[0].type().name() == "subnet":
                to_hda = self.selected_nodes[0]
                to_hda.setName("{}_subnet".format(node_name), unique_name=True)
            else:
                parent_node = self.selected_nodes[0].parent()
                subnet = parent_node.collapseIntoSubnet(
                    self.selected_nodes,
                    subnet_name="{}_subnet".format(node_name))
                subnet.moveToGoodPosition()
                to_hda = subnet
        else:
            # Use Obj as the default path
            parent_node = hou.node("/obj")
            # Find and return the NetworkEditor pane tab with the minimum index
            pane = hou.ui.paneTabOfType(hou.paneTabType.NetworkEditor)
            if isinstance(pane, hou.NetworkEditor):
                # Use the NetworkEditor pane path as the parent path.
                parent_node = pane.pwd()
            to_hda = parent_node.createNode(
                "subnet", node_name="{}_subnet".format(node_name))
        if not to_hda.type().definition():
            # if node type has not its definition, it is not user
            # created hda. We test if hda can be created from the node.
            if not to_hda.canCreateDigitalAsset():
                raise CreatorError(
                    "cannot create hda from node {}".format(to_hda))
            # Pick a unique type name for HDA product per folder path per project.
            type_name = (
                "{project_name}{folder_path}_{node_name}".format(
                    project_name=get_current_project_name(),
                    folder_path=folder_path.replace("/","_"),
                    node_name=node_name
                )
            )
            hda_node = to_hda.createDigitalAsset(
                name=type_name,
                description=node_name,
                hda_file_name="$HIP/{}.hda".format(node_name),
                ignore_external_references=True
            )
            hda_node.layoutChildren()
        elif self._check_existing(folder_path, node_name):
            # NOTE(review): message renders as "...different HDAdefinition."
            # (missing space across the implicit string concatenation) —
            # confirm and fix the wording separately.
            raise CreatorError(
                ("product {} is already published with different HDA"
                 "definition.").format(node_name))
        else:
            hda_node = to_hda
        # If user tries to create the same HDA instance more than
        # once, then all of them will have the same product name and
        # point to the same hda_file_name. But, their node names will
        # be incremented.
        hda_node.setName(node_name, unique_name=True)
        self.customize_node_look(hda_node)
        # Set Custom settings.
        hda_def = hda_node.type().definition()
        if pre_create_data.get("set_user"):
            hda_def.setUserInfo(get_ayon_username())
        if pre_create_data.get("use_project"):
            set_tool_submenu(hda_def, "AYON/{}".format(self.project_name))
        return hda_node

    def create(self, product_name, instance_data, pre_create_data):
        """Create the HDA publish instance."""
        instance_data.pop("active", None)
        return super(CreateHDA, self).create(
            product_name,
            instance_data,
            pre_create_data)

    def get_network_categories(self):
        # Houdini allows creating sub-network nodes inside
        # these categories.
        # Therefore this plugin can work in these categories.
        return [
            hou.chopNodeTypeCategory(),
            hou.cop2NodeTypeCategory(),
            hou.dopNodeTypeCategory(),
            hou.ropNodeTypeCategory(),
            hou.lopNodeTypeCategory(),
            hou.objNodeTypeCategory(),
            hou.sopNodeTypeCategory(),
            hou.topNodeTypeCategory(),
            hou.vopNodeTypeCategory()
        ]

    def get_pre_create_attr_defs(self):
        """Expose HDA-specific options in the creator UI."""
        attrs = super(CreateHDA, self).get_pre_create_attr_defs()
        return attrs + [
            BoolDef("set_user",
                    tooltip="Set current user as the author of the HDA",
                    default=False,
                    label="Set Current User"),
            BoolDef("use_project",
                    tooltip="Use project name as tab submenu path.\n"
                            "The location in TAB Menu will be\n"
                            "'AYON/project_name/your_HDA_name'",
                    default=True,
                    label="Use Project as menu entry"),
        ]

    def get_dynamic_data(
        self,
        project_name,
        folder_entity,
        task_entity,
        variant,
        host_name,
        instance
    ):
        """
        Pass product name from product name templates as dynamic data.
        """
        dynamic_data = super(CreateHDA, self).get_dynamic_data(
            project_name,
            folder_entity,
            task_entity,
            variant,
            host_name,
            instance
        )
        # Expose folder name/label to the product name template.
        dynamic_data.update(
            {
                "asset": folder_entity["name"],
                "folder": {
                    "label": folder_entity["label"],
                    "name": folder_entity["name"]
                }
            }
        )
        return dynamic_data

View file

@ -1,147 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Karma ROP."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef, EnumDef, NumberDef
class CreateKarmaROP(plugin.HoudiniCreator):
    """Karma ROP"""
    identifier = "io.openpype.creators.houdini.karma_rop"
    label = "Karma ROP"
    product_type = "karma_rop"
    icon = "magic"

    # Default render target
    render_target = "farm"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a Karma ROP instance and set its render parms."""
        import hou  # noqa
        # Transfer settings from pre create to instance
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        for key in ["render_target", "review"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]

        instance_data.pop("active", None)
        instance_data.update({"node_type": "karma"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 10

        instance = super(CreateKarmaROP, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        ext = pre_create_data.get("image_format")

        filepath = "{renders_dir}{product_name}/{product_name}.$F4.{ext}".format(
            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
            product_name=product_name,
            ext=ext,
        )
        checkpoint = "{cp_dir}{product_name}.$F4.checkpoint".format(
            cp_dir=hou.text.expandString("$HIP/pyblish/"),
            product_name=product_name
        )
        usd_directory = "{usd_dir}{product_name}_$RENDERID".format(
            usd_dir=hou.text.expandString("$HIP/pyblish/renders/usd_renders/"),     # noqa
            product_name=product_name
        )
        parms = {
            # Render Frame Range
            "trange": 1,
            # Karma ROP Setting
            "picture": filepath,
            # Karma Checkpoint Setting
            # NOTE(review): "productName" looks like the wrong parm name for
            # a checkpoint file on the Karma ROP — confirm against the node's
            # parameter list.
            "productName": checkpoint,
            # USD Output Directory
            "savetodirectory": usd_directory,
        }

        res_x = pre_create_data.get("res_x")
        res_y = pre_create_data.get("res_y")

        if self.selected_nodes:
            # If camera found in selection
            # we will use as render camera
            camera = None
            for node in self.selected_nodes:
                if node.type().name() == "cam":
                    camera = node.path()
                    has_camera = pre_create_data.get("cam_res")
                    if has_camera:
                        # Take the resolution from the selected camera.
                        res_x = node.evalParm("resx")
                        res_y = node.evalParm("resy")

            if not camera:
                self.log.warning("No render camera found in selection")

            parms.update({
                "camera": camera or "",
                "resolutionx": res_x,
                "resolutiony": res_y,
            })
        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["productType", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_instance_attr_defs(self):
        """get instance attribute definitions.

        Attributes defined in this method are exposed in
        publish tab in the publisher UI.
        """
        render_target_items = {
            "local": "Local machine rendering",
            "local_no_render": "Use existing frames (local)",
            "farm": "Farm Rendering",
        }

        return [
            BoolDef("review",
                    label="Review",
                    tooltip="Mark as reviewable",
                    default=True),
            EnumDef("render_target",
                    items=render_target_items,
                    label="Render target",
                    default=self.render_target)
        ]

    def get_pre_create_attr_defs(self):
        """Expose image format and resolution options in the creator UI."""
        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        attrs = super(CreateKarmaROP, self).get_pre_create_attr_defs()
        attrs += [
            EnumDef("image_format",
                    image_format_enum,
                    default="exr",
                    label="Image Format Options"),
            NumberDef("res_x",
                      label="width",
                      default=1920,
                      decimals=0),
            NumberDef("res_y",
                      label="height",
                      default=720,
                      decimals=0),
            BoolDef("cam_res",
                    label="Camera Resolution",
                    default=False),
        ]
        return attrs + self.get_instance_attr_defs()

View file

@ -1,128 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Mantra ROP."""
from ayon_houdini.api import plugin
from ayon_core.lib import EnumDef, BoolDef
class CreateMantraROP(plugin.HoudiniCreator):
    """Mantra ROP"""
    identifier = "io.openpype.creators.houdini.mantra_rop"
    label = "Mantra ROP"
    product_type = "mantra_rop"
    icon = "magic"

    # Default render target
    render_target = "farm_split"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a Mantra ROP instance and set its render parms."""
        import hou  # noqa
        # Transfer settings from pre create to instance
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        for key in ["render_target", "review"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]

        instance_data.pop("active", None)
        instance_data.update({"node_type": "ifd"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 10

        instance = super(CreateMantraROP, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        ext = pre_create_data.get("image_format")

        filepath = "{renders_dir}{product_name}/{product_name}.$F4.{ext}".format(
            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
            product_name=product_name,
            ext=ext,
        )

        parms = {
            # Render Frame Range
            "trange": 1,
            # Mantra ROP Setting
            "vm_picture": filepath,
        }

        if pre_create_data.get("render_target") == "farm_split":
            # Split export & render: write .ifd scene files to disk.
            ifd_filepath = \
                "{export_dir}{product_name}/{product_name}.$F4.ifd".format(
                    export_dir=hou.text.expandString("$HIP/pyblish/ifd/"),
                    product_name=product_name,
                )
            parms["soho_outputmode"] = 1
            parms["soho_diskfile"] = ifd_filepath

        if self.selected_nodes:
            # If camera found in selection
            # we will use as render camera
            camera = None
            for node in self.selected_nodes:
                if node.type().name() == "cam":
                    camera = node.path()

            if not camera:
                self.log.warning("No render camera found in selection")

            parms.update({"camera": camera or ""})

        custom_res = pre_create_data.get("override_resolution")
        if custom_res:
            parms.update({"override_camerares": 1})
        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["productType", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_instance_attr_defs(self):
        """get instance attribute definitions.

        Attributes defined in this method are exposed in
        publish tab in the publisher UI.
        """
        render_target_items = {
            "local": "Local machine rendering",
            "local_no_render": "Use existing frames (local)",
            "farm": "Farm Rendering",
            "farm_split": "Farm Rendering - Split export & render jobs",
        }

        return [
            BoolDef("review",
                    label="Review",
                    tooltip="Mark as reviewable",
                    default=True),
            EnumDef("render_target",
                    items=render_target_items,
                    label="Render target",
                    default=self.render_target)
        ]

    def get_pre_create_attr_defs(self):
        """Expose image format and resolution override in the creator UI."""
        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        attrs = super(CreateMantraROP, self).get_pre_create_attr_defs()
        attrs += [
            EnumDef("image_format",
                    image_format_enum,
                    default="exr",
                    label="Image Format Options"),
            BoolDef("override_resolution",
                    label="Override Camera Resolution",
                    tooltip="Override the current camera "
                            "resolution, recommended for IPR.",
                    default=False),
        ]
        return attrs + self.get_instance_attr_defs()

View file

@ -1,141 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating Model product type.
Note:
Currently, This creator plugin is the same as 'create_pointcache.py'
But renaming the product type to 'model'.
It's purpose to support
Maya (load/publish model from maya to/from houdini).
It's considered to support multiple representations in the future.
"""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef
import hou
class CreateModel(plugin.HoudiniCreator):
    """Create Model"""
    identifier = "io.openpype.creators.houdini.model"
    label = "Model"
    product_type = "model"
    icon = "cube"

    def create(self, product_name, instance_data, pre_create_data):
        """Create an Alembic ROP instance configured for model export.

        Exports a single frame (frame start) rather than a range.
        """
        instance_data.pop("active", None)
        instance_data.update({"node_type": "alembic"})
        # Carry the farm toggle from pre-create over to the instance.
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        creator_attributes["farm"] = pre_create_data["farm"]

        instance = super(CreateModel, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        parms = {
            "use_sop_path": True,
            "build_from_path": True,
            "path_attrib": "path",
            "prim_to_detail_pattern": "cbId",
            "format": 2,
            "facesets": 0,
            "filename": hou.text.expandString(
                "$HIP/pyblish/{}.abc".format(product_name))
        }

        if self.selected_nodes:
            selected_node = self.selected_nodes[0]

            # Although Houdini allows ObjNode path on `sop_path` for the
            # the ROP node we prefer it set to the SopNode path explicitly

            # Allow sop level paths (e.g. /obj/geo1/box1)
            if isinstance(selected_node, hou.SopNode):
                parms["sop_path"] = selected_node.path()
                self.log.debug(
                    "Valid SopNode selection, 'SOP Path' in ROP will be set to '%s'."
                    % selected_node.path()
                )

            # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
            # but do not allow other object level nodes types like cameras, etc.
            elif isinstance(selected_node, hou.ObjNode) and \
                    selected_node.type().name() in ["geo"]:

                # get the output node with the minimum
                # 'outputidx' or the node with display flag
                sop_path = self.get_obj_output(selected_node)
                if sop_path:
                    parms["sop_path"] = sop_path.path()
                    self.log.debug(
                        "Valid ObjNode selection, 'SOP Path' in ROP will be set to "
                        "the child path '%s'."
                        % sop_path.path()
                    )

            if not parms.get("sop_path", None):
                self.log.debug(
                    "Selection isn't valid. 'SOP Path' in ROP will be empty."
                )
        else:
            self.log.debug(
                "No Selection. 'SOP Path' in ROP will be empty."
            )

        instance_node.setParms(parms)
        instance_node.parm("trange").set(1)

        # Explicitly set f1 and f2 to frame start.
        # Which forces the rop node to export one frame.
        instance_node.parmTuple('f').deleteAllKeyframes()
        fstart = int(hou.hscriptExpression("$FSTART"))
        instance_node.parmTuple('f').set((fstart, fstart, 1))

        # Lock any parameters in this list
        to_lock = ["prim_to_detail_pattern"]
        self.lock_parameters(instance_node, to_lock)

    def get_network_categories(self):
        """Allow creation from ROP and SOP network panes."""
        return [
            hou.ropNodeTypeCategory(),
            hou.sopNodeTypeCategory()
        ]

    def get_obj_output(self, obj_node):
        """Find output node with the smallest 'outputidx'."""
        outputs = obj_node.subnetOutputs()

        # if obj_node is empty
        if not outputs:
            return

        # if obj_node has one output child whether its
        # sop output node or a node with the render flag
        elif len(outputs) == 1:
            return outputs[0]

        # if there are more than one, then it have multiple output nodes
        # return the one with the minimum 'outputidx'
        else:
            return min(outputs,
                       key=lambda node: node.evalParm('outputidx'))

    def get_instance_attr_defs(self):
        """Instance attributes shown in the publisher UI."""
        return [
            BoolDef("farm",
                    label="Submitting to Farm",
                    default=False)
        ]

    def get_pre_create_attr_defs(self):
        """Expose the same attributes in the creator (pre-create) UI."""
        attrs = super().get_pre_create_attr_defs()
        # Use same attributes as for instance attributes
        return attrs + self.get_instance_attr_defs()

View file

@ -1,124 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef
import hou
class CreatePointCache(plugin.HoudiniCreator):
    """Alembic ROP to pointcache"""
    identifier = "io.openpype.creators.houdini.pointcache"
    label = "PointCache (Abc)"
    product_type = "pointcache"
    icon = "gears"

    def create(self, product_name, instance_data, pre_create_data):
        """Create an Alembic ROP configured for pointcache publishing.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        # Activeness is tracked through the ROP's bypass flag instead.
        instance_data.pop("active", None)
        instance_data.update({"node_type": "alembic"})
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        # Transfer the 'farm' pre-create choice onto the instance.
        creator_attributes["farm"] = pre_create_data["farm"]

        instance = super(CreatePointCache, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))
        parms = {
            "use_sop_path": True,
            "build_from_path": True,
            "path_attrib": "path",
            "prim_to_detail_pattern": "cbId",
            "format": 2,  # NOTE(review): presumably Ogawa archive — confirm
            "facesets": 0,
            "filename": hou.text.expandString(
                "$HIP/pyblish/{}.abc".format(product_name))
        }

        if self.selected_nodes:
            selected_node = self.selected_nodes[0]

            # Although Houdini allows ObjNode path on `sop_path` for the
            # the ROP node we prefer it set to the SopNode path explicitly

            # Allow sop level paths (e.g. /obj/geo1/box1)
            if isinstance(selected_node, hou.SopNode):
                parms["sop_path"] = selected_node.path()
                self.log.debug(
                    "Valid SopNode selection, 'SOP Path' in ROP will be set to '%s'."
                    % selected_node.path()
                )

            # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
            # but do not allow other object level nodes types like cameras, etc.
            elif isinstance(selected_node, hou.ObjNode) and \
                    selected_node.type().name() in ["geo"]:

                # get the output node with the minimum
                # 'outputidx' or the node with display flag
                sop_path = self.get_obj_output(selected_node)
                if sop_path:
                    parms["sop_path"] = sop_path.path()
                    self.log.debug(
                        "Valid ObjNode selection, 'SOP Path' in ROP will be set to "
                        "the child path '%s'."
                        % sop_path.path()
                    )

            if not parms.get("sop_path", None):
                self.log.debug(
                    "Selection isn't valid. 'SOP Path' in ROP will be empty."
                )
        else:
            self.log.debug(
                "No Selection. 'SOP Path' in ROP will be empty."
            )

        instance_node.setParms(parms)
        # Render the full frame range (not just the current frame).
        instance_node.parm("trange").set(1)

        # Lock any parameters in this list
        to_lock = ["prim_to_detail_pattern"]
        self.lock_parameters(instance_node, to_lock)

    def get_network_categories(self):
        """Network editor categories in which this creator is available."""
        return [
            hou.ropNodeTypeCategory(),
            hou.sopNodeTypeCategory()
        ]

    def get_obj_output(self, obj_node):
        """Find output node with the smallest 'outputidx'.

        Returns None if `obj_node` has no output nodes at all.
        """
        outputs = obj_node.subnetOutputs()

        # if obj_node is empty
        if not outputs:
            return

        # if obj_node has one output child whether its
        # sop output node or a node with the render flag
        elif len(outputs) == 1:
            return outputs[0]

        # if there are more than one, then it have multiple output nodes
        # return the one with the minimum 'outputidx'
        else:
            return min(outputs,
                       key=lambda node: node.evalParm('outputidx'))

    def get_instance_attr_defs(self):
        """Attribute definitions exposed in the publisher's publish tab."""
        return [
            BoolDef("farm",
                    label="Submitting to Farm",
                    default=False)
        ]

    def get_pre_create_attr_defs(self):
        """Expose the instance attributes in the pre-create dialog too."""
        attrs = super().get_pre_create_attr_defs()
        # Use same attributes as for instance attributes
        return attrs + self.get_instance_attr_defs()

View file

@ -1,68 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating Redshift proxies."""
from ayon_houdini.api import plugin
import hou
from ayon_core.lib import BoolDef
class CreateRedshiftProxy(plugin.HoudiniCreator):
    """Redshift Proxy"""
    identifier = "io.openpype.creators.houdini.redshiftproxy"
    label = "Redshift Proxy"
    product_type = "redshiftproxy"
    icon = "magic"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a Redshift_Proxy_Output ROP for proxy export.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        # Remove the active, we are checking the bypass flag of the nodes
        instance_data.pop("active", None)

        # Redshift provides a `Redshift_Proxy_Output` node type which shows
        # a limited set of parameters by default and is set to extract a
        # Redshift Proxy. However when "imprinting" extra parameters needed
        # for OpenPype it starts showing all its parameters again. It's unclear
        # why this happens.
        # TODO: Somehow enforce so that it only shows the original limited
        # attributes of the Redshift_Proxy_Output node type
        instance_data.update({"node_type": "Redshift_Proxy_Output"})
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        # Transfer the 'farm' pre-create choice onto the instance.
        creator_attributes["farm"] = pre_create_data["farm"]

        instance = super(CreateRedshiftProxy, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        parms = {
            "RS_archive_file": '$HIP/pyblish/{}.$F4.rs'.format(product_name),
        }

        if self.selected_nodes:
            parms["RS_archive_sopPath"] = self.selected_nodes[0].path()

        instance_node.setParms(parms)

        # Lock some Avalon attributes
        # NOTE(review): "prim_to_detail_pattern" looks copied from the
        # Alembic creators; this creator never sets such a parm. Confirm
        # whether it can be dropped from the lock list.
        to_lock = ["productType", "id", "prim_to_detail_pattern"]
        self.lock_parameters(instance_node, to_lock)

    def get_network_categories(self):
        """Network editor categories in which this creator is available."""
        return [
            hou.ropNodeTypeCategory(),
            hou.sopNodeTypeCategory()
        ]

    def get_instance_attr_defs(self):
        """Attribute definitions exposed in the publisher's publish tab."""
        return [
            BoolDef("farm",
                    label="Submitting to Farm",
                    default=False)
        ]

    def get_pre_create_attr_defs(self):
        """Expose the instance attributes in the pre-create dialog too."""
        attrs = super().get_pre_create_attr_defs()
        # Use same attributes as for instance attributes
        return attrs + self.get_instance_attr_defs()

View file

@ -1,172 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create Redshift ROP."""
import hou # noqa
from ayon_core.pipeline import CreatorError
from ayon_houdini.api import plugin
from ayon_core.lib import EnumDef, BoolDef
class CreateRedshiftROP(plugin.HoudiniCreator):
    """Redshift ROP"""

    identifier = "io.openpype.creators.houdini.redshift_rop"
    label = "Redshift ROP"
    product_type = "redshift_rop"
    icon = "magic"
    ext = "exr"  # default image format
    multi_layered_mode = "No Multi-Layered EXR File"

    # Default render target
    render_target = "farm_split"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a Redshift_ROP with a linked Redshift_IPR node.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.

        Raises:
            CreatorError: When the Redshift_IPR node cannot be created
                (e.g. Redshift is not installed/enabled).
        """
        # Transfer settings from pre create to instance
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        for key in ["render_target", "review"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]

        instance_data.pop("active", None)
        instance_data.update({"node_type": "Redshift_ROP"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 10

        instance = super(CreateRedshiftROP, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        basename = instance_node.name()

        # Also create the linked Redshift IPR Rop
        try:
            ipr_rop = instance_node.parent().createNode(
                "Redshift_IPR", node_name=f"{basename}_IPR"
            )
        except hou.OperationFailed as e:
            raise CreatorError(
                (
                    "Cannot create Redshift node. Is Redshift "
                    "installed and enabled?"
                )
            ) from e

        # Move it to directly under the Redshift ROP
        ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1))

        # Set the linked rop to the Redshift ROP
        ipr_rop.parm("linked_rop").set(instance_node.path())

        ext = pre_create_data.get("image_format")
        multi_layered_mode = pre_create_data.get("multi_layered_mode")

        # Map the pre-create enum values to the node's menu indices.
        ext_format_index = {"exr": 0, "tif": 1, "jpg": 2, "png": 3}
        multilayer_mode_index = {"No Multi-Layered EXR File": "1",
                                 "Full Multi-Layered EXR File": "2"}

        filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
            product_name=product_name,
            fmt="$AOV.$F4.{ext}".format(ext=ext)
        )

        # Multipart output only applies to "Full Multi-Layered EXR File".
        if multilayer_mode_index[multi_layered_mode] == "1":
            multipart = False
        elif multilayer_mode_index[multi_layered_mode] == "2":
            multipart = True

        parms = {
            # Render frame range
            "trange": 1,
            # Redshift ROP settings
            "RS_outputFileNamePrefix": filepath,
            "RS_outputBeautyAOVSuffix": "beauty",
            "RS_outputFileFormat": ext_format_index[ext],
        }

        # The multilayer settings are only meaningful for EXR output.
        if ext == "exr":
            parms["RS_outputMultilayerMode"] = multilayer_mode_index[multi_layered_mode]
            parms["RS_aovMultipart"] = multipart

        if self.selected_nodes:
            # set up the render camera from the selected node
            # (the last selected camera wins)
            camera = None
            for node in self.selected_nodes:
                if node.type().name() == "cam":
                    camera = node.path()
            parms["RS_renderCamera"] = camera or ""

        export_dir = hou.text.expandString("$HIP/pyblish/rs/")
        rs_filepath = f"{export_dir}{product_name}/{product_name}.$F4.rs"
        parms["RS_archive_file"] = rs_filepath

        # Enable .rs archive export only when export/render jobs are split.
        if pre_create_data.get("render_target") == "farm_split":
            parms["RS_archive_enable"] = 1

        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["productType", "id"]
        self.lock_parameters(instance_node, to_lock)

    def remove_instances(self, instances):
        """Remove instances, destroying their linked IPR nodes as well."""
        for instance in instances:
            node = instance.data.get("instance_node")

            # Destroy the companion IPR node created in `create`.
            ipr_node = hou.node(f"{node}_IPR")
            if ipr_node:
                ipr_node.destroy()

        return super(CreateRedshiftROP, self).remove_instances(instances)

    def get_instance_attr_defs(self):
        """get instance attribute definitions.

        Attributes defined in this method are exposed in
        publish tab in the publisher UI.
        """

        render_target_items = {
            "local": "Local machine rendering",
            "local_no_render": "Use existing frames (local)",
            "farm": "Farm Rendering",
            "farm_split": "Farm Rendering - Split export & render jobs",
        }

        return [
            BoolDef("review",
                    label="Review",
                    tooltip="Mark as reviewable",
                    default=True),
            EnumDef("render_target",
                    items=render_target_items,
                    label="Render target",
                    default=self.render_target)
        ]

    def get_pre_create_attr_defs(self):
        """Expose image format and EXR layering choices before creation."""
        image_format_enum = [
            "exr", "tif", "jpg", "png",
        ]
        multi_layered_mode = [
            "No Multi-Layered EXR File",
            "Full Multi-Layered EXR File"
        ]

        attrs = super(CreateRedshiftROP, self).get_pre_create_attr_defs()
        attrs += [
            EnumDef("image_format",
                    image_format_enum,
                    default=self.ext,
                    label="Image Format Options"),
            EnumDef("multi_layered_mode",
                    multi_layered_mode,
                    default=self.multi_layered_mode,
                    label="Multi-Layered EXR"),
        ]
        return attrs + self.get_instance_attr_defs()

View file

@ -1,153 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating openGL reviews."""
from ayon_houdini.api import lib, plugin
from ayon_core.lib import EnumDef, BoolDef, NumberDef
import os
import hou
class CreateReview(plugin.HoudiniCreator):
    """Review with OpenGL ROP"""

    identifier = "io.openpype.creators.houdini.review"
    label = "Review"
    product_type = "review"
    icon = "video-camera"
    review_color_space = ""

    def apply_settings(self, project_settings):
        """Read the review colorspace from the project imageio settings."""
        super(CreateReview, self).apply_settings(project_settings)
        # workfile settings added in '0.2.13'
        color_settings = project_settings["houdini"]["imageio"].get(
            "workfile", {}
        )
        if color_settings.get("enabled"):
            self.review_color_space = color_settings.get("review_color_space")

    def create(self, product_name, instance_data, pre_create_data):
        """Create an OpenGL ROP configured for review output.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        instance_data.pop("active", None)
        instance_data.update({"node_type": "opengl"})
        instance_data["imageFormat"] = pre_create_data.get("imageFormat")
        instance_data["keepImages"] = pre_create_data.get("keepImages")

        instance = super(CreateReview, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        frame_range = hou.playbar.frameRange()

        filepath = "{root}/{product_name}/{product_name}.$F4.{ext}".format(
            root=hou.text.expandString("$HIP/pyblish"),
            # keep dynamic link to product name
            product_name="`chs(\"AYON_productName\")`",
            # (fix) The pre-create attribute is named "imageFormat"; the old
            # code read "image_format" which never exists in pre_create_data,
            # so the chosen format was ignored and "png" was always used.
            ext=pre_create_data.get("imageFormat") or "png"
        )

        parms = {
            "picture": filepath,

            "trange": 1,

            # Unlike many other ROP nodes the opengl node does not default
            # to expression of $FSTART and $FEND so we preserve that behavior
            # but do set the range to the frame range of the playbar
            "f1": frame_range[0],
            "f2": frame_range[1],
        }

        override_resolution = pre_create_data.get("override_resolution")
        if override_resolution:
            parms.update({
                "tres": override_resolution,
                "res1": pre_create_data.get("resx"),
                "res2": pre_create_data.get("resy"),
                "aspect": pre_create_data.get("aspect"),
            })

        if self.selected_nodes:
            # The first camera found in selection we will use as camera
            # Other node types we set in force objects
            camera = None
            force_objects = []
            for node in self.selected_nodes:
                path = node.path()
                if node.type().name() == "cam":
                    if camera:
                        continue
                    camera = path
                else:
                    force_objects.append(path)

            if not camera:
                self.log.warning("No camera found in selection.")

            parms.update({
                "camera": camera or "",
                "scenepath": "/obj",
                "forceobjects": " ".join(force_objects),
                "vobjects": ""  # clear candidate objects from '*' value
            })

        instance_node.setParms(parms)

        # Set OCIO Colorspace to the default colorspace
        # if there's OCIO
        if os.getenv("OCIO"):
            # Fall to the default value if cls.review_color_space is empty.
            if not self.review_color_space:
                # cls.review_color_space is an empty string
                # when the imageio/workfile setting is disabled or
                # when the Review colorspace setting is empty.
                from ayon_houdini.api.colorspace import get_default_display_view_colorspace  # noqa
                self.review_color_space = get_default_display_view_colorspace()

            lib.set_review_color_space(instance_node,
                                       self.review_color_space,
                                       self.log)

        to_lock = ["id", "productType"]
        self.lock_parameters(instance_node, to_lock)

    def get_pre_create_attr_defs(self):
        """Expose review output options before creation."""
        attrs = super(CreateReview, self).get_pre_create_attr_defs()
        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]

        return attrs + [
            BoolDef("keepImages",
                    label="Keep Image Sequences",
                    default=False),
            EnumDef("imageFormat",
                    image_format_enum,
                    default="png",
                    label="Image Format Options"),
            BoolDef("override_resolution",
                    label="Override resolution",
                    tooltip="When disabled the resolution set on the camera "
                            "is used instead.",
                    default=True),
            NumberDef("resx",
                      label="Resolution Width",
                      default=1280,
                      minimum=2,
                      decimals=0),
            NumberDef("resy",
                      label="Resolution Height",
                      default=720,
                      minimum=2,
                      decimals=0),
            NumberDef("aspect",
                      label="Aspect Ratio",
                      default=1.0,
                      minimum=0.0001,
                      decimals=3)
        ]

View file

@ -1,155 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator for Unreal Static Meshes."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef, EnumDef
import hou
class CreateStaticMesh(plugin.HoudiniCreator):
    """Static Meshes as FBX. """

    identifier = "io.openpype.creators.houdini.staticmesh.fbx"
    label = "Static Mesh (FBX)"
    product_type = "staticMesh"
    icon = "fa5s.cubes"

    default_variants = ["Main"]

    def create(self, product_name, instance_data, pre_create_data):
        """Create a filmboxfbx ROP configured for static mesh export.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        instance_data.update({"node_type": "filmboxfbx"})

        instance = super(CreateStaticMesh, self).create(
            product_name,
            instance_data,
            pre_create_data)

        # get the created rop node
        instance_node = hou.node(instance.get("instance_node"))

        # prepare parms
        output_path = hou.text.expandString(
            "$HIP/pyblish/{}.fbx".format(product_name)
        )

        parms = {
            "startnode": self.get_selection(),
            "sopoutput": output_path,
            # vertex cache format
            "vcformat": pre_create_data.get("vcformat"),
            "convertunits": pre_create_data.get("convertunits"),
            # set render range to use frame range start-end frame
            "trange": 1,
            "createsubnetroot": pre_create_data.get("createsubnetroot")
        }

        # set parms
        instance_node.setParms(parms)

        # Lock any parameters in this list
        to_lock = ["productType", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_network_categories(self):
        """Network editor categories in which this creator is available."""
        return [
            hou.ropNodeTypeCategory(),
            hou.objNodeTypeCategory(),
            hou.sopNodeTypeCategory()
        ]

    def get_pre_create_attr_defs(self):
        """Add settings for users. """
        attrs = super(CreateStaticMesh, self).get_pre_create_attr_defs()

        createsubnetroot = BoolDef("createsubnetroot",
                                   tooltip="Create an extra root for the "
                                           "Export node when it's a "
                                           "subnetwork. This causes the "
                                           "exporting subnetwork node to be "
                                           "represented in the FBX file.",
                                   default=False,
                                   label="Create Root for Subnet")
        vcformat = EnumDef("vcformat",
                           items={
                               0: "Maya Compatible (MC)",
                               1: "3DS MAX Compatible (PC2)"
                           },
                           default=0,
                           label="Vertex Cache Format")
        convert_units = BoolDef("convertunits",
                                tooltip="When on, the FBX is converted"
                                        "from the current Houdini "
                                        "system units to the native "
                                        "FBX unit of centimeters.",
                                default=False,
                                label="Convert Units")

        return attrs + [createsubnetroot, vcformat, convert_units]

    def get_dynamic_data(
        self,
        project_name,
        folder_entity,
        task_entity,
        variant,
        host_name,
        instance
    ):
        """
        The default product name templates for Unreal include {asset} and thus
        we should pass that along as dynamic data.
        """
        dynamic_data = super(CreateStaticMesh, self).get_dynamic_data(
            project_name,
            folder_entity,
            task_entity,
            variant,
            host_name,
            instance
        )
        dynamic_data["asset"] = folder_entity["name"]
        return dynamic_data

    def get_selection(self):
        """Selection Logic.

        how self.selected_nodes should be processed to get
        the desirable node from selection.

        Returns:
            str : node path
        """

        selection = ""

        if self.selected_nodes:
            selected_node = self.selected_nodes[0]

            # Accept sop level nodes (e.g. /obj/geo1/box1)
            if isinstance(selected_node, hou.SopNode):
                selection = selected_node.path()
                self.log.debug(
                    "Valid SopNode selection, 'Export' in filmboxfbx"
                    " will be set to '%s'.", selected_node
                )

            # Accept object level nodes (e.g. /obj/geo1)
            elif isinstance(selected_node, hou.ObjNode):
                selection = selected_node.path()
                self.log.debug(
                    "Valid ObjNode selection, 'Export' in filmboxfbx "
                    "will be set to the child path '%s'.", selection
                )

            else:
                self.log.debug(
                    "Selection isn't valid. 'Export' in "
                    "filmboxfbx will be empty."
                )
        else:
            self.log.debug(
                "No Selection. 'Export' in filmboxfbx will be empty."
            )

        return selection

View file

@ -1,55 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USDs."""
from ayon_houdini.api import plugin
import hou
class CreateUSD(plugin.HoudiniCreator):
    """Universal Scene Description"""
    identifier = "io.openpype.creators.houdini.usd"
    label = "USD"
    product_type = "usd"
    icon = "cubes"
    enabled = False
    description = "Create USD"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a usd ROP pre-configured for publishing.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        # Activeness is tracked via the node's bypass flag instead.
        instance_data.pop("active", None)
        instance_data.update({"node_type": "usd"})

        instance = super(CreateUSD, self).create(
            product_name,
            instance_data,
            pre_create_data)

        rop_node = hou.node(instance.get("instance_node"))

        output_path = "$HIP/pyblish/{}.usd".format(product_name)
        parms = {
            "lopoutput": output_path,
            "enableoutputprocessor_simplerelativepaths": False,
        }
        if self.selected_nodes:
            first_selected = self.selected_nodes[0]
            parms["loppath"] = first_selected.path()
        rop_node.setParms(parms)

        # Parameters the artist must not edit.
        locked_parms = [
            "fileperframe",
            # Lock some Avalon attributes
            "productType",
            "id",
        ]
        self.lock_parameters(rop_node, locked_parms)

    def get_network_categories(self):
        """Network editor categories in which this creator is available."""
        categories = [
            hou.ropNodeTypeCategory(),
            hou.lopNodeTypeCategory(),
        ]
        return categories

    def get_publish_families(self):
        """Families assigned to published instances."""
        return ["usd", "usdrop"]

View file

@ -1,73 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USD looks with textures."""
import inspect
from ayon_houdini.api import plugin
import hou
class CreateUSDLook(plugin.HoudiniCreator):
    """Universal Scene Description Look"""

    identifier = "io.openpype.creators.houdini.usd.look"
    label = "Look"
    product_type = "look"
    icon = "paint-brush"
    enabled = True
    description = "Create USD Look"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a usd ROP configured for look publishing.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        # Activeness is tracked via the node's bypass flag instead.
        instance_data.pop("active", None)
        instance_data.update({"node_type": "usd"})

        instance = super(CreateUSDLook, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))
        parms = {
            "lopoutput": "$HIP/pyblish/{}.usd".format(product_name),
            "enableoutputprocessor_simplerelativepaths": False,

            # Set the 'default prim' by default to the folder name being
            # published to
            "defaultprim": '/`strsplit(chs("folderPath"), "/", -1)`',
        }
        if self.selected_nodes:
            parms["loppath"] = self.selected_nodes[0].path()
        instance_node.setParms(parms)

        # Lock any parameters in this list
        to_lock = [
            "fileperframe",
            # Lock some Avalon attributes
            # NOTE(review): sibling creators lock "productType" rather than
            # "family" — confirm whether this should be updated to match.
            "family",
            "id",
        ]
        self.lock_parameters(instance_node, to_lock)

    def get_detail_description(self):
        """Return the long-form description shown in the publisher UI."""
        return inspect.cleandoc("""Publish looks in USD data.

        From the Houdini Solaris context (LOPs) this will publish the look for
        an asset as a USD file with the used textures.

        Any assets used by the look will be relatively remapped to the USD
        file and integrated into the publish as `resources`.

        """)

    def get_network_categories(self):
        """Network editor categories in which this creator is available."""
        return [
            hou.ropNodeTypeCategory(),
            hou.lopNodeTypeCategory()
        ]

    def get_publish_families(self):
        """Families assigned to published instances."""
        return ["usd", "look", "usdrop"]

View file

@ -1,165 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USD renders."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef, EnumDef
import hou
def get_usd_rop_renderers():
    """Return all available renderers supported by USD Render ROP.

    Note that the USD Render ROP does not include all Hydra renderers,
    because it excludes the GL ones like Houdini GL and Storm. USD Render
    ROP only lists the renderers that have `aovsupport` enabled. Also see:
    https://www.sidefx.com/docs/houdini/nodes/out/usdrender.html#list

    Returns:
        dict[str, str]: Plug-in name to display name mapping.
    """
    renderers = {}
    for info in hou.lop.availableRendererInfo():
        # Only renderers with AOV support appear in the USD Render ROP.
        if info.get('aovsupport'):
            renderers[info["name"]] = info["displayname"]
    return renderers
class CreateUSDRender(plugin.HoudiniCreator):
    """USD Render ROP in /stage"""
    identifier = "io.openpype.creators.houdini.usdrender"
    label = "USD Render"
    product_type = "usdrender"
    icon = "magic"
    description = "Create USD Render"

    default_renderer = "Karma CPU"
    # Default render target
    render_target = "farm_split"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a usdrender ROP configured for (farm) rendering.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        # Transfer settings from pre create to instance
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        for key in ["render_target", "review"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]

        # TODO: Support creation in /stage if wanted by user
        # pre_create_data["parent"] = "/stage"

        # Remove the active, we are checking the bypass flag of the nodes
        instance_data.pop("active", None)
        instance_data.update({"node_type": "usdrender"})

        # Override default value for the Export Chunk Size because if the
        # a single USD file is written as opposed to per frame we want to
        # ensure only one machine picks up that sequence
        # TODO: Probably better to change the default somehow for just this
        # Creator on the HoudiniSubmitDeadline plug-in, if possible?
        (
            instance_data
            .setdefault("publish_attributes", {})
            .setdefault("HoudiniSubmitDeadlineUsdRender", {})["export_chunk"]
        ) = 1000

        instance = super(CreateUSDRender, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        parms = {
            # Render frame range
            "trange": 1
        }
        if self.selected_nodes:
            # NOTE(review): overridden by the LOP-category loop below when
            # the selection contains a LOP node; this fallback is only kept
            # for non-LOP selections.
            parms["loppath"] = self.selected_nodes[0].path()

        if pre_create_data.get("render_target") == "farm_split":
            # Do not trigger the husk render, only trigger the USD export
            parms["runcommand"] = False
            # By default, the render ROP writes out the render file to a
            # temporary directory. But if we want to render the USD file on
            # the farm we instead want it in the project available
            # to all machines. So we ensure all USD files are written to a
            # folder to our choice. The
            # `__render__.usd` (default name, defined by `lopoutput` parm)
            # in that folder will then be the file to render.
            parms["savetodirectory_directory"] = "$HIP/render/usd/$HIPNAME/$OS"
            parms["lopoutput"] = "__render__.usd"
            parms["allframesatonce"] = True

        # By default strip any Houdini custom data from the output file
        # since the renderer doesn't care about it
        parms["clearhoudinicustomdata"] = True

        # Use the first selected LOP node if "Use Selection" is enabled
        # and the user had any nodes selected
        if self.selected_nodes:
            for node in self.selected_nodes:
                if node.type().category() == hou.lopNodeTypeCategory():
                    parms["loppath"] = node.path()
                    break

        # Set default renderer if defined in settings
        if pre_create_data.get("renderer"):
            parms["renderer"] = pre_create_data.get("renderer")

        instance_node.setParms(parms)

        # Lock some AYON attributes
        to_lock = ["productType", "id"]
        self.lock_parameters(instance_node, to_lock)

    def get_instance_attr_defs(self):
        """get instance attribute definitions.

        Attributes defined in this method are exposed in
        publish tab in the publisher UI.
        """

        render_target_items = {
            "local": "Local machine rendering",
            "local_no_render": "Use existing frames (local)",
            "farm": "Farm Rendering",
            "farm_split": "Farm Rendering - Split export & render jobs",
        }

        return [
            BoolDef("review",
                    label="Review",
                    tooltip="Mark as reviewable",
                    default=True),
            EnumDef("render_target",
                    items=render_target_items,
                    label="Render target",
                    default=self.render_target)
        ]

    def get_pre_create_attr_defs(self):
        """Expose the renderer choice (and instance attrs) before creation."""
        # Retrieve available renderers and convert default renderer to
        # plug-in name if settings provided the display name
        renderer_plugin_to_display_name = get_usd_rop_renderers()
        default_renderer = self.default_renderer or None
        if (
            default_renderer
            and default_renderer not in renderer_plugin_to_display_name
        ):
            # Map default renderer display name to plugin name
            for name, display_name in renderer_plugin_to_display_name.items():
                if default_renderer == display_name:
                    default_renderer = name
                    break
            else:
                # Default renderer not found in available renderers
                default_renderer = None

        attrs = super(CreateUSDRender, self).get_pre_create_attr_defs()
        attrs += [
            EnumDef("renderer",
                    label="Renderer",
                    default=default_renderer,
                    items=renderer_plugin_to_display_name),
        ]
        return attrs + self.get_instance_attr_defs()

View file

@ -1,122 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating VDB Caches."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef
import hou
class CreateVDBCache(plugin.HoudiniCreator):
    """OpenVDB from Geometry ROP"""
    identifier = "io.openpype.creators.houdini.vdbcache"
    # NOTE(review): "vbdcache" looks like a typo of "vdbcache", but the
    # value may be persisted in existing scenes, so it is kept as-is.
    name = "vbdcache"
    label = "VDB Cache"
    product_type = "vdbcache"
    icon = "cloud"

    def create(self, product_name, instance_data, pre_create_data):
        """Create a geometry ROP configured to write VDB caches.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Data of the new instance.
            pre_create_data (dict): Values from the pre-create attributes.
        """
        # (fix) Removed the redundant function-level `import hou`; the
        # module already imports `hou` at top level.
        instance_data.pop("active", None)
        instance_data.update({"node_type": "geometry"})
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        # Transfer the 'farm' pre-create choice onto the instance.
        creator_attributes["farm"] = pre_create_data["farm"]

        instance = super(CreateVDBCache, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))
        file_path = "{}{}".format(
            hou.text.expandString("$HIP/pyblish/"),
            "{}.$F4.vdb".format(product_name))
        parms = {
            "sopoutput": file_path,
            "initsim": True,
            "trange": 1
        }

        if self.selected_nodes:
            parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0])

        instance_node.setParms(parms)

    def get_network_categories(self):
        """Network editor categories in which this creator is available."""
        return [
            hou.ropNodeTypeCategory(),
            hou.objNodeTypeCategory(),
            hou.sopNodeTypeCategory()
        ]

    def get_sop_node_path(self, selected_node):
        """Get Sop Path of the selected node.

        Although Houdini allows ObjNode path on `sop_path` for the
        the ROP node, we prefer it set to the SopNode path explicitly.
        """

        # Allow sop level paths (e.g. /obj/geo1/box1)
        if isinstance(selected_node, hou.SopNode):
            self.log.debug(
                "Valid SopNode selection, 'SOP Path' in ROP will"
                " be set to '%s'.", selected_node.path()
            )
            return selected_node.path()

        # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
        # but do not allow other object level nodes types like cameras, etc.
        elif isinstance(selected_node, hou.ObjNode) and \
                selected_node.type().name() == "geo":

            # Try to find output node.
            sop_node = self.get_obj_output(selected_node)
            if sop_node:
                self.log.debug(
                    "Valid ObjNode selection, 'SOP Path' in ROP will "
                    "be set to the child path '%s'.", sop_node.path()
                )
                return sop_node.path()

        self.log.debug(
            "Selection isn't valid. 'SOP Path' in ROP will be empty."
        )
        return ""

    def get_obj_output(self, obj_node):
        """Try to find output node.

        If any output nodes are present, return the output node with
        the minimum 'outputidx'
        If no output nodes are present, return the node with display flag
        If no nodes are present at all, return None
        """

        outputs = obj_node.subnetOutputs()

        # if obj_node is empty
        if not outputs:
            return

        # if obj_node has one output child whether its
        # sop output node or a node with the render flag
        elif len(outputs) == 1:
            return outputs[0]

        # if there are more than one, then it has multiple output nodes
        # return the one with the minimum 'outputidx'
        else:
            return min(outputs,
                       key=lambda node: node.evalParm('outputidx'))

    def get_instance_attr_defs(self):
        """Attribute definitions exposed in the publisher's publish tab."""
        return [
            BoolDef("farm",
                    label="Submitting to Farm",
                    default=False)
        ]

    def get_pre_create_attr_defs(self):
        """Expose the instance attributes in the pre-create dialog too."""
        attrs = super().get_pre_create_attr_defs()
        # Use same attributes as for instance attributes
        return attrs + self.get_instance_attr_defs()

View file

@ -1,200 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin to create VRay ROP."""
import hou
from ayon_houdini.api import plugin
from ayon_core.pipeline import CreatorError
from ayon_core.lib import EnumDef, BoolDef
class CreateVrayROP(plugin.HoudiniCreator):
"""VRay ROP"""
identifier = "io.openpype.creators.houdini.vray_rop"
label = "VRay ROP"
product_type = "vray_rop"
icon = "magic"
ext = "exr"
# Default render target
render_target = "farm_split"
def create(self, product_name, instance_data, pre_create_data):
# Transfer settings from pre create to instance
creator_attributes = instance_data.setdefault(
"creator_attributes", dict())
for key in ["render_target", "review"]:
if key in pre_create_data:
creator_attributes[key] = pre_create_data[key]
instance_data.pop("active", None)
instance_data.update({"node_type": "vray_renderer"})
# Add chunk size attribute
instance_data["chunkSize"] = 10
instance = super(CreateVrayROP, self).create(
product_name,
instance_data,
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
# Add IPR for Vray
basename = instance_node.name()
try:
ipr_rop = instance_node.parent().createNode(
"vray", node_name=basename + "_IPR"
)
except hou.OperationFailed:
raise CreatorError(
"Cannot create Vray render node. "
"Make sure Vray installed and enabled!"
)
ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1))
ipr_rop.parm("rop").set(instance_node.path())
parms = {
"trange": 1,
"SettingsEXR_bits_per_channel": "16" # half precision
}
if pre_create_data.get("render_target") == "farm_split":
scene_filepath = \
"{export_dir}{product_name}/{product_name}.$F4.vrscene".format(
export_dir=hou.text.expandString("$HIP/pyblish/vrscene/"),
product_name=product_name,
)
# Setting render_export_mode to "2" because that's for
# "Export only" ("1" is for "Export & Render")
parms["render_export_mode"] = "2"
parms["render_export_filepath"] = scene_filepath
if self.selected_nodes:
# set up the render camera from the selected node
camera = None
for node in self.selected_nodes:
if node.type().name() == "cam":
camera = node.path()
parms.update({
"render_camera": camera or ""
})
# Enable render element
ext = pre_create_data.get("image_format")
instance_data["RenderElement"] = pre_create_data.get("render_element_enabled") # noqa
if pre_create_data.get("render_element_enabled", True):
# Vray has its own tag for AOV file output
filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
product_name=product_name,
fmt="${aov}.$F4.{ext}".format(aov="AOV",
ext=ext)
)
filepath = "{}{}".format(
hou.text.expandString("$HIP/pyblish/renders/"),
"{}/{}.${}.$F4.{}".format(product_name,
product_name,
"AOV",
ext)
)
re_rop = instance_node.parent().createNode(
"vray_render_channels",
node_name=basename + "_render_element"
)
# move the render element node next to the vray renderer node
re_rop.setPosition(instance_node.position() + hou.Vector2(0, 1))
re_path = re_rop.path()
parms.update({
"use_render_channels": 1,
"SettingsOutput_img_file_path": filepath,
"render_network_render_channels": re_path
})
else:
filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
product_name=product_name,
fmt="$F4.{ext}".format(ext=ext)
)
parms.update({
"use_render_channels": 0,
"SettingsOutput_img_file_path": filepath
})
custom_res = pre_create_data.get("override_resolution")
if custom_res:
parms.update({"override_camerares": 1})
instance_node.setParms(parms)
# lock parameters from AVALON
to_lock = ["productType", "id"]
self.lock_parameters(instance_node, to_lock)
    def remove_instances(self, instances):
        """Remove instances and their sidecar render nodes.

        Besides the instance node itself, renderer plug-ins (V-Ray,
        Redshift) may have created extra nodes suffixed `_IPR` and
        `_render_element` next to it; destroy those too.

        Args:
            instances (list): Instances to remove.
        """
        for instance in instances:
            node = instance.data.get("instance_node")
            # for the extra render node from the plugins
            # such as vray and redshift
            ipr_node = hou.node("{}{}".format(node, "_IPR"))
            if ipr_node:
                ipr_node.destroy()
            re_node = hou.node("{}{}".format(node,
                                             "_render_element"))
            if re_node:
                re_node.destroy()
        return super(CreateVrayROP, self).remove_instances(instances)
def get_instance_attr_defs(self):
"""get instance attribute definitions.
Attributes defined in this method are exposed in
publish tab in the publisher UI.
"""
render_target_items = {
"local": "Local machine rendering",
"local_no_render": "Use existing frames (local)",
"farm": "Farm Rendering",
"farm_split": "Farm Rendering - Split export & render jobs",
}
return [
BoolDef("review",
label="Review",
tooltip="Mark as reviewable",
default=True),
EnumDef("render_target",
items=render_target_items,
label="Render target",
default=self.render_target)
]
    def get_pre_create_attr_defs(self):
        """Attribute definitions shown in the creator UI before creation.

        Extends the base pre-create attributes with V-Ray specific
        options (image format, camera resolution override, render
        element toggle), then appends the instance attributes so they
        are configurable up-front as well.
        """
        image_format_enum = [
            "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
            "rad", "rat", "rta", "sgi", "tga", "tif",
        ]
        attrs = super(CreateVrayROP, self).get_pre_create_attr_defs()
        attrs += [
            EnumDef("image_format",
                    image_format_enum,
                    default=self.ext,
                    label="Image Format Options"),
            BoolDef("override_resolution",
                    label="Override Camera Resolution",
                    tooltip="Override the current camera "
                            "resolution, recommended for IPR.",
                    default=False),
            BoolDef("render_element_enabled",
                    label="Render Element",
                    tooltip="Create Render Element Node "
                            "if enabled",
                    default=False)
        ]
        return attrs + self.get_instance_attr_defs()

View file

@ -1,121 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating workfiles."""
import ayon_api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import read, imprint
from ayon_houdini.api.pipeline import CONTEXT_CONTAINER
from ayon_core.pipeline import CreatedInstance, AutoCreator
import hou
class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator):
    """Workfile auto-creator.

    Automatically maintains a single "workfile" instance per scene and
    persists it on the AYON context node inside the hip file.
    """
    identifier = "io.openpype.creators.houdini.workfile"
    label = "Workfile"
    product_type = "workfile"
    icon = "fa5.file"
    default_variant = "Main"
    def create(self):
        """Create or update the single workfile instance for this scene.

        If no workfile instance exists yet, a new one is created from
        the current context (project/folder/task). If one exists but
        its folder or task differs from the current context, it is
        re-targeted. Either way the result is written to the scene's
        context container node.
        """
        variant = self.default_variant
        # Find the existing workfile instance, if any (at most one).
        current_instance = next(
            (
                instance for instance in self.create_context.instances
                if instance.creator_identifier == self.identifier
            ), None)
        project_name = self.project_name
        folder_path = self.create_context.get_current_folder_path()
        task_name = self.create_context.get_current_task_name()
        host_name = self.host_name
        if current_instance is None:
            current_folder_path = None
        else:
            current_folder_path = current_instance["folderPath"]
        if current_instance is None:
            # No instance yet: resolve entities and build a fresh one.
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path
            )
            task_entity = ayon_api.get_task_by_name(
                project_name, folder_entity["id"], task_name
            )
            product_name = self.get_product_name(
                project_name,
                folder_entity,
                task_entity,
                variant,
                host_name,
            )
            data = {
                "folderPath": folder_path,
                "task": task_name,
                "variant": variant,
            }
            data.update(
                self.get_dynamic_data(
                    project_name,
                    folder_entity,
                    task_entity,
                    variant,
                    host_name,
                    current_instance)
            )
            self.log.info("Auto-creating workfile instance...")
            current_instance = CreatedInstance(
                self.product_type, product_name, data, self
            )
            self._add_instance_to_context(current_instance)
        elif (
            current_folder_path != folder_path
            or current_instance["task"] != task_name
        ):
            # Update instance context if is not the same
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path
            )
            task_entity = ayon_api.get_task_by_name(
                project_name, folder_entity["id"], task_name
            )
            product_name = self.get_product_name(
                project_name,
                folder_entity,
                task_entity,
                variant,
                host_name,
            )
            current_instance["folderPath"] = folder_path
            current_instance["task"] = task_name
            current_instance["productName"] = product_name
        # write workfile information to context container.
        op_ctx = hou.node(CONTEXT_CONTAINER)
        if not op_ctx:
            op_ctx = self.host.create_context_node()
        workfile_data = {"workfile": current_instance.data_to_store()}
        imprint(op_ctx, workfile_data)
    def collect_instances(self):
        """Recreate the workfile instance from the context container node."""
        op_ctx = hou.node(CONTEXT_CONTAINER)
        instance = read(op_ctx)
        if not instance:
            return
        workfile = instance.get("workfile")
        if not workfile:
            return
        created_instance = CreatedInstance.from_existing(
            workfile, self
        )
        self._add_instance_to_context(created_instance)
    def update_instances(self, update_list):
        """Persist changed workfile instance data back onto the context node.

        Args:
            update_list (list): Pairs of (created_instance, changes).
        """
        op_ctx = hou.node(CONTEXT_CONTAINER)
        for created_inst, _changes in update_list:
            if created_inst["creator_identifier"] == self.identifier:
                workfile_data = {"workfile": created_inst.data_to_store()}
                imprint(op_ctx, workfile_data, update=True)

View file

@ -1,26 +0,0 @@
from ayon_core.pipeline import InventoryAction
from ayon_houdini.api.lib import (
get_camera_from_container,
set_camera_resolution
)
from ayon_core.pipeline.context_tools import get_current_folder_entity
class SetCameraResolution(InventoryAction):
    """Inventory action applying the current folder's resolution to cameras."""

    label = "Set Camera Resolution"
    icon = "desktop"
    color = "orange"

    @staticmethod
    def is_compatible(container):
        # Only containers produced by the camera loader are supported.
        return container.get("loader") == "CameraLoader"

    def process(self, containers):
        # Resolution comes from the currently active context folder.
        folder_entity = get_current_folder_entity()
        for entry in containers:
            camera_node = get_camera_from_container(entry["node"])
            set_camera_resolution(camera_node, folder_entity)

View file

@ -1,83 +0,0 @@
"""A module containing generic loader actions that will display in the Loader.
"""
from ayon_houdini.api import plugin
class SetFrameRangeLoader(plugin.HoudiniLoader):
    """Set frame range excluding pre- and post-handles"""

    product_types = {
        "animation",
        "camera",
        "pointcache",
        "vdbcache",
        "usd",
    }
    representations = {"abc", "vdb", "usd"}

    label = "Set frame range"
    order = 11
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):
        import hou

        # Read the frame range from the version attributes.
        attribs = context["version"]["attrib"]
        frame_start = attribs.get("frameStart")
        frame_end = attribs.get("frameEnd")

        if frame_start is None or frame_end is None:
            print(
                "Skipping setting frame range because start or "
                "end frame data is missing.."
            )
            return

        # Apply to both the global range and the playback range.
        hou.playbar.setFrameRange(frame_start, frame_end)
        hou.playbar.setPlaybackRange(frame_start, frame_end)
class SetFrameRangeWithHandlesLoader(plugin.HoudiniLoader):
    """Set frame range including pre- and post-handles"""

    product_types = {
        "animation",
        "camera",
        "pointcache",
        "vdbcache",
        "usd",
    }
    representations = {"abc", "vdb", "usd"}

    label = "Set frame range (with handles)"
    order = 12
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):
        import hou

        # Read the frame range from the version attributes.
        attribs = context["version"]["attrib"]
        frame_start = attribs.get("frameStart")
        frame_end = attribs.get("frameEnd")

        if frame_start is None or frame_end is None:
            print(
                "Skipping setting frame range because start or "
                "end frame data is missing.."
            )
            return

        # Widen the range by the handles (default 0 when absent).
        frame_start -= attribs.get("handleStart", 0)
        frame_end += attribs.get("handleEnd", 0)

        hou.playbar.setFrameRange(frame_start, frame_end)
        hou.playbar.setPlaybackRange(frame_start, frame_end)

View file

@ -1,89 +0,0 @@
import os
from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import (
pipeline,
plugin
)
class AbcLoader(plugin.HoudiniLoader):
    """Load Alembic"""
    product_types = {"model", "animation", "pointcache", "gpuCache"}
    label = "Load Alembic"
    representations = {"*"}
    extensions = {"abc"}
    order = -10
    icon = "code-fork"
    color = "orange"
    def load(self, context, name=None, namespace=None, data=None):
        """Load the Alembic into a new /obj geo node with an alembic SOP."""
        import hou
        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)
        # Remove the file node, it only loads static meshes
        # Houdini 17 has removed the file node from the geo node
        file_node = container.node("file1")
        if file_node:
            file_node.destroy()
        # Create an alembic node (supports animation)
        alembic = container.createNode("alembic", node_name=node_name)
        alembic.setParms({"fileName": file_path})
        # Position nodes nicely
        container.moveToGoodPosition()
        container.layoutChildren()
        nodes = [container, alembic]
        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )
    def update(self, container, context):
        """Point the contained alembic SOP at the new representation path."""
        repre_entity = context["representation"]
        node = container["node"]
        try:
            alembic_node = next(
                n for n in node.children() if n.type().name() == "alembic"
            )
        except StopIteration:
            self.log.error("Could not find node of type `alembic`")
            return
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        alembic_node.setParms({"fileName": file_path})
        # Update attribute
        node.setParms({"representation": repre_entity["id"]})
    def remove(self, container):
        # Destroying the geo node also destroys its children.
        node = container["node"]
        node.destroy()
    def switch(self, container, context):
        self.update(container, context)

View file

@ -1,81 +0,0 @@
import os
from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import (
pipeline,
plugin
)
class AbcArchiveLoader(plugin.HoudiniLoader):
    """Load Alembic as full geometry network hierarchy """
    product_types = {"model", "animation", "pointcache", "gpuCache"}
    label = "Load Alembic as Archive"
    representations = {"*"}
    extensions = {"abc"}
    order = -5
    icon = "code-fork"
    color = "orange"
    def load(self, context, name=None, namespace=None, data=None):
        """Create an `alembicarchive` node and build its node hierarchy."""
        import hou
        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create an Alembic archive node
        node = obj.createNode("alembicarchive", node_name=node_name)
        node.moveToGoodPosition()
        # TODO: add FPS of project / folder
        node.setParms({"fileName": file_path,
                       "channelRef": True})
        # Apply some magic
        node.parm("buildHierarchy").pressButton()
        node.moveToGoodPosition()
        nodes = [node]
        self[:] = nodes
        return pipeline.containerise(node_name,
                                     namespace,
                                     nodes,
                                     context,
                                     self.__class__.__name__,
                                     suffix="")
    def update(self, container, context):
        """Switch the archive to a new representation and rebuild hierarchy."""
        repre_entity = context["representation"]
        node = container["node"]
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        # Update attributes
        node.setParms({"fileName": file_path,
                       "representation": repre_entity["id"]})
        # Rebuild
        node.parm("buildHierarchy").pressButton()
    def remove(self, container):
        # Destroying the archive node also removes its built hierarchy.
        node = container["node"]
        node.destroy()
    def switch(self, container, context):
        self.update(container, context)

View file

@ -1,91 +0,0 @@
import os
import re
from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import (
pipeline,
plugin
)
class AssLoader(plugin.HoudiniLoader):
    """Load .ass with Arnold Procedural"""
    product_types = {"ass"}
    label = "Load Arnold Procedural"
    representations = {"ass"}
    order = -10
    icon = "code-fork"
    color = "orange"
    def load(self, context, name=None, namespace=None, data=None):
        """Create an `arnold::procedural` node reading the .ass file(s)."""
        import hou
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a new geo node
        procedural = obj.createNode("arnold::procedural", node_name=node_name)
        procedural.setParms(
            {
                "ar_filename": self.format_path(context["representation"])
            })
        nodes = [procedural]
        self[:] = nodes
        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )
    def update(self, container, context):
        """Retarget the procedural node to a new representation."""
        # Update the file path
        repre_entity = context["representation"]
        procedural = container["node"]
        procedural.setParms({"ar_filename": self.format_path(repre_entity)})
        # Update attribute
        procedural.setParms({"representation": repre_entity["id"]})
    def remove(self, container):
        node = container["node"]
        node.destroy()
    @staticmethod
    def format_path(representation):
        """Format file path correctly for single ass.* or ass.* sequence.
        Args:
            representation (dict): representation to be loaded.
        Returns:
            str: Formatted path to be used by the input node.
        Raises:
            RuntimeError: When the resolved path does not exist on disk.
        """
        path = get_representation_path(representation)
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: {}".format(path))
        is_sequence = bool(representation["context"].get("frame"))
        # The path is either a single file or sequence in a folder.
        if is_sequence:
            dir_path, file_name = os.path.split(path)
            # Replace the numeric frame token with Houdini's $F4 expression,
            # keeping any compression suffix after "ass" (e.g. ass.gz).
            path = os.path.join(
                dir_path,
                re.sub(r"(.*)\.(\d+)\.(ass.*)", "\\1.$F4.\\3", file_name)
            )
        return os.path.normpath(path).replace("\\", "/")
    def switch(self, container, context):
        self.update(container, context)

View file

@ -1,52 +0,0 @@
from ayon_core.pipeline import load
from ayon_houdini.api.lib import find_active_network
import hou
class LOPLoadAssetLoader(load.LoaderPlugin):
    """Load reference/payload into Solaris using AYON `lop_import` LOP"""

    product_types = {"*"}
    label = "Load Asset (LOPs)"
    representations = ["usd", "abc", "usda", "usdc"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        # Define node name
        if not namespace:
            namespace = context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create the import node inside the active LOP network.
        network = find_active_network(
            category=hou.lopNodeTypeCategory(),
            default="/stage"
        )
        lop_node = network.createNode("ayon::lop_import", node_name=node_name)
        lop_node.moveToGoodPosition()

        # Point the node at the representation.
        self._apply_representation(lop_node, context)

        self[:] = [lop_node]

    def update(self, container, context):
        self._apply_representation(container["node"], context)

    def remove(self, container):
        container["node"].destroy()

    def switch(self, container, context):
        self.update(container, context)

    @staticmethod
    def _apply_representation(node, context):
        """Set the representation id parm and trigger its callbacks."""
        parm = node.parm("representation")
        parm.set(context["representation"]["id"])
        parm.pressButton()  # trigger callbacks

View file

@ -1,111 +0,0 @@
# -*- coding: utf-8 -*-
import os
import re
from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import (
pipeline,
plugin
)
class BgeoLoader(plugin.HoudiniLoader):
    """Load bgeo files to Houdini."""
    label = "Load bgeo"
    product_types = {"model", "pointcache", "bgeo"}
    representations = {
        "bgeo", "bgeosc", "bgeogz",
        "bgeo.sc", "bgeo.gz", "bgeo.lzma", "bgeo.bz2"}
    order = -10
    icon = "code-fork"
    color = "orange"
    def load(self, context, name=None, namespace=None, data=None):
        """Create a /obj geo node with a file SOP reading the bgeo."""
        import hou
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)
        # Remove the file node, it only loads static meshes
        # Houdini 17 has removed the file node from the geo node
        file_node = container.node("file1")
        if file_node:
            file_node.destroy()
        # Explicitly create a file node
        path = self.filepath_from_context(context)
        file_node = container.createNode("file", node_name=node_name)
        file_node.setParms(
            {"file": self.format_path(path, context["representation"])})
        # Set display on last node
        file_node.setDisplayFlag(True)
        nodes = [container, file_node]
        self[:] = nodes
        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )
    @staticmethod
    def format_path(path, representation):
        """Format file path correctly for single bgeo or bgeo sequence.

        Args:
            path (str): Resolved representation file path.
            representation (dict): Representation entity.

        Returns:
            str: Path with `$F4` frame token for sequences and forward
                slashes as separators.

        Raises:
            RuntimeError: When the path does not exist on disk.
        """
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)
        is_sequence = bool(representation["context"].get("frame"))
        # The path is either a single file or sequence in a folder.
        if not is_sequence:
            filename = path
        else:
            # Replace the frame number with Houdini's $F4 expression,
            # keeping any compression suffix after "bgeo" (e.g. bgeo.sc).
            filename = re.sub(r"(.*)\.(\d+)\.(bgeo.*)", "\\1.$F4.\\3", path)
            # NOTE(review): `filename` is already an absolute path, so this
            # join is effectively a no-op; kept for behavior parity.
            filename = os.path.join(path, filename)
        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")
        return filename
    def update(self, container, context):
        """Point the file SOP at the new representation path."""
        repre_entity = context["representation"]
        node = container["node"]
        try:
            file_node = next(
                n for n in node.children() if n.type().name() == "file"
            )
        except StopIteration:
            # Bug fix: message used to say `alembic` (copy-paste from the
            # Alembic loader) although a `file` node is searched for here.
            self.log.error("Could not find node of type `file`")
            return
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = self.format_path(file_path, repre_entity)
        file_node.setParms({"file": file_path})
        # Update attribute
        node.setParms({"representation": repre_entity["id"]})
    def remove(self, container):
        # Destroying the geo node also destroys its children.
        node = container["node"]
        node.destroy()
    def switch(self, container, context):
        self.update(container, context)

View file

@ -1,212 +0,0 @@
import hou
from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import (
pipeline,
plugin
)
from ayon_houdini.api.lib import (
set_camera_resolution,
get_camera_from_container
)
ARCHIVE_EXPRESSION = ('__import__("_alembic_hom_extensions")'
'.alembicGetCameraDict')
def transfer_non_default_values(src, dest, ignore=None):
    """Copy parm from src to dest.
    Because the Alembic Archive rebuilds the entire node
    hierarchy on triggering "Build Hierarchy" we want to
    preserve any local tweaks made by the user on the camera
    for ease of use. That could be a background image, a
    resolution change or even Redshift camera parameters.
    We try to do so by finding all Parms that exist on both
    source and destination node, include only those that both
    are not at their default value, they must be visible,
    we exclude those that have the special "alembic archive"
    channel expression and ignore certain Parm types.

    Args:
        src (hou.Node): Node to read parm values from.
        dest (hou.Node): Node to copy parm values onto.
        ignore (set, optional): Parm names to skip entirely.
    """
    # Parm template types that carry no transferable value.
    ignore_types = {
        hou.parmTemplateType.Toggle,
        hou.parmTemplateType.Menu,
        hou.parmTemplateType.Button,
        hou.parmTemplateType.FolderSet,
        hou.parmTemplateType.Separator,
        hou.parmTemplateType.Label,
    }
    # Make sure parm states (visibility, defaults) are current.
    src.updateParmStates()
    for parm in src.allParms():
        if ignore and parm.name() in ignore:
            continue
        # If destination parm does not exist, ignore..
        dest_parm = dest.parm(parm.name())
        if not dest_parm:
            continue
        # Ignore values that are currently at default
        if parm.isAtDefault() and dest_parm.isAtDefault():
            continue
        if not parm.isVisible():
            # Ignore hidden parameters, assume they
            # are implementation details
            continue
        expression = None
        try:
            expression = parm.expression()
        except hou.OperationFailed:
            # No expression present
            pass
        if expression is not None and ARCHIVE_EXPRESSION in expression:
            # Assume it's part of the automated connections that the
            # Alembic Archive makes on loading of the camera and thus we do
            # not want to transfer the expression
            continue
        # Ignore folders, separators, etc.
        if parm.parmTemplate().type() in ignore_types:
            continue
        print("Preserving attribute: %s" % parm.name())
        dest_parm.setFromParm(parm)
class CameraLoader(plugin.HoudiniLoader):
    """Load camera from an Alembic file"""
    product_types = {"camera"}
    label = "Load Camera (abc)"
    representations = {"abc"}
    order = -10
    icon = "code-fork"
    color = "orange"
    def load(self, context, name=None, namespace=None, data=None):
        """Load the camera as an `alembicarchive` and apply resolution."""
        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context).replace("\\", "/")
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a archive node
        node = self.create_and_connect(obj, "alembicarchive", node_name)
        # TODO: add FPS of project / folder
        node.setParms({"fileName": file_path, "channelRef": True})
        # Apply some magic
        node.parm("buildHierarchy").pressButton()
        node.moveToGoodPosition()
        # Create an alembic xform node
        nodes = [node]
        camera = get_camera_from_container(node)
        self._match_maya_render_mask(camera)
        set_camera_resolution(camera, folder_entity=context["folder"])
        self[:] = nodes
        return pipeline.containerise(node_name,
                                     namespace,
                                     nodes,
                                     context,
                                     self.__class__.__name__,
                                     suffix="")
    def update(self, container, context):
        """Update the archive while preserving user tweaks on the camera.

        The hierarchy rebuild destroys the camera node, so the existing
        camera is copied aside first and its non-default parm values are
        transferred onto the rebuilt camera afterwards.
        """
        repre_entity = context["representation"]
        node = container["node"]
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        # Update attributes
        node.setParms({"fileName": file_path,
                       "representation": repre_entity["id"]})
        # Store the cam temporarily next to the Alembic Archive
        # so that we can preserve parm values the user set on it
        # after build hierarchy was triggered.
        old_camera = get_camera_from_container(node)
        temp_camera = old_camera.copyTo(node.parent())
        # Rebuild
        node.parm("buildHierarchy").pressButton()
        # Apply values to the new camera
        new_camera = get_camera_from_container(node)
        transfer_non_default_values(temp_camera,
                                    new_camera,
                                    # The hidden uniform scale attribute
                                    # gets a default connection to
                                    # "icon_scale" just skip that completely
                                    ignore={"scale"})
        self._match_maya_render_mask(new_camera)
        set_camera_resolution(new_camera)
        temp_camera.destroy()
    def switch(self, container, context):
        self.update(container, context)
    def remove(self, container):
        node = container["node"]
        node.destroy()
    def create_and_connect(self, node, node_type, name=None):
        """Create a node within a node which and connect it to the input
        Args:
            node(hou.Node): parent of the new node
            node_type(str) name of the type of node, eg: 'alembic'
            name(str, Optional): name of the node
        Returns:
            hou.Node
        """
        if name:
            new_node = node.createNode(node_type, node_name=name)
        else:
            new_node = node.createNode(node_type)
        new_node.moveToGoodPosition()
        return new_node
    def _match_maya_render_mask(self, camera):
        """Workaround to match Maya render mask in Houdini"""
        # Extend the aperture expression so the final value is scaled by
        # the resolution/pixel-aspect ratio, mirroring Houdini's own FBX
        # importer logic.
        parm = camera.parm("aperture")
        expression = parm.expression()
        expression = expression.replace("return ", "aperture = ")
        expression += """
# Match maya render mask (logic from Houdini's own FBX importer)
node = hou.pwd()
resx = node.evalParm('resx')
resy = node.evalParm('resy')
aspect = node.evalParm('aspect')
aperture *= min(1, (resx / resy * aspect) / 1.5)
return aperture
"""
        parm.setExpression(expression, language=hou.exprLanguage.Python)

View file

@ -1,140 +0,0 @@
# -*- coding: utf-8 -*-
"""Fbx Loader for houdini. """
from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import (
pipeline,
plugin
)
class FbxLoader(plugin.HoudiniLoader):
    """Load fbx files. """
    label = "Load FBX"
    icon = "code-fork"
    color = "orange"
    order = -10
    product_types = {"*"}
    representations = {"*"}
    extensions = {"fbx"}
    def load(self, context, name=None, namespace=None, data=None):
        """Build the FBX load network and containerise it."""
        # get file path from context
        file_path = self.filepath_from_context(context)
        file_path = file_path.replace("\\", "/")
        # get necessary data
        namespace, node_name = self.get_node_name(context, name, namespace)
        # create load tree
        nodes = self.create_load_node_tree(file_path, node_name, name)
        self[:] = nodes
        # Call containerise function which does some automations for you
        # like moving created nodes to the AVALON_CONTAINERS subnetwork
        containerised_nodes = pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )
        return containerised_nodes
    def update(self, container, context):
        """Point the file SOP at the new representation path."""
        repre_entity = context["representation"]
        node = container["node"]
        try:
            file_node = next(
                n for n in node.children() if n.type().name() == "file"
            )
        except StopIteration:
            self.log.error("Could not find node of type `file`")
            return
        # Update the file path from representation
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        file_node.setParms({"file": file_path})
        # Update attribute
        node.setParms({"representation": repre_entity["id"]})
    def remove(self, container):
        # Destroying the geo node also destroys its children.
        node = container["node"]
        node.destroy()
    def switch(self, container, context):
        self.update(container, context)
    def get_node_name(self, context, name=None, namespace=None):
        """Define node name.

        Returns:
            tuple: (namespace, node_name) where namespace falls back to
                the folder name of the context.
        """
        if not namespace:
            namespace = context["folder"]["name"]
        if namespace:
            node_name = "{}_{}".format(namespace, name)
        else:
            node_name = name
        return namespace, node_name
    def create_load_node_tree(self, file_path, node_name, product_name):
        """Create Load network.
        you can start building your tree at any obj level.
        it'll be much easier to build it in the root obj level.
        Afterwards, your tree will be automatically moved to
        '/obj/AVALON_CONTAINERS' subnetwork.

        Args:
            file_path (str): Path to the fbx file (forward slashes).
            node_name (str): Name for the geo and file nodes.
            product_name (str): Used to suffix helper node names.

        Returns:
            list: Created nodes [geo, file, attribdelete, null].
        """
        import hou
        # Get the root obj level
        obj = hou.node("/obj")
        # Create a new obj geo node
        parent_node = obj.createNode("geo", node_name=node_name)
        # In older houdini,
        # when creating a new obj geo node, a default file node will be
        # automatically created.
        # so, we will delete it if exists.
        file_node = parent_node.node("file1")
        if file_node:
            file_node.destroy()
        # Create a new file node
        file_node = parent_node.createNode("file", node_name=node_name)
        file_node.setParms({"file": file_path})
        # Create attribute delete to strip FBX-specific point attributes.
        attribdelete_name = "attribdelete_{}".format(product_name)
        attribdelete = parent_node.createNode("attribdelete",
                                              node_name=attribdelete_name)
        attribdelete.setParms({"ptdel": "fbx_*"})
        attribdelete.setInput(0, file_node)
        # Create a Null node
        null_name = "OUT_{}".format(product_name)
        null = parent_node.createNode("null", node_name=null_name)
        null.setInput(0, attribdelete)
        # Ensure display flag is on the file_node input node and not on the OUT
        # node to optimize "debug" displaying in the viewport.
        file_node.setDisplayFlag(True)
        # Set new position for children nodes
        parent_node.layoutChildren()
        # Return all the nodes
        return [parent_node, file_node, attribdelete, null]

View file

@ -1,130 +0,0 @@
import os
import re
import hou
from ayon_houdini.api import (
pipeline,
plugin
)
class FilePathLoader(plugin.HoudiniLoader):
    """Load a managed filepath to a null node.
    This is useful if for a particular workflow there is no existing loader
    yet. A Houdini artists can load as the generic filepath loader and then
    reference the relevant Houdini parm to use the exact value. The benefit
    is that this filepath will be managed and can be updated as usual.
    """
    label = "Load filepath to node"
    order = 9
    icon = "link"
    color = "white"
    product_types = {"*"}
    representations = {"*"}
    def load(self, context, name=None, namespace=None, data=None):
        """Create a null node carrying the managed filepath as a spare parm."""
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a null node
        container = obj.createNode("null", node_name=node_name)
        # Destroy any children
        for node in container.children():
            node.destroy()
        # Add filepath attribute, set value as default value
        filepath = self.format_path(
            path=self.filepath_from_context(context),
            representation=context["representation"]
        )
        parm_template_group = container.parmTemplateGroup()
        attr_folder = hou.FolderParmTemplate("attributes_folder", "Attributes")
        parm = hou.StringParmTemplate(name="filepath",
                                      label="Filepath",
                                      num_components=1,
                                      default_value=(filepath,))
        attr_folder.addParmTemplate(parm)
        parm_template_group.append(attr_folder)
        # Hide some default labels
        for folder_label in ["Transform", "Render", "Misc", "Redshift OBJ"]:
            folder = parm_template_group.findFolder(folder_label)
            if not folder:
                continue
            parm_template_group.hideFolder(folder_label, True)
        container.setParmTemplateGroup(parm_template_group)
        # Keep the node invisible/unselectable; it only carries the parm.
        container.setDisplayFlag(False)
        container.setSelectableInViewport(False)
        container.useXray(False)
        nodes = [container]
        self[:] = nodes
        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )
    def update(self, container, context):
        """Refresh the filepath parm value and its default for the new repre."""
        # Update the file path
        representation_entity = context["representation"]
        file_path = self.format_path(
            path=self.filepath_from_context(context),
            representation=representation_entity
        )
        node = container["node"]
        node.setParms({
            "filepath": file_path,
            "representation": str(representation_entity["id"])
        })
        # Update the parameter default value (cosmetics)
        parm_template_group = node.parmTemplateGroup()
        parm = parm_template_group.find("filepath")
        parm.setDefaultValue((file_path,))
        parm_template_group.replace(parm_template_group.find("filepath"),
                                    parm)
        node.setParmTemplateGroup(parm_template_group)
    def switch(self, container, context):
        self.update(container, context)
    def remove(self, container):
        node = container["node"]
        node.destroy()
    @staticmethod
    def format_path(path: str, representation: dict) -> str:
        """Format file path for sequence with $F.

        Args:
            path: Resolved representation file path.
            representation: Representation entity (used for frame/ext).

        Returns:
            Normalized path with forward slashes; frame numbers of
            sequences are replaced by a padded `$F<n>` token.

        Raises:
            RuntimeError: When the path does not exist on disk.
        """
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)
        # The path is either a single file or sequence in a folder.
        frame = representation["context"].get("frame")
        if frame is not None:
            # Substitute frame number in sequence with $F with padding
            ext = representation.get("ext", representation["name"])
            token = "$F{}".format(len(frame))  # e.g. $F4
            pattern = r"\.(\d+)\.{ext}$".format(ext=re.escape(ext))
            path = re.sub(pattern, ".{}.{}".format(token, ext), path)
        return os.path.normpath(path).replace("\\", "/")

View file

@ -1,121 +0,0 @@
# -*- coding: utf-8 -*-
import os
import hou
from ayon_core.pipeline import (
get_representation_path,
AVALON_CONTAINER_ID
)
from ayon_core.pipeline.load import LoadError
from ayon_houdini.api import (
lib,
pipeline,
plugin
)
class HdaLoader(plugin.HoudiniLoader):
    """Load Houdini Digital Asset file."""
    product_types = {"hda"}
    label = "Load Hda"
    representations = {"hda"}
    order = -10
    icon = "code-fork"
    color = "orange"
    def load(self, context, name=None, namespace=None, data=None):
        """Install the HDA file and instantiate its last definition.

        Raises:
            LoadError: When the file contains no HDA definitions.
        """
        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        namespace = namespace or context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        hou.hda.installFile(file_path)
        hda_defs = hou.hda.definitionsInFile(file_path)
        if not hda_defs:
            raise LoadError(f"No HDA definitions found in file: {file_path}")
        # Use the last definition in the file as the one to instantiate.
        parent_node = self._create_dedicated_parent_node(hda_defs[-1])
        # Get the type name from the HDA definition.
        type_name = hda_defs[-1].nodeTypeName()
        hda_node = parent_node.createNode(type_name, node_name)
        hda_node.moveToGoodPosition()
        # Imprint it manually
        data = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": self.__class__.__name__,
            "representation": context["representation"]["id"],
        }
        lib.imprint(hda_node, data)
        return hda_node
    def update(self, container, context):
        """Install the new HDA version and mark its definition preferred."""
        repre_entity = context["representation"]
        hda_node = container["node"]
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        hou.hda.installFile(file_path)
        # Prefer the definition coming from the newly installed file.
        defs = hda_node.type().allInstalledDefinitions()
        def_paths = [d.libraryFilePath() for d in defs]
        new = def_paths.index(file_path)
        defs[new].setIsPreferred(True)
        hda_node.setParms({
            "representation": repre_entity["id"]
        })
    def remove(self, container):
        """Destroy the HDA node and its parent network when left empty."""
        node = container["node"]
        parent = node.parent()
        node.destroy()
        if parent.path() == pipeline.AVALON_CONTAINERS:
            return
        # Remove parent if empty.
        if not parent.children():
            parent.destroy()
    def _create_dedicated_parent_node(self, hda_def):
        """Return (and create if needed) a parent network matching the
        HDA definition's node type category.
        """
        # Get the root node
        parent_node = pipeline.get_or_create_avalon_container()
        node = None
        node_type = None
        if hda_def.nodeTypeCategory() == hou.objNodeTypeCategory():
            return parent_node
        elif hda_def.nodeTypeCategory() == hou.chopNodeTypeCategory():
            node_type, node_name = "chopnet", "MOTION"
        elif hda_def.nodeTypeCategory() == hou.cop2NodeTypeCategory():
            node_type, node_name = "cop2net", "IMAGES"
        elif hda_def.nodeTypeCategory() == hou.dopNodeTypeCategory():
            node_type, node_name = "dopnet", "DOPS"
        elif hda_def.nodeTypeCategory() == hou.ropNodeTypeCategory():
            node_type, node_name = "ropnet", "ROPS"
        elif hda_def.nodeTypeCategory() == hou.lopNodeTypeCategory():
            node_type, node_name = "lopnet", "LOPS"
        elif hda_def.nodeTypeCategory() == hou.sopNodeTypeCategory():
            node_type, node_name = "geo", "SOPS"
        elif hda_def.nodeTypeCategory() == hou.topNodeTypeCategory():
            node_type, node_name = "topnet", "TOPS"
        # TODO: Create a dedicated parent node based on Vop Node vex context.
        elif hda_def.nodeTypeCategory() == hou.vopNodeTypeCategory():
            node_type, node_name = "matnet", "MATSandVOPS"
        node = parent_node.node(node_name)
        if not node:
            node = parent_node.createNode(node_type, node_name)
        node.moveToGoodPosition()
        return node

View file

@ -1,188 +0,0 @@
import os
import re
import hou
from ayon_core.pipeline import (
get_representation_path,
AVALON_CONTAINER_ID,
)
from ayon_houdini.api import (
pipeline,
plugin,
lib
)
def get_image_avalon_container():
    """The COP2 files must be in a COP2 network.

    So we maintain a single entry point within AVALON_CONTAINERS,
    just for ease of use.

    Returns:
        hou.Node: The "IMAGES" cop2net inside the AVALON_CONTAINERS
            subnet, creating both when they do not exist yet.
    """
    path = pipeline.AVALON_CONTAINERS
    avalon_container = hou.node(path)
    if not avalon_container:
        # Let's create avalon container secretly
        # but make sure the pipeline still is built the
        # way we anticipate it was built, asserting it.
        # NOTE(review): `assert` is stripped under `python -O`; consider an
        # explicit check + raise if this invariant must always hold.
        assert path == "/obj/AVALON_CONTAINERS"
        parent = hou.node("/obj")
        avalon_container = parent.createNode(
            "subnet", node_name="AVALON_CONTAINERS"
        )
    image_container = hou.node(path + "/IMAGES")
    if not image_container:
        image_container = avalon_container.createNode(
            "cop2net", node_name="IMAGES"
        )
        image_container.moveToGoodPosition()
    return image_container
class ImageLoader(plugin.HoudiniLoader):
    """Load images into COP2.

    Creates a `file` COP2 node inside the shared "IMAGES" network and
    imprints it with container data so it can be managed afterwards.
    """
    product_types = {
        "imagesequence",
        "review",
        "render",
        "plate",
        "image",
        "online",
    }
    label = "Load Image (COP2)"
    representations = {"*"}
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        """Create a `file` COP2 node for the representation's image(s)."""
        # Format file name, Houdini only wants forward slashes
        path = self.filepath_from_context(context)
        path = self.format_path(path, representation=context["representation"])
        # Get the root node
        parent = get_image_avalon_container()
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        node = parent.createNode("file", node_name=node_name)
        node.moveToGoodPosition()
        parms = {"filename1": path}
        parms.update(self.get_colorspace_parms(context["representation"]))
        node.setParms(parms)
        # Imprint it manually
        data = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
        }
        # todo: add folder="Avalon"
        lib.imprint(node, data)
        return node

    def update(self, container, context):
        """Update the file path (and colorspace parms) to the new version."""
        repre_entity = context["representation"]
        node = container["node"]
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = self.format_path(file_path, repre_entity)
        parms = {
            "filename1": file_path,
            "representation": repre_entity["id"],
        }
        parms.update(self.get_colorspace_parms(repre_entity))
        # Update attributes
        node.setParms(parms)

    def remove(self, container):
        """Destroy the node; clean up the IMAGES network when empty."""
        node = container["node"]
        # Let's clean up the IMAGES COP2 network
        # if it ends up being empty and we deleted
        # the last file node. Store the parent
        # before we delete the node.
        parent = node.parent()
        node.destroy()
        if not parent.children():
            parent.destroy()

    @staticmethod
    def format_path(path, representation):
        """Format file path correctly for single image or sequence."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)
        ext = os.path.splitext(path)[-1]
        is_sequence = bool(representation["context"].get("frame"))
        # The path is either a single file or sequence in a folder.
        if not is_sequence:
            filename = path
        else:
            # Replace the trailing frame number with Houdini's $F4 token.
            filename = re.sub(r"(.*)\.(\d+){}$".format(re.escape(ext)),
                              "\\1.$F4{}".format(ext),
                              path)
            # NOTE(review): `filename` is already an absolute path here so
            # this join is effectively a no-op — confirm intent.
            filename = os.path.join(path, filename)
        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")
        return filename

    def get_colorspace_parms(self, representation: dict) -> dict:
        """Return the color space parameters.

        Returns the values for the colorspace parameters on the node if
        there is colorspace data on the representation.

        Arguments:
            representation (dict): The representation entity.

        Returns:
            dict: Parm to value mapping if colorspace data is defined,
                otherwise an empty dict.
        """
        # Using OCIO colorspace on COP2 File node is only supported in Hou 20+
        major, _, _ = hou.applicationVersion()
        if major < 20:
            return {}
        data = representation.get("data", {}).get("colorspaceData", {})
        if not data:
            return {}
        colorspace = data["colorspace"]
        if colorspace:
            return {
                "colorspace": 3,  # Use OpenColorIO
                "ocio_space": colorspace
            }
        # Bugfix: previously fell through returning None, which made
        # callers crash on `parms.update(None)` (TypeError) whenever
        # colorspace data existed but the "colorspace" value was empty.
        return {}

    def switch(self, container, representation):
        self.update(container, representation)

View file

@ -1,113 +0,0 @@
import os
import re
import hou
from ayon_core.pipeline import get_representation_path
from ayon_core.pipeline.load import LoadError
from ayon_houdini.api import (
pipeline,
plugin
)
class RedshiftProxyLoader(plugin.HoudiniLoader):
    """Load Redshift Proxy.

    Creates an object-level `geo` node with the Redshift proxy object
    properties enabled and pointed at the ``.rs`` file.
    """
    product_types = {"redshiftproxy"}
    label = "Load Redshift Proxy"
    representations = {"rs"}
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        """Create the proxy-enabled geo container for the representation."""
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)
        # Check whether the Redshift parameters exist - if not, then likely
        # redshift is not set up or initialized correctly
        if not container.parm("RS_objprop_proxy_enable"):
            container.destroy()
            raise LoadError("Unable to initialize geo node with Redshift "
                            "attributes. Make sure you have the Redshift "
                            "plug-in set up correctly for Houdini.")
        # Enable by default
        container.setParms({
            "RS_objprop_proxy_enable": True,
            "RS_objprop_proxy_file": self.format_path(
                self.filepath_from_context(context),
                context["representation"])
        })
        # Remove the file node, it only loads static meshes
        # Houdini 17 has removed the file node from the geo node
        file_node = container.node("file1")
        if file_node:
            file_node.destroy()
        # Add this stub node inside so it previews ok
        proxy_sop = container.createNode("redshift_proxySOP",
                                         node_name=node_name)
        proxy_sop.setDisplayFlag(True)
        nodes = [container, proxy_sop]
        self[:] = nodes
        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, context):
        """Point the proxy file parm at the new representation's file."""
        repre_entity = context["representation"]
        # Update the file path
        file_path = get_representation_path(repre_entity)
        node = container["node"]
        node.setParms({
            "RS_objprop_proxy_file": self.format_path(
                file_path, repre_entity)
        })
        # Update attribute
        node.setParms({"representation": repre_entity["id"]})

    def remove(self, container):
        """Destroy the container node."""
        node = container["node"]
        node.destroy()

    @staticmethod
    def format_path(path, representation):
        """Format file path correctly for single redshift proxy
        or redshift proxy sequence."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)
        is_sequence = bool(representation["context"].get("frame"))
        # The path is either a single file or sequence in a folder.
        if is_sequence:
            # Swap the frame number for Houdini's $F4 frame token.
            filename = re.sub(r"(.*)\.(\d+)\.(rs.*)", "\\1.$F4.\\3", path)
            # NOTE(review): `filename` is already an absolute path here so
            # this join is effectively a no-op — confirm intent.
            filename = os.path.join(path, filename)
        else:
            filename = path
        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")
        return filename

View file

@ -1,87 +0,0 @@
from ayon_core.pipeline import (
get_representation_path,
AVALON_CONTAINER_ID,
)
from ayon_houdini.api import (
plugin,
lib
)
class USDSublayerLoader(plugin.HoudiniLoader):
    """Sublayer USD file in Solaris.

    Creates a `sublayer` LOP in /stage pointing at the USD file and
    imprints it with container data so it can be managed afterwards.
    """
    product_types = {
        "usd",
        "usdCamera",
    }
    label = "Sublayer USD"
    representations = {"usd", "usda", "usdlc", "usdnc", "abc"}
    order = 1
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        """Create the `sublayer` LOP for the representation."""
        import os
        import hou
        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        # Get the root node
        stage = hou.node("/stage")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create USD reference
        container = stage.createNode("sublayer", node_name=node_name)
        container.setParms({"filepath1": file_path})
        container.moveToGoodPosition()
        # Imprint it manually
        data = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
        }
        # todo: add folder="Avalon"
        lib.imprint(container, data)
        return container

    def update(self, container, context):
        """Switch the sublayer to the new representation's file path."""
        repre_entity = context["representation"]
        node = container["node"]
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        # Update attributes
        node.setParms(
            {
                "filepath1": file_path,
                "representation": repre_entity["id"],
            }
        )
        # Reload files
        node.parm("reload").pressButton()

    def remove(self, container):
        """Destroy the container node."""
        node = container["node"]
        node.destroy()

    def switch(self, container, context):
        self.update(container, context)

View file

@ -1,87 +0,0 @@
from ayon_core.pipeline import (
get_representation_path,
AVALON_CONTAINER_ID,
)
from ayon_houdini.api import (
plugin,
lib
)
class USDReferenceLoader(plugin.HoudiniLoader):
    """Reference USD file in Solaris.

    Creates a `reference` LOP in /stage pointing at the USD file and
    imprints it with container data so it can be managed afterwards.
    """
    product_types = {
        "usd",
        "usdCamera",
    }
    label = "Reference USD"
    representations = {"usd", "usda", "usdlc", "usdnc", "abc"}
    order = -8
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        """Create the `reference` LOP for the representation."""
        import os
        import hou
        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        # Get the root node
        stage = hou.node("/stage")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create USD reference
        container = stage.createNode("reference", node_name=node_name)
        container.setParms({"filepath1": file_path})
        container.moveToGoodPosition()
        # Imprint it manually
        data = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": node_name,
            "namespace": namespace,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
        }
        # todo: add folder="Avalon"
        lib.imprint(container, data)
        return container

    def update(self, container, context):
        """Switch the reference to the new representation's file path."""
        repre_entity = context["representation"]
        node = container["node"]
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        # Update attributes
        node.setParms(
            {
                "filepath1": file_path,
                "representation": repre_entity["id"],
            }
        )
        # Reload files
        node.parm("reload").pressButton()

    def remove(self, container):
        """Destroy the container node."""
        node = container["node"]
        node.destroy()

    def switch(self, container, context):
        self.update(container, context)

View file

@ -1,79 +0,0 @@
import os
from ayon_houdini.api import (
pipeline,
plugin
)
class SopUsdImportLoader(plugin.HoudiniLoader):
    """Load USD to SOPs via `usdimport`"""
    label = "Load USD to SOPs"
    product_types = {"*"}
    representations = {"usd"}
    order = -6
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        """Create a geo container with a `usdimport` SOP for the file."""
        import hou
        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)
        # Create a usdimport node
        usdimport = container.createNode("usdimport", node_name=node_name)
        usdimport.setParms({"filepath1": file_path})
        # NOTE(review): a previous comment mentioned repositioning an
        # "unpack" node, but no unpack node is created or moved here.
        nodes = [container, usdimport]
        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, context):
        """Update the inner `usdimport` SOP to the new file path."""
        node = container["node"]
        try:
            usdimport_node = next(
                n for n in node.children() if n.type().name() == "usdimport"
            )
        except StopIteration:
            self.log.error("Could not find node of type `usdimport`")
            return
        # Update the file path
        file_path = self.filepath_from_context(context)
        file_path = file_path.replace("\\", "/")
        usdimport_node.setParms({"filepath1": file_path})
        # Update attribute
        node.setParms({"representation": context["representation"]["id"]})

    def remove(self, container):
        """Destroy the container node."""
        node = container["node"]
        node.destroy()

    def switch(self, container, representation):
        self.update(container, representation)

View file

@ -1,108 +0,0 @@
import os
import re
from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import (
pipeline,
plugin
)
class VdbLoader(plugin.HoudiniLoader):
    """Load VDB.

    Creates an object-level `geo` node with a `file` SOP pointed at the
    ``.vdb`` file or sequence.
    """
    product_types = {"vdbcache"}
    label = "Load VDB"
    representations = {"vdb"}
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        """Create the geo container with a `file` SOP for the VDB(s)."""
        import hou
        # Get the root node
        obj = hou.node("/obj")
        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name
        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)
        # Remove the file node, it only loads static meshes
        # Houdini 17 has removed the file node from the geo node
        file_node = container.node("file1")
        if file_node:
            file_node.destroy()
        # Explicitly create a file node
        file_node = container.createNode("file", node_name=node_name)
        path = self.filepath_from_context(context)
        file_node.setParms(
            {"file": self.format_path(path, context["representation"])})
        # Set display on last node
        file_node.setDisplayFlag(True)
        nodes = [container, file_node]
        self[:] = nodes
        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    @staticmethod
    def format_path(path, representation):
        """Format file path correctly for single vdb or vdb sequence."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)
        is_sequence = bool(representation["context"].get("frame"))
        # The path is either a single file or sequence in a folder.
        if not is_sequence:
            filename = path
        else:
            # Swap the frame number for Houdini's $F4 frame token.
            filename = re.sub(r"(.*)\.(\d+)\.vdb$", "\\1.$F4.vdb", path)
            # NOTE(review): `filename` is already an absolute path here so
            # this join is effectively a no-op — confirm intent.
            filename = os.path.join(path, filename)
        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")
        return filename

    def update(self, container, context):
        """Update the inner `file` SOP to the new representation."""
        repre_entity = context["representation"]
        node = container["node"]
        try:
            file_node = next(
                n for n in node.children() if n.type().name() == "file"
            )
        except StopIteration:
            # Bugfix: the message previously referenced `alembic`, a
            # copy-paste leftover — the search above is for a `file` node.
            self.log.error("Could not find node of type `file`")
            return
        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = self.format_path(file_path, repre_entity)
        file_node.setParms({"file": file_path})
        # Update attribute
        node.setParms({"representation": repre_entity["id"]})

    def remove(self, container):
        """Destroy the container node."""
        node = container["node"]
        node.destroy()

    def switch(self, container, context):
        self.update(container, context)

View file

@ -1,48 +0,0 @@
import os
import platform
import subprocess
from ayon_core.lib.vendor_bin_utils import find_executable
from ayon_houdini.api import plugin
class ShowInUsdview(plugin.HoudiniLoader):
    """Open USD file in usdview.

    Launches Houdini's bundled `usdview` as a separate process; nothing
    is loaded into the current scene.
    """
    label = "Show in usdview"
    representations = {"*"}
    product_types = {"*"}
    extensions = {"usd", "usda", "usdlc", "usdnc", "abc"}
    order = 15
    icon = "code-fork"
    color = "white"

    def load(self, context, name=None, namespace=None, data=None):
        """Spawn usdview on the representation's file path."""
        from pathlib import Path
        if platform.system() == "Windows":
            executable = "usdview.bat"
        else:
            executable = "usdview"
        usdview = find_executable(executable)
        if not usdview:
            raise RuntimeError("Unable to find usdview")
        # For some reason Windows can return the path like:
        #   C:/PROGRA~1/SIDEEF~1/HOUDIN~1.435/bin/usdview
        # convert to resolved path so `subprocess` can take it
        usdview = str(Path(usdview).resolve().as_posix())
        filepath = self.filepath_from_context(context)
        filepath = os.path.normpath(filepath)
        filepath = filepath.replace("\\", "/")
        if not os.path.exists(filepath):
            self.log.error("File does not exist: %s" % filepath)
            return
        self.log.info("Start houdini variant of usdview...")
        # Fire-and-forget: don't block Houdini on the viewer process.
        subprocess.Popen([usdview, filepath, "--renderer", "GL"])

View file

@ -1,42 +0,0 @@
import hou
import pyblish.api
from ayon_houdini.api import plugin
class CollectInstanceActiveState(plugin.HoudiniInstancePlugin):
    """Collect default active state for instance from its node bypass state.

    This is done at the very end of the CollectorOrder so that any required
    collecting of data iterating over instances (with InstancePlugin) will
    actually collect the data for when the user enables the state in the UI.
    Otherwise potentially required data might have skipped collecting.
    """
    order = pyblish.api.CollectorOrder + 0.299
    families = ["*"]
    label = "Instance Active State"

    def process(self, instance):
        # Without any members there is no node whose bypass flag we
        # could inspect - leave the instance untouched.
        if not len(instance):
            return
        node = hou.node(instance.data.get("instance_node"))
        # Not every node type exposes a bypass flag; default to active
        # when it is missing.
        if hasattr(node, "isBypassed"):
            active = not node.isBypassed()
        else:
            active = True
        # Set instance active state
        instance.data["active"] = active
        # temporarily translation of `active` to `publish` till
        # issue has been resolved:
        # https://github.com/pyblish/pyblish-base/issues/307
        instance.data["publish"] = active

View file

@ -1,168 +0,0 @@
import os
import re
import hou
import pyblish.api
from ayon_houdini.api import colorspace, plugin
from ayon_houdini.api.lib import (
get_color_management_preferences,
evalParmNoFrame
)
class CollectArnoldROPRenderProducts(plugin.HoudiniInstancePlugin):
    """Collect Arnold ROP Render Products.

    Collects the instance.data["files"] for the render products.

    Provides:
        instance -> files
    """
    label = "Arnold ROP Render Products"
    # This specific order value is used so that
    # this plugin runs after CollectFrames
    order = pyblish.api.CollectorOrder + 0.11
    families = ["arnold_rop"]

    def process(self, instance):
        """Collect beauty + AOV products and their expected files."""
        rop = hou.node(instance.data.get("instance_node"))
        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)
        default_prefix = evalParmNoFrame(rop, "ar_picture")
        render_products = []
        export_prefix = None
        export_products = []
        if instance.data["splitRender"]:
            # Split render: the ROP exports .ass files that get rendered
            # in a separate farm step.
            export_prefix = evalParmNoFrame(
                rop, "ar_ass_file", pad_character="0"
            )
            beauty_export_product = self.get_render_product_name(
                prefix=export_prefix,
                suffix=None)
            export_products.append(beauty_export_product)
            self.log.debug(
                "Found export product: {}".format(beauty_export_product)
            )
            instance.data["ifdFile"] = beauty_export_product
            instance.data["exportFiles"] = list(export_products)
        # Default beauty AOV
        beauty_product = self.get_render_product_name(prefix=default_prefix,
                                                      suffix=None)
        render_products.append(beauty_product)
        files_by_aov = {
            "": self.generate_expected_files(instance, beauty_product)
        }
        # Assume it's a multipartExr Render.
        multipartExr = True
        num_aovs = rop.evalParm("ar_aovs")
        # TODO: Check the following logic.
        #       as it always assumes that all AOV are not merged.
        for index in range(1, num_aovs + 1):
            # Skip disabled AOVs
            if not rop.evalParm("ar_enable_aov{}".format(index)):
                continue
            # Prefer an explicit EXR layer name when enabled, otherwise
            # fall back to the AOV label parm.
            if rop.evalParm("ar_aov_exr_enable_layer_name{}".format(index)):
                label = rop.evalParm("ar_aov_exr_layer_name{}".format(index))
            else:
                label = evalParmNoFrame(rop, "ar_aov_label{}".format(index))
            aov_product = self.get_render_product_name(default_prefix,
                                                       suffix=label)
            render_products.append(aov_product)
            files_by_aov[label] = self.generate_expected_files(instance,
                                                               aov_product)
            # Set to False as soon as we have a separated aov.
            multipartExr = False
        # Review Logic expects this key to exist and be True
        # if render is a multipart Exr.
        # As long as we have one AOV then multipartExr should be True.
        instance.data["multipartExr"] = multipartExr
        for product in render_products:
            self.log.debug("Found render product: {}".format(product))
        instance.data["files"] = list(render_products)
        instance.data["renderProducts"] = colorspace.ARenderProduct()
        # For now by default do NOT try to publish the rendered output
        instance.data["publishJobState"] = "Suspended"
        instance.data["attachTo"] = []  # stub required data
        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)
        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix):
        """Return the output filename using the AOV prefix and suffix"""
        # When AOV is explicitly defined in prefix we just swap it out
        # directly with the AOV suffix to embed it.
        # Note: ${AOV} seems to be evaluated in the parameter as %AOV%
        if "%AOV%" in prefix:
            # It seems that when some special separator characters are present
            # before the %AOV% token that Redshift will secretly remove it if
            # there is no suffix for the current product, for example:
            # foo_%AOV% -> foo.exr
            pattern = "%AOV%" if suffix else "[._-]?%AOV%"
            product_name = re.sub(pattern,
                                  suffix,
                                  prefix,
                                  flags=re.IGNORECASE)
        else:
            if suffix:
                # Add ".{suffix}" before the extension
                prefix_base, ext = os.path.splitext(prefix)
                product_name = prefix_base + "." + suffix + ext
            else:
                product_name = prefix
        return product_name

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""
        # NOTE(review): `dir` and `file` shadow builtins — consider renaming.
        dir = os.path.dirname(path)
        file = os.path.basename(path)
        if "#" in file:
            # Convert "#"-style padding to printf-style (e.g. #### -> %04d).
            def replace(match):
                return "%0{}d".format(len(match.group()))
            file = re.sub("#+", replace, file)
        if "%" not in file:
            # No frame token at all: single (static) output file.
            return path
        expected_files = []
        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]
        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))
        return expected_files

View file

@ -1,122 +0,0 @@
# -*- coding: utf-8 -*-
"""Collector plugin for frames data on ROP instances."""
import pyblish.api
from ayon_core.lib import BoolDef
from ayon_core.pipeline import AYONPyblishPluginMixin
from ayon_houdini.api import plugin
class CollectAssetHandles(plugin.HoudiniInstancePlugin,
                          AYONPyblishPluginMixin):
    """Apply folder handles.

    If instance does not have:
    - frameStart
    - frameEnd
    - handleStart
    - handleEnd
    But it does have:
    - frameStartHandle
    - frameEndHandle
    Then we will retrieve the folder's handles to compute
    the exclusive frame range and actual handle ranges.
    """
    # This specific order value is used so that
    # this plugin runs after CollectAnatomyInstanceData
    order = pyblish.api.CollectorOrder + 0.499
    label = "Collect Folder Handles"
    # Default for the "use_handles" publish attribute.
    use_asset_handles = True

    def process(self, instance):
        """Derive handle/frame range data from the inclusive range."""
        # Only process instances without already existing handles data
        # but that do have frameStartHandle and frameEndHandle defined
        # like the data collected from CollectRopFrameRange
        if "frameStartHandle" not in instance.data:
            return
        if "frameEndHandle" not in instance.data:
            return
        has_existing_data = {
            "handleStart",
            "handleEnd",
            "frameStart",
            "frameEnd"
        }.issubset(instance.data)
        if has_existing_data:
            return
        attr_values = self.get_attr_values_from_data(instance.data)
        if attr_values.get("use_handles", self.use_asset_handles):
            folder_attributes = instance.data["folderEntity"]["attrib"]
            handle_start = folder_attributes.get("handleStart", 0)
            handle_end = folder_attributes.get("handleEnd", 0)
        else:
            handle_start = 0
            handle_end = 0
        # frameStartHandle/frameEndHandle are inclusive of handles, so
        # trim the handles off to get the exclusive frame range.
        frame_start = instance.data["frameStartHandle"] + handle_start
        frame_end = instance.data["frameEndHandle"] - handle_end
        instance.data.update({
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": frame_start,
            "frameEnd": frame_end
        })
        # Log debug message about the collected frame range
        if attr_values.get("use_handles", self.use_asset_handles):
            self.log.debug(
                "Full Frame range with Handles "
                "[{frame_start_handle} - {frame_end_handle}]"
                .format(
                    frame_start_handle=instance.data["frameStartHandle"],
                    frame_end_handle=instance.data["frameEndHandle"]
                )
            )
        else:
            self.log.debug(
                "Use handles is deactivated for this instance, "
                "start and end handles are set to 0."
            )
        # Log collected frame range to the user
        message = "Frame range [{frame_start} - {frame_end}]".format(
            frame_start=frame_start,
            frame_end=frame_end
        )
        if handle_start or handle_end:
            message += " with handles [{handle_start}]-[{handle_end}]".format(
                handle_start=handle_start,
                handle_end=handle_end
            )
        self.log.info(message)
        if instance.data.get("byFrameStep", 1.0) != 1.0:
            self.log.info(
                "Frame steps {}".format(instance.data["byFrameStep"]))
        # Add frame range to label if the instance has a frame range.
        label = instance.data.get("label", instance.data["name"])
        instance.data["label"] = (
            "{label} [{frame_start_handle} - {frame_end_handle}]"
            .format(
                label=label,
                frame_start_handle=instance.data["frameStartHandle"],
                frame_end_handle=instance.data["frameEndHandle"]
            )
        )

    @classmethod
    def get_attribute_defs(cls):
        """Expose the "use_handles" toggle in the publisher UI."""
        return [
            BoolDef("use_handles",
                    tooltip="Disable this if you want the publisher to"
                            " ignore start and end handles specified in the"
                            " folder attributes for this publish instance",
                    default=cls.use_asset_handles,
                    label="Use asset handles")
        ]

View file

@ -1,61 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import (
lib,
plugin
)
class CollectDataforCache(plugin.HoudiniInstancePlugin):
    """Collect data for caching to Deadline."""
    # Run after Collect Frames
    order = pyblish.api.CollectorOrder + 0.11
    families = ["ass", "pointcache", "redshiftproxy", "vdbcache", "model"]
    targets = ["local", "remote"]
    label = "Collect Data for Cache"

    def process(self, instance):
        """Collect expected output files for farm cache submission."""
        creator_attribute = instance.data["creator_attributes"]
        farm_enabled = creator_attribute["farm"]
        instance.data["farm"] = farm_enabled
        if not farm_enabled:
            self.log.debug("Caching on farm is disabled. "
                           "Skipping farm collecting.")
            return
        # Why do we need this particular collector to collect the expected
        # output files from a ROP node. Don't we have a dedicated collector
        # for that yet?
        # Answer: No, we don't have a generic expected file collector.
        #         Because different product types needs different logic.
        #         e.g. check CollectMantraROPRenderProducts
        #              and CollectKarmaROPRenderProducts
        # Collect expected files
        ropnode = hou.node(instance.data["instance_node"])
        output_parm = lib.get_output_parameter(ropnode)
        expected_filepath = output_parm.eval()
        instance.data.setdefault("files", list())
        instance.data.setdefault("expectedFiles", list())
        frames = instance.data.get("frames", "")
        if isinstance(frames, str):
            # single file
            instance.data["files"].append(expected_filepath)
        else:
            # list of files
            staging_dir, _ = os.path.split(expected_filepath)
            instance.data["files"].extend(
                ["{}/{}".format(staging_dir, f) for f in frames]
            )
        cache_files = {"cache": instance.data["files"]}
        instance.data.update({
            "plugin": "Houdini",
            "publish": True
        })
        instance.data["families"].append("publish.hou")
        instance.data["expectedFiles"].append(cache_files)
        self.log.debug("Caching on farm expected files: {}".format(instance.data["expectedFiles"]))

View file

@ -1,31 +0,0 @@
import pyblish.api
from ayon_core.lib import NumberDef
from ayon_core.pipeline import AYONPyblishPluginMixin
from ayon_houdini.api import plugin
class CollectChunkSize(plugin.HoudiniInstancePlugin,
                       AYONPyblishPluginMixin):
    """Collect chunk size for cache submission to Deadline."""
    order = pyblish.api.CollectorOrder + 0.05
    families = ["ass", "pointcache", "vdbcache", "redshiftproxy", "model"]
    targets = ["local", "remote"]
    label = "Collect Chunk Size"
    # Default frames-per-task used by the publish attribute definition.
    chunk_size = 999999

    def process(self, instance):
        # Read the artist-facing publish attribute and expose it where
        # the farm submission plug-ins expect it.
        publish_attributes = self.get_attr_values_from_data(instance.data)
        instance.data["chunkSize"] = publish_attributes.get("chunkSize")

    @classmethod
    def get_attribute_defs(cls):
        """Expose the chunk size setting in the publisher UI."""
        chunk_size_def = NumberDef(
            "chunkSize",
            minimum=1,
            maximum=999999,
            decimals=0,
            default=cls.chunk_size,
            label="Frame Per Task",
        )
        return [chunk_size_def]

View file

@ -1,37 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
class CollectHoudiniCurrentFile(plugin.HoudiniContextPlugin):
    """Inject the current working file into context"""
    order = pyblish.api.CollectorOrder - 0.1
    label = "Houdini Current File"

    def process(self, context):
        """Inject the current working file"""
        current_file = hou.hipFile.path()
        if not os.path.exists(current_file):
            # By default, Houdini will even point a new scene to a path.
            # However if the file is not saved at all and does not exist,
            # we assume the user never set it.
            current_file = ""
        elif os.path.basename(current_file) == "untitled.hip":
            # Due to even a new file being called 'untitled.hip' we are unable
            # to confirm the current scene was ever saved because the file
            # could have existed already. We will allow it if the file exists,
            # but show a warning for this edge case to clarify the potential
            # false positive.
            self.log.warning(
                "Current file is 'untitled.hip' and we are "
                "unable to detect whether the current scene is "
                "saved correctly."
            )
        context.data["currentFile"] = current_file
        self.log.info('Current workfile path: {}'.format(current_file))

View file

@ -1,36 +0,0 @@
import pyblish.api
from ayon_houdini.api import plugin
class CollectFarmInstances(plugin.HoudiniInstancePlugin):
    """Collect instances for farm render."""
    order = pyblish.api.CollectorOrder
    families = ["mantra_rop",
                "karma_rop",
                "redshift_rop",
                "arnold_rop",
                "vray_rop",
                "usdrender"]
    targets = ["local", "remote"]
    label = "Collect farm instances"

    def process(self, instance):
        """Flag the instance for farm rendering and split export."""
        creator_attribute = instance.data["creator_attributes"]
        render_target = creator_attribute.get("render_target")
        # Anything other than the two farm targets is a local render.
        if render_target not in {"farm_split", "farm"}:
            instance.data["farm"] = False
            instance.data["splitRender"] = False
            self.log.debug("Render on farm is disabled. "
                           "Skipping farm collecting.")
            return
        instance.data["farm"] = True
        # "farm_split" exports scene files first, rendered in a second step.
        instance.data["splitRender"] = render_target == "farm_split"

View file

@ -1,98 +0,0 @@
import os
from typing import List
import pyblish.api
from ayon_core.pipeline import AYONPyblishPluginMixin
from ayon_houdini.api import plugin
class CollectFilesForCleaningUp(plugin.HoudiniInstancePlugin,
                                AYONPyblishPluginMixin):
    """Collect Files For Cleaning Up.

    This collector collects output files and adds them to file remove list.

    CAUTION:
        This collector registers exported files and
        the parent folder for deletion in `ExplicitCleanUp` plug-in.
        please refer to `ExplicitCleanUp`'s docstring for further info.

    Notes:
        Artists are free to change the file path in the ROP node.
        Farm instances will be processed on farm by other dedicated plugins
        that live in core addon e.g. `CollectRenderedFiles` plugin.
        These dedicated plugins don't support tracking and removing
        intermediate render files.
        Local Render instances don't track intermediate render files,
        Therefore, this plugin doesn't support removing
        intermediate render files.
        HDA is not added to this plugin's options in server settings.
        Cleaning up HDA products will break the scene as Houdini will no
        longer be able to find the HDA file.
        In addition, HDA plugins always save HDAs to external files.
        Therefore, Cleaning up HDA products will break the ability to go back
        to the workfile and continue on the HDA.
    """
    # It should run after CollectFrames and Collect Render plugins,
    # and before CollectLocalRenderInstances.
    order = pyblish.api.CollectorOrder + 0.115
    hosts = ["houdini"]
    families = ["*"]
    label = "Collect Files For Cleaning Up"

    def process(self, instance):
        """Register output files and their staging dirs for cleanup."""
        if instance.data.get("farm"):
            self.log.debug("Should be processed on farm, skipping.")
            return
        files: List[str] = []
        staging_dirs: List[str] = []
        expected_files = instance.data.get("expectedFiles", [])
        # Prefer 'expectedFiles' over 'frames' because it usually contains more
        # output files than just a single file or single sequence of files.
        if expected_files:
            # Products with expected files
            # This can be Render products or submitted cache to farm.
            for expected in expected_files:
                # expected.values() is a list of lists
                for output_files in expected.values():
                    # NOTE(review): assumes every output file list is
                    # non-empty — confirm upstream collectors guarantee it.
                    staging_dir, _ = os.path.split(output_files[0])
                    if staging_dir not in staging_dirs:
                        staging_dirs.append(staging_dir)
                    files.extend(output_files)
        else:
            # Products with frames or single file.
            frames = instance.data.get("frames")
            if frames is None:
                self.log.warning(
                    f"No frames data found on instance {instance}"
                    ". Skipping collection for caching on farm..."
                )
                return
            staging_dir = instance.data.get("stagingDir")
            staging_dirs.append(staging_dir)
            if isinstance(frames, str):
                # single file.
                files.append(f"{staging_dir}/{frames}")
            else:
                # list of frame.
                files.extend(
                    [f"{staging_dir}/{frame}" for frame in frames]
                )
        self.log.debug(
            f"Add directories to 'cleanupEmptyDir': {staging_dirs}")
        instance.context.data["cleanupEmptyDirs"].extend(staging_dirs)
        self.log.debug("Add files to 'cleanupFullPaths': {}".format(files))
        instance.context.data["cleanupFullPaths"].extend(files)

View file

@ -1,63 +0,0 @@
# -*- coding: utf-8 -*-
"""Collector plugin for frames data on ROP instances."""
import os
import hou # noqa
import clique
import pyblish.api
from ayon_houdini.api import lib, plugin
class CollectFrames(plugin.HoudiniInstancePlugin):
    """Collect all frames which would be saved from the ROP nodes."""

    # This specific order value is used so that
    # this plugin runs after CollectRopFrameRange
    order = pyblish.api.CollectorOrder + 0.1
    label = "Collect Frames"
    families = ["camera", "vdbcache", "imagesequence", "ass",
                "redshiftproxy", "review", "pointcache", "fbx",
                "model"]

    def process(self, instance):
        # CollectRopFrameRange computes the handle-inclusive frame range
        # depending on the ROP's trange value.
        first_frame = instance.data["frameStartHandle"]
        last_frame = instance.data["frameEndHandle"]

        # Evaluate the output file name at the first frame.
        rop_node = hou.node(instance.data["instance_node"])
        output = lib.get_output_parameter(rop_node).evalAtFrame(first_frame)
        file_name = os.path.basename(output)

        # todo: `frames` currently conflicts with "explicit frames" for a
        #   custom frame list. So this should be refactored.
        instance.data["frames"] = file_name  # single file name by default
        instance.data["stagingDir"] = os.path.dirname(output)

        # A single frame needs no sequence expansion.
        if first_frame == last_frame:
            return

        # Detect a frame pattern in the file name, e.g.
        # 'pointcacheBgeoCache_AB010.1001.bgeo' becomes
        # <Collection "pointcacheBgeoCache_AB010.%d.bgeo [1001]">
        detected, _ = clique.assemble(
            [file_name],
            patterns=[clique.PATTERNS["frames"]],
            minimum_items=1
        )
        if not detected:
            # No frame pattern detected; keep the single file name.
            return

        # It's always expected to be one collection.
        collection = detected[0]
        collection.indexes.clear()
        collection.indexes.update(range(first_frame, last_frame + 1))
        instance.data["frames"] = list(collection)

View file

@ -1,137 +0,0 @@
from collections import deque
import pyblish.api
from ayon_core.pipeline import registered_host
from ayon_houdini.api import plugin
def get_container_members(container):
    """Return the container node plus every node nested inside it.

    Loaded containers usually don't have any complex references, so the
    contained children should be all we need; ``.references()`` on the
    nodes is deliberately not checked.

    Args:
        container (dict): Loaded container data with a "node" entry.

    Returns:
        set: The container node and all of its sub-children.
    """
    root = container["node"]
    return set(root.allSubChildren()) | {root}
def collect_input_containers(containers, nodes):
    """Collect containers that contain any of the node in `nodes`.

    This will return any loaded Avalon container that contains at least one
    of the nodes. As such, the Avalon container is an input for it. Or in
    short, there are member nodes of that container.

    Returns:
        list: Loaded containers that contain the `nodes`
    """
    # Assume the containers have collected their cached '_members' data
    # in the collector.
    lookup = set(nodes)
    result = []
    for container in containers:
        if not lookup.isdisjoint(container["_members"]):
            result.append(container)
    return result
def iter_upstream(node):
    """Yields all upstream inputs for the current node.

    This includes all `node.inputAncestors()` but also traverses through all
    `node.references()` for the node itself and for any of the upstream nodes.

    This method has no max-depth and will collect all upstream inputs.

    Args:
        node (hou.Node): Node to traverse upstream from.

    Yields:
        hou.Node: The upstream nodes, including references.

    """
    upstream = node.inputAncestors(
        include_ref_inputs=True, follow_subnets=True
    )

    # Initialize process queue with the node's ancestors itself
    queue = deque(upstream)
    collected = set(upstream)

    # Traverse upstream references for all nodes and yield them as we
    # process the queue.
    while queue:
        upstream_node = queue.pop()
        yield upstream_node

        # Find its references that are not collected yet.
        references = [n for n in upstream_node.references()
                      if n not in collected]
        queue.extend(references)
        collected.update(references)

        # Include the references' input ancestors that have not been
        # collected yet.
        # BUGFIX: the previous implementation guarded this loop with
        # `if reference in collected: continue` *after* adding all
        # references to `collected`, so the guard was always true and the
        # references' ancestors were never traversed. The guard is removed;
        # the `not in collected` filter below still prevents duplicates.
        for reference in references:
            ancestors = [
                n for n in reference.inputAncestors(
                    include_ref_inputs=True, follow_subnets=True
                )
                if n not in collected
            ]
            queue.extend(ancestors)
            collected.update(ancestors)
class CollectUpstreamInputs(plugin.HoudiniInstancePlugin):
    """Collect source input containers used for this publish.

    This will include `inputs` data of which loaded publishes were used in the
    generation of this publish. This leaves an upstream trace to what was used
    as input.

    """

    label = "Collect Inputs"
    order = pyblish.api.CollectorOrder + 0.4

    def process(self, instance):
        """Store upstream input representation ids on the instance.

        Writes `instance.data["inputRepresentations"]` — the representation
        ids of loaded containers whose member nodes appear upstream of this
        instance's output node.
        """
        # We can't get the "inputAncestors" directly from the ROP
        # node, so we find the related output node (set in SOP/COP path)
        # and include that together with its ancestors
        output = instance.data.get("output_node")

        if output is None:
            # If no valid output node is set then ignore it as validation
            # will be checking those cases.
            self.log.debug(
                "No output node found, skipping collecting of inputs.."
            )
            return

        # For large scenes the querying of "host.ls()" can be relatively slow
        # e.g. up to a second. Many instances calling it easily slows this
        # down. As such, we cache it so we trigger it only once.
        # todo: Instead of hidden cache make "CollectContainers" plug-in
        cache_key = "__cache_containers"
        scene_containers = instance.context.data.get(cache_key, None)
        if scene_containers is None:
            # Query the scenes' containers if there's no cache yet
            host = registered_host()
            scene_containers = list(host.ls())
            for container in scene_containers:
                # Embed the members into the container dictionary so the
                # membership test below doesn't have to re-query each node.
                container_members = set(get_container_members(container))
                container["_members"] = container_members
            instance.context.data[cache_key] = scene_containers

        inputs = []
        if scene_containers:
            # Collect all upstream parents
            nodes = list(iter_upstream(output))
            nodes.append(output)

            # Collect containers for the given set of nodes
            containers = collect_input_containers(scene_containers, nodes)

            inputs = [c["representation"] for c in containers]

        instance.data["inputRepresentations"] = inputs
        self.log.debug("Collected inputs: %s" % inputs)

View file

@ -1,24 +0,0 @@
"""Collector for different types.
This will add additional families to different instance based on
the creator_identifier parameter.
"""
import pyblish.api
from ayon_houdini.api import plugin
class CollectPointcacheType(plugin.HoudiniInstancePlugin):
    """Collect data type for different instances."""

    order = pyblish.api.CollectorOrder
    families = ["pointcache", "model"]
    label = "Collect instances types"

    # Maps a creator identifier to the output-format family it implies.
    _FAMILY_BY_CREATOR = {
        "io.openpype.creators.houdini.bgeo": "bgeo",
        "io.openpype.creators.houdini.pointcache": "abc",
        "io.openpype.creators.houdini.model": "abc",
    }

    def process(self, instance):
        """Append the format family matching the instance's creator."""
        extra_family = self._FAMILY_BY_CREATOR.get(
            instance.data["creator_identifier"])
        if extra_family:
            instance.data["families"] += [extra_family]

View file

@ -1,113 +0,0 @@
import re
import os
import hou
import pyblish.api
from ayon_houdini.api.lib import (
evalParmNoFrame,
get_color_management_preferences
)
from ayon_houdini.api import (
colorspace,
plugin
)
class CollectKarmaROPRenderProducts(plugin.HoudiniInstancePlugin):
    """Collect Karma Render Products

    Collects the instance.data["files"] for the multipart render product.

    Provides:
        instance    -> files

    """

    label = "Karma ROP Render Products"
    # This specific order value is used so that
    # this plugin runs after CollectFrames
    order = pyblish.api.CollectorOrder + 0.11
    families = ["karma_rop"]

    def process(self, instance):
        """Collect expected render output files from the Karma ROP node."""
        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize when the parm exists.
        # NOTE(review): presumably frames-per-task for farm submission —
        # confirm against the farm submitter.
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        # Output image prefix, evaluated with the frame token unexpanded.
        default_prefix = evalParmNoFrame(rop, "picture")
        render_products = []

        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        )
        render_products.append(beauty_product)

        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }

        # Review Logic expects this key to exist and be True
        # if render is a multipart Exr.
        # As long as we have one AOV then multipartExr should be True.
        # By default karma render is a multipart Exr.
        instance.data["multipartExr"] = True

        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        for product in render_products:
            self.log.debug("Found render product: %s" % product)

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix):
        """Return the product filename, inserting `suffix` before the file
        extension when given (e.g. 'name.exr' -> 'name.aov.exr')."""
        product_name = prefix
        if suffix:
            # Add ".{suffix}" before the extension
            prefix_base, ext = os.path.splitext(prefix)
            product_name = "{}.{}{}".format(prefix_base, suffix, ext)

        return product_name

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""
        dir = os.path.dirname(path)
        file = os.path.basename(path)

        # Convert '#' frame tokens to printf-style padding,
        # e.g. '####' -> '%04d'.
        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        # No frame placeholder left: single file, return the path as-is.
        # NOTE(review): returns a str here but a list below — downstream
        # consumers appear to handle both; confirm.
        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]

        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))

        return expected_files

View file

@ -1,138 +0,0 @@
import os
import pyblish.api
from ayon_core.pipeline.create import get_product_name
from ayon_core.pipeline.farm.patterning import match_aov_pattern
from ayon_core.pipeline.publish import (
get_plugin_settings,
apply_plugin_settings_automatically
)
from ayon_houdini.api import plugin
class CollectLocalRenderInstances(plugin.HoudiniInstancePlugin):
    """Collect instances for local render.

    Agnostic Local Render Collector.

    Creates one publish instance per AOV from the render instance's
    expected files, then disables integration of the original instance
    (which is only kept to trigger the render itself).
    """

    # this plugin runs after Collect Render Products
    order = pyblish.api.CollectorOrder + 0.12
    families = ["mantra_rop",
                "karma_rop",
                "redshift_rop",
                "arnold_rop",
                "vray_rop",
                "usdrender"]

    label = "Collect local render instances"

    # Plugin settings (may be overridden from project settings):
    # whether to reuse Deadline's AOV filter instead of this plugin's own.
    use_deadline_aov_filter = False
    # Default filter: mark AOVs whose filename matches as reviewable.
    aov_filter = {"host_name": "houdini",
                  "value": [".*([Bb]eauty).*"]}

    @classmethod
    def apply_settings(cls, project_settings):
        # Preserve automatic settings applying logic
        settings = get_plugin_settings(plugin=cls,
                                       project_settings=project_settings,
                                       log=cls.log,
                                       category="houdini")
        apply_plugin_settings_automatically(cls, settings, logger=cls.log)

        if not cls.use_deadline_aov_filter:
            # get aov_filter from collector settings
            # and restructure it as match_aov_pattern requires.
            cls.aov_filter = {
                cls.aov_filter["host_name"]: cls.aov_filter["value"]
            }
        else:
            # get aov_filter from deadline settings
            cls.aov_filter = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"]["aov_filter"]
            cls.aov_filter = {
                item["name"]: item["value"]
                for item in cls.aov_filter
            }

    def process(self, instance):
        if instance.data["farm"]:
            self.log.debug("Render on farm is enabled. "
                           "Skipping local render collecting.")
            return

        # Create Instance for each AOV.
        context = instance.context
        # Only the first expectedFiles entry is used (AOV name -> paths).
        expectedFiles = next(iter(instance.data["expectedFiles"]), {})

        product_type = "render"  # is always render
        product_group = get_product_name(
            context.data["projectName"],
            context.data["taskEntity"]["name"],
            context.data["taskEntity"]["taskType"],
            context.data["hostName"],
            product_type,
            instance.data["productName"]
        )

        for aov_name, aov_filepaths in expectedFiles.items():
            product_name = product_group

            if aov_name:
                product_name = "{}_{}".format(product_name, aov_name)

            # Create instance for each AOV
            aov_instance = context.create_instance(product_name)

            # Prepare Representation for each AOV
            aov_filenames = [os.path.basename(path) for path in aov_filepaths]
            staging_dir = os.path.dirname(aov_filepaths[0])
            ext = aov_filepaths[0].split(".")[-1]

            # Decide if instance is reviewable
            preview = False
            if instance.data.get("multipartExr", False):
                # Add preview tag because its multipartExr.
                preview = True
            else:
                # Add Preview tag if the AOV matches the filter.
                preview = match_aov_pattern(
                    "houdini", self.aov_filter, aov_filenames[0]
                )

            # Reviewability also requires the artist's "review" toggle.
            preview = preview and instance.data.get("review", False)

            # Support Single frame.
            # The integrator wants single files to be a single
            # filename instead of a list.
            # More info: https://github.com/ynput/ayon-core/issues/238
            if len(aov_filenames) == 1:
                aov_filenames = aov_filenames[0]

            aov_instance.data.update({
                # 'label': label,
                "task": instance.data["task"],
                "folderPath": instance.data["folderPath"],
                "frameStart": instance.data["frameStartHandle"],
                "frameEnd": instance.data["frameEndHandle"],
                "productType": product_type,
                "family": product_type,
                "productName": product_name,
                "productGroup": product_group,
                "families": ["render.local.hou", "review"],
                "instance_node": instance.data["instance_node"],
                "representations": [
                    {
                        "stagingDir": staging_dir,
                        "ext": ext,
                        "name": ext,
                        "tags": ["review"] if preview else [],
                        "files": aov_filenames,
                        "frameStart": instance.data["frameStartHandle"],
                        "frameEnd": instance.data["frameEndHandle"]
                    }
                ]
            })

        # Skip integrating original render instance.
        # We are not removing it because it's used to trigger the render.
        instance.data["integrate"] = False

View file

@ -1,159 +0,0 @@
import re
import os
import hou
import pyblish.api
from ayon_houdini.api.lib import (
evalParmNoFrame,
get_color_management_preferences
)
from ayon_houdini.api import (
colorspace,
plugin
)
class CollectMantraROPRenderProducts(plugin.HoudiniInstancePlugin):
    """Collect Mantra Render Products

    Collects the instance.data["files"] for the render products.

    Provides:
        instance    -> files

    """

    label = "Mantra ROP Render Products"
    # This specific order value is used so that
    # this plugin runs after CollectFrames
    order = pyblish.api.CollectorOrder + 0.11
    families = ["mantra_rop"]

    def process(self, instance):
        """Collect render products and, for split renders, the IFD export
        products of the Mantra ROP node."""
        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize when the parm exists.
        # NOTE(review): presumably frames-per-task for farm submission —
        # confirm against the farm submitter.
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        # Beauty output prefix with the frame token left unexpanded.
        default_prefix = evalParmNoFrame(rop, "vm_picture")
        render_products = []

        export_prefix = None
        export_products = []
        if instance.data["splitRender"]:
            # Split render: a separate IFD (scene description) export job
            # precedes the actual render job.
            export_prefix = evalParmNoFrame(
                rop, "soho_diskfile", pad_character="0"
            )
            beauty_export_product = self.get_render_product_name(
                prefix=export_prefix,
                suffix=None)
            export_products.append(beauty_export_product)
            self.log.debug(
                "Found export product: {}".format(beauty_export_product)
            )
            instance.data["ifdFile"] = beauty_export_product
            instance.data["exportFiles"] = list(export_products)

        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        )
        render_products.append(beauty_product)
        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }

        # Assume it's a multipartExr Render.
        multipartExr = True

        # TODO: This logic doesn't take into considerations
        #   cryptomatte defined in 'Images > Cryptomatte'
        aov_numbers = rop.evalParm("vm_numaux")
        if aov_numbers > 0:
            # get the filenames of the AOVs
            for i in range(1, aov_numbers + 1):
                var = rop.evalParm("vm_variable_plane%d" % i)
                if var:
                    aov_name = "vm_filename_plane%d" % i
                    aov_boolean = "vm_usefile_plane%d" % i
                    aov_enabled = rop.evalParm(aov_boolean)
                    has_aov_path = rop.evalParm(aov_name)
                    # Only AOVs written to their own file are separate
                    # products.
                    if has_aov_path and aov_enabled == 1:
                        aov_prefix = evalParmNoFrame(rop, aov_name)
                        aov_product = self.get_render_product_name(
                            prefix=aov_prefix, suffix=None
                        )
                        render_products.append(aov_product)

                        files_by_aov[var] = self.generate_expected_files(instance, aov_product)  # noqa

                        # Set to False as soon as we have a separated aov.
                        multipartExr = False

        # Review Logic expects this key to exist and be True
        # if render is a multipart Exr.
        # As long as we have one AOV then multipartExr should be True.
        instance.data["multipartExr"] = multipartExr

        for product in render_products:
            self.log.debug("Found render product: %s" % product)

        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        # For now by default do NOT try to publish the rendered output
        instance.data["publishJobState"] = "Suspended"
        instance.data["attachTo"] = []  # stub required data

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix):
        """Return the product filename, inserting `suffix` before the file
        extension when given."""
        product_name = prefix
        if suffix:
            # Add ".{suffix}" before the extension
            prefix_base, ext = os.path.splitext(prefix)
            product_name = prefix_base + "." + suffix + ext

        return product_name

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""
        dir = os.path.dirname(path)
        file = os.path.basename(path)

        # Convert '#' frame tokens to printf-style padding,
        # e.g. '####' -> '%04d'.
        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        # No frame placeholder left: single file, return the path as-is.
        # NOTE(review): returns a str here but a list below — downstream
        # consumers appear to handle both; confirm.
        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]

        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))

        return expected_files

View file

@ -1,77 +0,0 @@
import pyblish.api
from ayon_core.pipeline.publish import KnownPublishError
from ayon_houdini.api import plugin
class CollectOutputSOPPath(plugin.HoudiniInstancePlugin):
    """Collect the out node's SOP/COP Path value."""

    order = pyblish.api.CollectorOrder
    families = [
        "pointcache",
        "camera",
        "vdbcache",
        "imagesequence",
        "usd",
        "usdrender",
        "redshiftproxy",
        "staticMesh",
        "model"
    ]

    label = "Collect Output Node Path"

    # ROP node types whose output node comes from a single path parameter.
    _PATH_PARM_BY_TYPE = {
        "geometry": "soppath",
        "comp": "coppath",
        "usd": "loppath",
        "usdrender": "loppath",
        "Redshift_Proxy_Output": "RS_archive_sopPath",
        "filmboxfbx": "startnode",
    }

    def process(self, instance):
        """Resolve and store the ROP's output node on the instance."""
        import hou

        node = hou.node(instance.data["instance_node"])
        node_type = node.type().name()

        if node_type in self._PATH_PARM_BY_TYPE:
            parm_name = self._PATH_PARM_BY_TYPE[node_type]
            out_node = node.parm(parm_name).evalAsNode()
        elif node_type == "alembic":
            # Alembic can switch between using SOP Path or object
            if node.parm("use_sop_path").eval():
                out_node = node.parm("sop_path").evalAsNode()
            else:
                root = node.parm("root").eval()
                objects = node.parm("objects").eval()
                out_node = hou.node(root + "/" + objects)
        elif node_type in ("usd_rop", "usdrender_rop"):
            # Inside Solaris e.g. /stage (not in ROP context)
            # When incoming connection is present it takes it directly
            inputs = node.inputs()
            if inputs:
                out_node = inputs[0]
            else:
                out_node = node.parm("loppath").evalAsNode()
        else:
            raise KnownPublishError(
                "ROP node type '{}' is not supported.".format(node_type)
            )

        if not out_node:
            self.log.warning("No output node collected.")
            return

        self.log.debug("Output node: %s" % out_node.path())
        instance.data["output_node"] = out_node

View file

@ -1,185 +0,0 @@
import re
import os
import hou
import pyblish.api
from ayon_houdini.api.lib import (
evalParmNoFrame,
get_color_management_preferences
)
from ayon_houdini.api import (
colorspace,
plugin
)
class CollectRedshiftROPRenderProducts(plugin.HoudiniInstancePlugin):
    """Collect USD Render Products

    Collects the instance.data["files"] for the render products.

    Provides:
        instance    -> files

    """

    label = "Redshift ROP Render Products"
    # This specific order value is used so that
    # this plugin runs after CollectFrames
    order = pyblish.api.CollectorOrder + 0.11
    families = ["redshift_rop"]

    def process(self, instance):
        """Collect render products and, for split renders, the Redshift
        archive export products of the ROP node."""
        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize when the parm exists.
        # NOTE(review): presumably frames-per-task for farm submission —
        # confirm against the farm submitter.
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "RS_outputFileNamePrefix")
        beauty_suffix = rop.evalParm("RS_outputBeautyAOVSuffix")

        export_products = []
        if instance.data["splitRender"]:
            # Split render: a separate Redshift archive export job precedes
            # the actual render job.
            export_prefix = evalParmNoFrame(
                rop, "RS_archive_file", pad_character="0"
            )
            beauty_export_product = self.get_render_product_name(
                prefix=export_prefix,
                suffix=None)
            export_products.append(beauty_export_product)
            self.log.debug(
                "Found export product: {}".format(beauty_export_product)
            )
            instance.data["ifdFile"] = beauty_export_product
            instance.data["exportFiles"] = list(export_products)

        # NOTE(review): evalParm on a menu parm may return an int, which
        # would make this comparison with the string "2" always False —
        # confirm against the Redshift ROP parameter type.
        full_exr_mode = (rop.evalParm("RS_outputMultilayerMode") == "2")
        if full_exr_mode:
            # Ignore beauty suffix if full mode is enabled
            # As this is what the rop does.
            beauty_suffix = ""

        # Assume it's a multipartExr Render.
        multipartExr = True

        # Default beauty/main layer AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=beauty_suffix
        )
        render_products = [beauty_product]
        files_by_aov = {
            beauty_suffix: self.generate_expected_files(instance,
                                                        beauty_product)
        }

        # AOV definitions may be sourced from another ROP node.
        aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode()
        if aovs_rop:
            rop = aovs_rop

        num_aovs = 0
        if not rop.evalParm('RS_aovAllAOVsDisabled'):
            num_aovs = rop.evalParm("RS_aov")

        for index in range(num_aovs):
            i = index + 1

            # Skip disabled AOVs
            if not rop.evalParm(f"RS_aovEnable_{i}"):
                continue

            aov_suffix = rop.evalParm(f"RS_aovSuffix_{i}")
            aov_prefix = evalParmNoFrame(rop, f"RS_aovCustomPrefix_{i}")
            if not aov_prefix:
                aov_prefix = default_prefix

            # Cryptomattes are always separate files; other AOVs only when
            # not merged into the full multilayer exr.
            if rop.parm(f"RS_aovID_{i}").evalAsString() == "CRYPTOMATTE" or \
                    not full_exr_mode:
                aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
                render_products.append(aov_product)
                files_by_aov[aov_suffix] = self.generate_expected_files(instance,
                                                                        aov_product)  # noqa

                # Set to False as soon as we have a separated aov.
                multipartExr = False

        # Review Logic expects this key to exist and be True
        # if render is a multipart Exr.
        # As long as we have one AOV then multipartExr should be True.
        instance.data["multipartExr"] = multipartExr

        for product in render_products:
            self.log.debug("Found render product: %s" % product)

        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        # For now by default do NOT try to publish the rendered output
        instance.data["publishJobState"] = "Suspended"
        instance.data["attachTo"] = []  # stub required data

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = []
        instance.data["expectedFiles"].append(files_by_aov)

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix):
        """Return the output filename using the AOV prefix and suffix"""

        # When AOV is explicitly defined in prefix we just swap it out
        # directly with the AOV suffix to embed it.
        # Note: '$AOV' seems to be evaluated in the parameter as '%AOV%'
        has_aov_in_prefix = "%AOV%" in prefix
        if has_aov_in_prefix:
            # It seems that when some special separator characters are present
            # before the %AOV% token that Redshift will secretly remove it if
            # there is no suffix for the current product, for example:
            # foo_%AOV% -> foo.exr
            pattern = "%AOV%" if suffix else "[._-]?%AOV%"
            product_name = re.sub(pattern, suffix, prefix, flags=re.IGNORECASE)
        else:
            if suffix:
                # Add ".{suffix}" before the extension
                prefix_base, ext = os.path.splitext(prefix)
                product_name = prefix_base + "." + suffix + ext
            else:
                product_name = prefix

        return product_name

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data"""
        dir = os.path.dirname(path)
        file = os.path.basename(path)

        # Convert '#' frame tokens to printf-style padding,
        # e.g. '####' -> '%04d'.
        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        # No frame placeholder left: single file, return the path as-is.
        # NOTE(review): returns a str here but a list below — downstream
        # consumers appear to handle both; confirm.
        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]

        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))

        return expected_files

View file

@ -1,248 +0,0 @@
import re
import os
import hou
import pxr.UsdRender
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.usd import (
get_usd_render_rop_rendersettings
)
class CollectRenderProducts(plugin.HoudiniInstancePlugin):
"""Collect USD Render Products.
The render products are collected from the USD Render ROP node by detecting
what the selected Render Settings prim path is, then finding those
Render Settings in the USD Stage and collecting the targeted Render
Products and their expected filenames.
Note: Product refers USD Render Product, not to an AYON Product
"""
label = "Collect Render Products"
# This plugin should run after CollectUsdRender
# and, before CollectLocalRenderInstances
order = pyblish.api.CollectorOrder + 0.04
families = ["usdrender"]
def process(self, instance):
rop_node = hou.node(instance.data["instance_node"])
node = instance.data.get("output_node")
if not node:
rop_path = rop_node.path()
self.log.error(
"No output node found. Make sure to connect a valid "
"input to the USD ROP: %s" % rop_path
)
return
override_output_image = rop_node.evalParm("outputimage")
filenames = []
files_by_product = {}
stage = node.stage()
for prim_path in self.get_render_products(rop_node, stage):
prim = stage.GetPrimAtPath(prim_path)
if not prim or not prim.IsA(pxr.UsdRender.Product):
self.log.warning("Found invalid render product path "
"configured in render settings that is not a "
"Render Product prim: %s", prim_path)
continue
render_product = pxr.UsdRender.Product(prim)
# Get Render Product Name
if override_output_image:
name = override_output_image
else:
# We force taking it from any random time sample as opposed to
# "default" that the USD Api falls back to since that won't
# return time sampled values if they were set per time sample.
name = render_product.GetProductNameAttr().Get(time=0)
dirname = os.path.dirname(name)
basename = os.path.basename(name)
dollarf_regex = r"(\$F([0-9]?))"
if re.match(dollarf_regex, basename):
# TODO: Confirm this actually is allowed USD stages and HUSK
# Substitute $F
def replace(match):
"""Replace $F4 with padded #."""
padding = int(match.group(2)) if match.group(2) else 1
return "#" * padding
filename_base = re.sub(dollarf_regex, replace, basename)
filename = os.path.join(dirname, filename_base)
else:
# Last group of digits in the filename before the extension
# The frame number must always be prefixed by underscore or dot
# Allow product names like:
# - filename.1001.exr
# - filename.1001.aov.exr
# - filename.aov.1001.exr
# - filename_1001.exr
frame_regex = r"(.*[._])(\d+)(?!.*\d)(.*\.[A-Za-z0-9]+$)"
# It may be the case that the current USD stage has stored
# product name samples (e.g. when loading a USD file with
# time samples) where it does not refer to e.g. $F4. And thus
# it refers to the actual path like /path/to/frame.1001.exr
# TODO: It would be better to maybe sample product name
# attribute `ValueMightBeTimeVarying` and if so get it per
# frame using `attr.Get(time=frame)` to ensure we get the
# actual product name set at that point in time?
# Substitute basename.0001.ext
def replace(match):
head, frame, tail = match.groups()
padding = "#" * len(frame)
return head + padding + tail
filename_base = re.sub(frame_regex, replace, basename)
filename = os.path.join(dirname, filename_base)
filename = filename.replace("\\", "/")
assert "#" in filename, (
"Couldn't resolve render product name "
"with frame number: %s" % name
)
filenames.append(filename)
# TODO: Improve AOV name detection logic
aov_identifier = self.get_aov_identifier(render_product)
if aov_identifier in files_by_product:
self.log.error(
"Multiple render products are identified as the same AOV "
"which means one of the two will not be ingested during"
"publishing. AOV: '%s'", aov_identifier
)
self.log.warning("Skipping Render Product: %s", render_product)
files_by_product[aov_identifier] = self.generate_expected_files(
instance,
filename
)
aov_label = f"'{aov_identifier}' aov in " if aov_identifier else ""
self.log.debug("Render Product %s%s", aov_label, prim_path)
self.log.debug("Product name: %s", filename)
# Filenames for Deadline
instance.data["files"] = filenames
instance.data.setdefault("expectedFiles", []).append(files_by_product)
# Farm Publishing add review logic expects this key to exist and
# be True if render is a multipart Exr.
# otherwise it will most probably fail the AOV filter as multipartExr
# files mostly don't include aov name in the file path.
# Assume multipartExr is 'True' as long as we have one AOV.
instance.data["multipartExr"] = len(files_by_product) <= 1
def get_aov_identifier(self, render_product):
"""Return the AOV identifier for a Render Product
A Render Product does not really define what 'AOV' it is, it
defines the product name (output path) and the render vars to
include.
So we need to define what in particular of a `UsdRenderProduct`
we use to separate the AOV (and thus apply sub-grouping with).
For now we'll consider any Render Product that only refers
to a single rendervar that the rendervars prim name is the AOV
otherwise we'll assume renderproduct to be a combined multilayer
'main' layer
Args:
render_product (pxr.UsdRender.Product): The Render Product
Returns:
str: The AOV identifier
"""
targets = render_product.GetOrderedVarsRel().GetTargets()
if len(targets) > 1:
# Cryptomattes usually are combined render vars, for example:
# - crypto_asset, crypto_asset01, crypto_asset02, crypto_asset03
# - crypto_object, crypto_object01, etc.
# These still refer to the same AOV so we take the common prefix
# e.g. `crypto_asset` or `crypto` (if multiple are combined)
if all(target.name.startswith("crypto") for target in targets):
start = os.path.commonpath([target.name for target in targets])
return start.rstrip("_") # remove any trailing _
# Main layer
return ""
elif len(targets) == 1:
# AOV for a single var
return targets[0].name
else:
self.log.warning(
f"Render product has no rendervars set: {render_product}")
return ""
def get_render_products(self, usdrender_rop, stage):
    """The render products in the defined render settings.

    Args:
        usdrender_rop (hou.Node): The Houdini USD Render ROP node.
        stage (pxr.Usd.Stage): The USD stage to find the render settings
            in. This is usually the stage from the LOP path the USD
            Render ROP node refers to.

    Returns:
        List[Sdf.Path]: Render Product paths enabled in the render
            settings.
    """
    settings = get_usd_render_rop_rendersettings(
        usdrender_rop, stage, logger=self.log)

    # Without resolvable render settings there are no products to report
    if not settings:
        return []

    return settings.GetProductsRel().GetTargets()
def generate_expected_files(self, instance, path):
    """Generate full sequence of expected files from a filepath.

    The filepath should have a '#' token as placeholder for frame
    numbers or should have %04d or %d placeholders. Runs of `#`
    characters indicate frame number and padding, e.g. #### becomes
    0001 for frame 1.

    Args:
        instance (pyblish.api.Instance): The publish instance.
        path (str): The filepath to generate the list of output files
            for.

    Returns:
        Union[str, list]: Filepath per frame, or the input path when it
            is not a frame sequence.
    """
    folder, filename = os.path.split(path)

    if "#" in filename:
        # Turn each run of '#' into a printf-style zero-padded token
        filename = re.sub(
            "#+",
            lambda match: "%0{}d".format(len(match.group())),
            filename)

    if "%" not in filename:
        # Not a sequence, single file
        return path

    first = int(instance.data["frameStartHandle"])
    last = int(instance.data["frameEndHandle"])
    return [
        os.path.join(folder, filename % frame).replace("\\", "/")
        for frame in range(first, last + 1)
    ]

View file

@ -1,85 +0,0 @@
import hou
import pyblish.api
from ayon_houdini.api import plugin
class CollectHoudiniReviewData(plugin.HoudiniInstancePlugin):
    """Collect Review Data."""

    label = "Collect Review Data"
    # This specific order value is used so that this plugin runs after
    # CollectRopFrameRange and also after CollectLocalRenderInstances.
    order = pyblish.api.CollectorOrder + 0.13
    families = ["review"]

    # Maps ROP node type name to the parm that holds its camera path.
    _CAMERA_PARM_BY_ROP_TYPE = {
        "opengl": "camera",
        "karma": "camera",
        "ifd": "camera",
        "arnold": "camera",
        "Redshift_ROP": "RS_renderCamera",
        "vray_renderer": "render_camera",
    }

    def process(self, instance):
        # This fixes the burnin having the incorrect start/end timestamps
        # because without this it would take it from the context instead
        # which isn't the actual frame range that this instance renders.
        instance.data["handleStart"] = 0
        instance.data["handleEnd"] = 0
        instance.data["fps"] = instance.context.data["fps"]

        # Enable ftrack functionality
        instance.data.setdefault("families", []).append("ftrack")

        # Get the camera from the rop node to collect the focal length
        ropnode = hou.node(instance.data["instance_node"])

        # Get camera based on the instance_node type.
        camera_path = self._get_camera_path(ropnode)
        camera_node = hou.node(camera_path)
        if not camera_node:
            self.log.warning("No valid camera node found on review node: "
                             "{}".format(camera_path))
            return

        # Collect focal length.
        focal_parm = camera_node.parm("focal")
        if not focal_parm:
            self.log.warning("No 'focal' (focal length) parameter found on "
                             "camera: {}".format(camera_path))
            return

        if focal_parm.isTimeDependent():
            # Animated focal length: sample once per frame over the range
            first = instance.data["frameStartHandle"]
            last = instance.data["frameEndHandle"] + 1
            focal_length = [
                focal_parm.evalAsFloatAtFrame(frame)
                for frame in range(int(first), int(last))
            ]
        else:
            focal_length = focal_parm.evalAsFloat()

        # Store focal length in `burninDataMembers`
        instance.data.setdefault(
            "burninDataMembers", {})["focalLength"] = focal_length

    def _get_camera_path(self, ropnode):
        """Get the camera path associated with the given rop node.

        This function evaluates the camera parameter according to the
        type of the given rop node.

        Returns:
            Union[str, None]: Camera path or None.
                This function can return an empty string if the camera
                path is empty, i.e. no camera path.
        """
        parm_name = self._CAMERA_PARM_BY_ROP_TYPE.get(ropnode.type().name())
        if parm_name is None:
            # Unsupported ROP type
            return None
        return ropnode.parm(parm_name).eval()

View file

@ -1,24 +0,0 @@
import pyblish.api
from ayon_houdini.api import plugin
class CollectReviewableInstances(plugin.HoudiniInstancePlugin):
    """Collect Reviewable Instances.

    Basically, all instances of the specified families with
    creator_attributes["review"].
    """

    order = pyblish.api.CollectorOrder
    label = "Collect Reviewable Instances"
    families = [
        "mantra_rop",
        "karma_rop",
        "redshift_rop",
        "arnold_rop",
        "vray_rop",
        "usdrender",
    ]

    def process(self, instance):
        # The creator exposes a 'review' toggle; default to False when
        # the attribute is absent.
        creator_attributes = instance.data["creator_attributes"]
        instance.data["review"] = creator_attributes.get("review", False)

View file

@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
"""Collector plugin for frames data on ROP instances."""
import hou # noqa
import pyblish.api
from ayon_houdini.api import lib, plugin
class CollectRopFrameRange(plugin.HoudiniInstancePlugin):
    """Collect all frames which would be saved from the ROP nodes."""

    order = pyblish.api.CollectorOrder
    label = "Collect RopNode Frame Range"

    def process(self, instance):
        node_path = instance.data.get("instance_node")
        if node_path is None:
            # Instance without instance node, e.g. a workfile instance
            self.log.debug(
                "No instance node found for instance: {}".format(instance)
            )
            return

        frame_data = lib.get_frame_data(hou.node(node_path), self.log)
        if not frame_data:
            return

        # Log debug message about the collected frame range
        self.log.debug(
            "Collected frame_data: {}".format(frame_data)
        )
        instance.data.update(frame_data)

View file

@ -1,20 +0,0 @@
# -*- coding: utf-8 -*-
"""Collector for staticMesh types. """
import pyblish.api
from ayon_houdini.api import plugin
class CollectStaticMeshType(plugin.HoudiniInstancePlugin):
    """Collect data type for fbx instance."""

    families = ["staticMesh"]
    label = "Collect type of staticMesh"
    order = pyblish.api.CollectorOrder

    def process(self, instance):
        fbx_identifier = "io.openpype.creators.houdini.staticmesh.fbx"
        if instance.data["creator_identifier"] == fbx_identifier:
            # Marking this instance as FBX triggers the FBX extractor.
            instance.data["families"] += ["fbx"]

View file

@ -1,158 +0,0 @@
import copy
import os
import re
import pyblish.api
from ayon_core.pipeline.create import get_product_name
from ayon_houdini.api import plugin
import ayon_houdini.api.usd as usdlib
import hou
def copy_instance_data(instance_src, instance_dest, attr):
    """Copy instance data from `src` instance to `dest` instance.

    Examples:
        >>> copy_instance_data(instance_src, instance_dest,
        >>>     attr="publish_attributes.CollectRopFrameRange")

    Arguments:
        instance_src (pyblish.api.Instance): Source instance to copy from
        instance_dest (pyblish.api.Instance): Target instance to copy to
        attr (str): Attribute on the source instance to copy. This can be
            a nested key joined by `.` to only copy sub entries of
            dictionaries in the source instance's data. If any key along
            the path is missing on the source instance, nothing is
            copied.

    Raises:
        AssertionError: If a parent key already exists on the destination
            instance but is not of the correct type (= is not a dict)
    """
    src_data = instance_src.data
    dest_data = instance_dest.data
    keys = attr.split(".")
    last_index = len(keys) - 1
    for i, key in enumerate(keys):
        if key not in src_data:
            # Key missing on source - silently skip the copy
            break

        src_value = src_data[key]
        # BUGFIX: compare against the index of the *last key* instead of
        # `len(key)` (the character length of the key string), which made
        # the final assignment silently never happen for most key names.
        if i != last_index:
            dest_data = dest_data.setdefault(key, {})
            assert isinstance(dest_data, dict), "Destination must be a dict"
            src_data = src_value
        else:
            # Last iteration - assign the value
            dest_data[key] = copy.deepcopy(src_value)
class CollectUsdLayers(plugin.HoudiniInstancePlugin):
    """Collect the USD Layers that have configured save paths.

    For each USD layer on the ROP that was configured with an explicit
    save path, an extra pyblish instance is created so the user can
    disable publishing of individual configured layers.
    """

    order = pyblish.api.CollectorOrder + 0.25
    label = "Collect USD Layers"
    families = ["usdrop"]

    def process(self, instance):
        # TODO: Replace this with a Hidden Creator so we collect these BEFORE
        #   starting the publish so the user sees them before publishing
        #   - however user should not be able to individually enable/disable
        #   this from the main ROP its created from?
        output = instance.data.get("output_node")
        if not output:
            self.log.debug("No output node found..")
            return

        rop_node = hou.node(instance.data["instance_node"])

        # Gather (layer, save path, creator node) for each layer that has
        # a configured save path stored in its HoudiniLayerInfo prim.
        save_layers = []
        for layer in usdlib.get_configured_save_layers(rop_node):

            info = layer.rootPrims.get("HoudiniLayerInfo")
            save_path = info.customData.get("HoudiniSavePath")
            creator = info.customData.get("HoudiniCreatorNode")

            self.log.debug("Found configured save path: "
                           "%s -> %s", layer, save_path)

            # Log node that configured this save path
            creator_node = hou.nodeBySessionId(creator) if creator else None
            if creator_node:
                self.log.debug(
                    "Created by: %s", creator_node.path()
                )

            save_layers.append((layer, save_path, creator_node))

        # Store on the instance
        instance.data["usdConfiguredSavePaths"] = save_layers

        # Create configured layer instances so User can disable updating
        # specific configured layers for publishing.
        context = instance.context
        for layer, save_path, creator_node in save_layers:
            name = os.path.basename(save_path)
            layer_inst = context.create_instance(name)

            # include same USD ROP
            layer_inst.append(rop_node)

            staging_dir, fname = os.path.split(save_path)
            fname_no_ext, ext = os.path.splitext(fname)

            variant = fname_no_ext

            # Strip off any trailing version number in the form of _v[0-9]+
            variant = re.sub("_v[0-9]+$", "", variant)

            layer_inst.data["usd_layer"] = layer
            layer_inst.data["usd_layer_save_path"] = save_path

            # Build the product name for the layer from the source
            # instance's variant plus the (version-stripped) filename.
            project_name = context.data["projectName"]
            variant_base = instance.data["variant"]
            subset = get_product_name(
                project_name=project_name,
                # TODO: This should use task from `instance`
                task_name=context.data["anatomyData"]["task"]["name"],
                task_type=context.data["anatomyData"]["task"]["type"],
                host_name=context.data["hostName"],
                product_type="usd",
                variant=variant_base + "_" + variant,
                project_settings=context.data["project_settings"]
            )

            label = "{0} -> {1}".format(instance.data["name"], subset)
            family = "usd"
            layer_inst.data["family"] = family
            layer_inst.data["families"] = [family]
            layer_inst.data["subset"] = subset
            layer_inst.data["label"] = label
            layer_inst.data["asset"] = instance.data["asset"]
            layer_inst.data["task"] = instance.data.get("task")
            layer_inst.data["instance_node"] = instance.data["instance_node"]
            # Do not trigger a render for the layer instance itself
            layer_inst.data["render"] = False
            layer_inst.data["output_node"] = creator_node

            # Inherit "use handles" from the source instance
            # TODO: Do we want to maybe copy full `publish_attributes` instead?
            copy_instance_data(
                instance, layer_inst,
                attr="publish_attributes.CollectRopFrameRange.use_handles"
            )

            # Allow this subset to be grouped into a USD Layer on creation
            layer_inst.data["subsetGroup"] = "USD Layer"

            # For now just assume the representation will get published
            representation = {
                "name": "usd",
                "ext": ext.lstrip("."),
                "stagingDir": staging_dir,
                "files": fname
            }
            layer_inst.data.setdefault("representations", []).append(
                representation)

View file

@ -1,243 +0,0 @@
import re
import os
import glob
from typing import List, Optional
import dataclasses
import pyblish.api
import hou
from pxr import Sdf
from ayon_houdini.api import plugin
# Colorspace attributes differ per renderer implementation in the USD data.
# Some have dedicated input names like Arnold and Redshift, whereas others like
# MaterialX store `colorSpace` metadata on the asset property itself.
# See the `get_colorspace` method on the plug-in for more details.
COLORSPACE_ATTRS = [
    "inputs:color_space",       # Image Vop (arnold::image)
    "inputs:tex0_colorSpace",   # RS Texture Vop (redshift::TextureSampler)
    # TODO: USD UV Texture VOP doesn't seem to use colorspaces from the actual
    #   OCIO configuration so we skip these for now. Especially since the
    #   texture is usually used for 'preview' purposes anyway.
    # "inputs:sourceColorSpace",  # USD UV Texture Vop (usduvtexture::2.0)
]
@dataclasses.dataclass
class Resource:
    """A file resource (e.g. a texture) referenced by a USD attribute."""
    attribute: str  # property path
    source: str  # unresolved source path
    files: List[str]  # resolved list of files, e.g. multiple for <UDIM>
    color_space: Optional[str] = None  # colorspace of the resource, if known
def get_layer_property_paths(layer: Sdf.Layer) -> List[Sdf.Path]:
    """Return all property paths from a layer."""
    property_paths = []

    def _visit(path):
        # Only collect property paths; skip prims and other path types
        if path.IsPropertyPath():
            property_paths.append(path)

    layer.Traverse("/", _visit)
    return property_paths
class CollectUsdLookAssets(plugin.HoudiniInstancePlugin):
    """Collect all assets introduced by the look.

    We are looking to collect e.g. all texture resources so we can transfer
    them with the publish and write then to the publish location.

    If possible, we'll also try to identify the colorspace of the asset.
    """
    # TODO: Implement $F frame support (per frame values)
    # TODO: If input image is already a published texture or resource than
    #   preferably we'd keep the link in-tact and NOT update it. We can just
    #   start ignoring AYON URIs

    label = "Collect USD Look Assets"
    order = pyblish.api.CollectorOrder
    hosts = ["houdini"]
    families = ["look"]

    # Asset file extensions that are not considered transferable resources
    # (geometry/usd payloads rather than e.g. texture files)
    exclude_suffixes = [".usd", ".usda", ".usdc", ".usdz", ".abc", ".vbd"]

    def process(self, instance):
        rop: hou.RopNode = hou.node(instance.data.get("instance_node"))
        if not rop:
            return

        lop_node: hou.LopNode = instance.data.get("output_node")
        if not lop_node:
            return

        # Only consider layers authored below the layer break so we do
        # not collect assets from the input/reference layers.
        above_break_layers = set(lop_node.layersAboveLayerBreak())
        stage = lop_node.stage()
        layers = [
            layer for layer
            in stage.GetLayerStack(includeSessionLayers=False)
            if layer.identifier not in above_break_layers
        ]

        instance_resources = self.get_layer_assets(layers)

        # Define a relative asset remapping for the USD Extractor so that
        # any textures are remapped to their 'relative' publish path.
        # All textures will be in a relative `./resources/` folder
        remap = {}
        for resource in instance_resources:
            source = resource.source
            name = os.path.basename(source)
            remap[os.path.normpath(source)] = f"./resources/{name}"
        instance.data["assetRemap"] = remap

        # Store resources on instance
        resources = instance.data.setdefault("resources", [])
        for resource in instance_resources:
            resources.append(dataclasses.asdict(resource))

        # Log all collected textures
        # Note: It is fine for a single texture to be included more than once
        # where even one of them does not have a color space set, but the other
        # does. For example, there may be a USD UV Texture just for a GL
        # preview material which does not specify an OCIO color
        # space.
        all_files = []
        for resource in instance_resources:
            all_files.append(f"{resource.attribute}:")

            for filepath in resource.files:
                if resource.color_space:
                    file_label = f"- {filepath} ({resource.color_space})"
                else:
                    file_label = f"- {filepath}"
                all_files.append(file_label)

        self.log.info(
            "Collected assets:\n{}".format(
                "\n".join(all_files)
            )
        )

    def get_layer_assets(self, layers: List[Sdf.Layer]) -> List[Resource]:
        """Return all asset-typed attribute values found in the layers.

        Returns:
            List[Resource]: One resource per asset attribute, sorted by
                source path.
        """
        # TODO: Correctly resolve paths using Asset Resolver.
        #   Preferably this would use one cached
        #   resolver context to optimize the path resolving.
        # TODO: Fix for timesamples - if timesamples, then `.default` might
        #   not be authored on the spec
        resources: List[Resource] = list()
        for layer in layers:
            for path in get_layer_property_paths(layer):

                spec = layer.GetAttributeAtPath(path)
                if not spec:
                    continue

                # Only asset-typed attributes can reference files
                if spec.typeName != "asset":
                    continue

                asset: Sdf.AssetPath = spec.default
                base, ext = os.path.splitext(asset.path)
                if ext in self.exclude_suffixes:
                    continue

                filepath = asset.path.replace("\\", "/")

                # Expand <UDIM> to all files of the available files on disk
                # TODO: Add support for `<TILE>`
                # TODO: Add support for `<ATTR:name INDEX:name DEFAULT:value>`
                if "<UDIM>" in filepath.upper():
                    pattern = re.sub(
                        r"<UDIM>",
                        # UDIM is always four digits
                        "[0-9]" * 4,
                        filepath,
                        flags=re.IGNORECASE
                    )
                    files = glob.glob(pattern)
                else:
                    # Single file
                    files = [filepath]

                # Detect the colorspace of the input asset property
                colorspace = self.get_colorspace(spec)

                resource = Resource(
                    attribute=path.pathString,
                    source=asset.path,
                    files=files,
                    color_space=colorspace
                )
                resources.append(resource)

        # Sort by filepath
        resources.sort(key=lambda r: r.source)

        return resources

    def get_colorspace(self, spec: Sdf.AttributeSpec) -> Optional[str]:
        """Return colorspace for a Asset attribute spec.

        There is currently no USD standard on how colorspaces should be
        represented for shaders or asset properties - each renderer's material
        implementations seem to currently use their own way of specifying the
        colorspace on the shader. As such, this comes with some guesswork.

        Args:
            spec (Sdf.AttributeSpec): The asset type attribute to retrieve
                the colorspace for.

        Returns:
            Optional[str]: The colorspace for the given attribute, if any.

        """
        # TODO: Support Karma, V-Ray, Renderman texture colorspaces
        # Materialx image defines colorspace as custom info on the attribute
        if spec.HasInfo("colorSpace"):
            return spec.GetInfo("colorSpace")

        # Arnold materials define the colorspace as a separate primvar
        # TODO: Fix for timesamples - if timesamples, then `.default` might
        #   not be authored on the spec
        prim_path = spec.path.GetPrimPath()
        layer = spec.layer
        for name in COLORSPACE_ATTRS:
            colorspace_property_path = prim_path.AppendProperty(name)
            colorspace_spec = layer.GetAttributeAtPath(
                colorspace_property_path
            )
            if colorspace_spec and colorspace_spec.default:
                return colorspace_spec.default
class CollectUsdLookResourceTransfers(plugin.HoudiniInstancePlugin):
    """Define the publish direct file transfers for any found resources.

    This ensures that any source texture will end up in the published
    look in the `resourcesDir`.
    """

    label = "Collect USD Look Transfers"
    order = pyblish.api.CollectorOrder + 0.496
    hosts = ["houdini"]
    families = ["look"]

    def process(self, instance):
        resources_dir = instance.data["resourcesDir"]

        transfers = instance.data.setdefault("transfers", [])
        for resource in instance.data.get("resources", []):
            for source_path in resource["files"]:
                # Flatten each source file directly into the resources dir
                destination = os.path.join(
                    resources_dir, os.path.basename(source_path))
                transfers.append((source_path, destination))
                self.log.debug("Registering transfer: %s -> %s",
                               source_path, destination)

View file

@ -1,86 +0,0 @@
import os
import re
import hou
import pyblish.api
from ayon_houdini.api import (
colorspace,
plugin
)
from ayon_houdini.api.lib import (
evalParmNoFrame,
get_color_management_preferences
)
class CollectUsdRender(plugin.HoudiniInstancePlugin):
    """Collect publishing data for USD Render ROP.

    If `rendercommand` parm is disabled (and thus no rendering triggers by the
    usd render rop) it is assumed to be a "Split Render" job where the farm
    will get an additional render job after the USD file is extracted.

    Provides:
        instance    ->  ifdFile
        instance    ->  colorspaceConfig
        instance    ->  colorspaceDisplay
        instance    ->  colorspaceView
    """

    label = "Collect USD Render Rop"
    order = pyblish.api.CollectorOrder
    hosts = ["houdini"]
    families = ["usdrender"]

    def process(self, instance):
        rop = hou.node(instance.data.get("instance_node"))

        if instance.data["splitRender"]:
            export_file = self._get_split_export_file(rop)
            self.log.debug(
                "Found export file: {}".format(export_file)
            )
            instance.data["ifdFile"] = export_file

            if "$F" not in export_file:
                # The render job is not frame dependent but fully dependent
                # on the job having been completed, since the extracted file
                # is a single file.
                instance.data["splitRenderFrameDependent"] = False

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

        # stub required data for Submit Publish Job publish plug-in
        instance.data["attachTo"] = []
        instance.data["renderProducts"] = colorspace.ARenderProduct()

    @staticmethod
    def _get_split_export_file(rop):
        """Return the USD export path with '#' padding restored to $F tokens.

        The file is usually relative to the Output Processor's 'Save to
        Directory' which forces all USD files to end up in that directory.
        """
        # TODO: It is possible for a user to disable this
        # TODO: When enabled I think only the basename of the `lopoutput`
        #   parm is preserved, any parent folders defined are likely ignored
        lop_output = evalParmNoFrame(rop, "lopoutput", pad_character="#")
        folder = evalParmNoFrame(
            rop, "savetodirectory_directory", pad_character="#"
        )
        export_file = os.path.join(folder, lop_output)

        # Substitute any # characters in the name back to their $F4
        # equivalent
        def _hashes_to_f(match):
            count = len(match.group(0))
            if count <= 1:
                return "$F"  # just $F, not $F1 or $F0
            return "$F{}".format(count)

        return re.sub("#+", _hashes_to_f, export_file)

View file

@ -1,154 +0,0 @@
import re
import os
import hou
import pyblish.api
from ayon_houdini.api.lib import (
evalParmNoFrame,
get_color_management_preferences
)
from ayon_houdini.api import (
colorspace,
plugin
)
class CollectVrayROPRenderProducts(plugin.HoudiniInstancePlugin):
    """Collect Vray Render Products

    Collects the instance.data["files"] for the render products.

    Provides:
        instance    ->  files

    """

    label = "VRay ROP Render Products"
    # This specific order value is used so that
    # this plugin runs after CollectFrames
    order = pyblish.api.CollectorOrder + 0.11
    families = ["vray_rop"]

    def process(self, instance):
        rop = hou.node(instance.data.get("instance_node"))

        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")
        if chunk_size_parm:
            chunk_size = int(chunk_size_parm.eval())
            instance.data["chunkSize"] = chunk_size
            self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "SettingsOutput_img_file_path")
        render_products = []
        # TODO: add render elements if render element

        export_prefix = None
        export_products = []
        if instance.data["splitRender"]:
            # Split render: a scene export job precedes the actual render
            export_prefix = evalParmNoFrame(
                rop, "render_export_filepath", pad_character="0"
            )
            beauty_export_product = self.get_render_product_name(
                prefix=export_prefix,
                suffix=None)
            export_products.append(beauty_export_product)
            self.log.debug(
                "Found export product: {}".format(beauty_export_product)
            )
            instance.data["ifdFile"] = beauty_export_product
            instance.data["exportFiles"] = list(export_products)

        beauty_product = self.get_render_product_name(default_prefix)
        render_products.append(beauty_product)
        # The beauty product is registered under the "" (main) AOV key
        files_by_aov = {
            "": self.generate_expected_files(instance,
                                             beauty_product)}

        # Assume it's a multipartExr Render.
        multipartExr = True

        if instance.data.get("RenderElement", True):
            render_element = self.get_render_element_name(rop, default_prefix)
            if render_element:
                for aov, renderpass in render_element.items():
                    render_products.append(renderpass)
                    files_by_aov[aov] = self.generate_expected_files(
                        instance, renderpass)
                    # Set to False as soon as we have a separated aov.
                    multipartExr = False

        # Review Logic expects this key to exist and be True
        # if render is a multipart Exr.
        # As long as we have one AOV then multipartExr should be True.
        instance.data["multipartExr"] = multipartExr

        for product in render_products:
            self.log.debug("Found render product: %s" % product)
        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        # For now by default do NOT try to publish the rendered output
        instance.data["publishJobState"] = "Suspended"
        instance.data["attachTo"] = []  # stub required data

        if "expectedFiles" not in instance.data:
            instance.data["expectedFiles"] = list()
        instance.data["expectedFiles"].append(files_by_aov)
        self.log.debug("expectedFiles:{}".format(files_by_aov))

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

    def get_render_product_name(self, prefix, suffix="<reName>"):
        """Return the beauty output filename if render element enabled
        """
        # Remove aov suffix from the product: `prefix.aov_suffix` -> `prefix`
        aov_parm = ".{}".format(suffix)
        return prefix.replace(aov_parm, "")

    def get_render_element_name(self, node, prefix, suffix="<reName>"):
        """Return the output filename using the AOV prefix and suffix

        Returns:
            dict: AOV name -> render product path.
        """
        render_element_dict = {}
        # need a rewrite
        re_path = node.evalParm("render_network_render_channels")
        if re_path:
            node_children = hou.node(re_path).children()
            for element in node_children:
                # Skip the settings node itself; every other child is
                # assumed to be a render channel (AOV).
                if element.shaderName() != "vray:SettingsRenderChannels":
                    aov = str(element)
                    render_product = prefix.replace(suffix, aov)
                    render_element_dict[aov] = render_product
        return render_element_dict

    def generate_expected_files(self, instance, path):
        """Create expected files in instance data

        Returns the input path unchanged when it is not a frame sequence,
        otherwise a list with one filepath per frame.
        """
        # NOTE(review): `dir` and `file` shadow Python builtins here;
        # consider renaming on a future behavioral change.
        dir = os.path.dirname(path)
        file = os.path.basename(path)

        if "#" in file:
            def replace(match):
                return "%0{}d".format(len(match.group()))

            file = re.sub("#+", replace, file)

        if "%" not in file:
            return path

        expected_files = []
        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]

        for i in range(int(start), (int(end) + 1)):
            expected_files.append(
                os.path.join(dir, (file % i)).replace("\\", "/"))
        return expected_files

View file

@ -1,34 +0,0 @@
import os
import pyblish.api
from ayon_houdini.api import plugin
class CollectWorkfile(plugin.HoudiniInstancePlugin):
    """Inject workfile representation into instance"""

    order = pyblish.api.CollectorOrder - 0.01
    label = "Houdini Workfile Data"
    families = ["workfile"]

    def process(self, instance):
        current_file = instance.context.data["currentFile"]
        folder, file = os.path.split(current_file)
        filename, ext = os.path.splitext(file)

        # Mirror the context frame range onto the workfile instance
        context_data = instance.context.data
        instance.data.update({
            "setMembers": [current_file],
            "frameStart": context_data["frameStart"],
            "frameEnd": context_data["frameEnd"],
            "handleStart": context_data["handleStart"],
            "handleEnd": context_data["handleEnd"],
        })

        instance.data["representations"] = [{
            "name": ext.lstrip("."),
            "ext": ext.lstrip("."),
            "files": file,
            "stagingDir": folder,
        }]

        self.log.debug("Collected workfile instance: {}".format(file))

View file

@ -1,15 +0,0 @@
import hou
import pyblish.api
from ayon_houdini.api import plugin
class CollectWorksceneFPS(plugin.HoudiniContextPlugin):
    """Get the FPS of the work scene."""

    label = "Workscene FPS"
    order = pyblish.api.CollectorOrder

    def process(self, context):
        scene_fps = hou.fps()
        self.log.info("Workscene FPS: %s" % scene_fps)
        context.data.update({"fps": scene_fps})

View file

@ -1,59 +0,0 @@
import tempfile
import pyblish.api
from ayon_core.pipeline import OptionalPyblishPluginMixin
from ayon_houdini.api import lib, plugin
from ayon_houdini.api.pipeline import IS_HEADLESS
class ExtractActiveViewThumbnail(plugin.HoudiniExtractorPlugin,
                                 OptionalPyblishPluginMixin):
    """Set instance thumbnail to a screengrab of current active viewport.

    This makes it so that if an instance does not have a thumbnail set yet
    that it will get a thumbnail of the currently active view at the time
    of publishing as a fallback.
    """

    order = pyblish.api.ExtractorOrder + 0.49
    label = "Extract Active View Thumbnail"
    families = ["workfile"]

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        if IS_HEADLESS:
            self.log.debug(
                "Skip extraction of active view thumbnail, due to being in"
                "headless mode."
            )
            return

        if instance.data.get("thumbnailPath"):
            # A thumbnail was already set for this instance
            return

        view_thumbnail = self.get_view_thumbnail(instance)
        if not view_thumbnail:
            return

        self.log.debug("Setting instance thumbnail path to: {}"
                       .format(view_thumbnail))
        instance.data["thumbnailPath"] = view_thumbnail

    def get_view_thumbnail(self, instance):
        """Snapshot the active scene viewer to a temporary jpg file."""
        sceneview = lib.get_scene_viewer()
        if sceneview is None:
            self.log.debug("Skipping Extract Active View Thumbnail"
                           " because no scene view was detected.")
            return

        # Use delete=False so the file survives closing the handle;
        # cleanup is delegated to the publish cleanup step below.
        with tempfile.NamedTemporaryFile(
                "w", suffix=".jpg", delete=False) as tmp:
            lib.sceneview_snapshot(sceneview, tmp.name)
            thumbnail_path = tmp.name

        instance.context.data["cleanupFullPaths"].append(thumbnail_path)
        return thumbnail_path

View file

@ -1,41 +0,0 @@
# -*- coding: utf-8 -*-
import os
from pprint import pformat
import hou
import pyblish.api
from ayon_houdini.api import plugin
class ExtractHDA(plugin.HoudiniExtractorPlugin):
    """Save the instance's HDA definition and register it as a representation."""

    order = pyblish.api.ExtractorOrder
    label = "Extract HDA"
    families = ["hda"]

    def process(self, instance):
        self.log.info(pformat(instance.data))
        hda_node = hou.node(instance.data.get("instance_node"))
        hda_def = hda_node.type().definition()

        # Make sure initial parms and contents are stored in the saved HDA
        hda_options = hda_def.options()
        hda_options.setSaveInitialParmsAndContents(True)

        next_version = instance.data["anatomyData"]["version"]
        self.log.info("setting version: {}".format(next_version))
        hda_def.setVersion(str(next_version))

        hda_def.setOptions(hda_options)
        hda_def.save(hda_def.libraryFilePath(), hda_node, hda_options)

        library_path = hda_def.libraryFilePath()
        self.log.info("Using HDA from {}".format(library_path))

        representations = instance.data.setdefault("representations", [])
        representations.append({
            "name": "hda",
            "ext": "hda",
            "files": os.path.basename(library_path),
            "stagingDir": os.path.dirname(library_path),
        })

View file

@ -1,85 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractRender(plugin.HoudiniExtractorPlugin):
    """Trigger the local render for supported ROPs and validate its output."""

    order = pyblish.api.ExtractorOrder
    label = "Extract Render"
    families = ["mantra_rop",
                "karma_rop",
                "redshift_rop",
                "arnold_rop",
                "vray_rop",
                "usdrender"]

    # product type -> (parm name, value when split, value when not split)
    _SPLIT_PARM_BY_PRODUCT_TYPE = {
        "arnold_rop": ("ar_ass_export_enable", 1, 0),
        "mantra_rop": ("soho_outputmode", 1, 0),
        "redshift_rop": ("RS_archive_enable", 1, 0),
        "vray_rop": ("render_export_mode", "2", "1"),
        "usdrender": ("runcommand", 0, 1),
    }

    def process(self, instance):
        creator_attribute = instance.data["creator_attributes"]
        product_type = instance.data["productType"]
        rop_node = hou.node(instance.data.get("instance_node"))

        # TODO: This section goes against pyblish concepts where
        #   pyblish plugins should change the state of the scene.
        #   However, in ayon publisher tool users can have options and
        #   these options should some how synced with the houdini nodes.
        #   More info: https://github.com/ynput/ayon-core/issues/417

        # Align split parameter value on rop node to the render target.
        split_parm = self._SPLIT_PARM_BY_PRODUCT_TYPE.get(product_type)
        if split_parm is not None:
            parm_name, split_value, no_split_value = split_parm
            if instance.data["splitRender"]:
                rop_node.setParms({parm_name: split_value})
            else:
                rop_node.setParms({parm_name: no_split_value})

        if instance.data.get("farm"):
            self.log.debug("Render should be processed on farm, "
                           "skipping local render.")
            return

        if creator_attribute.get("render_target") == "local":
            render_rop(hou.node(instance.data.get("instance_node")))

        # `ExpectedFiles` is a list that includes one dict.
        expected_files = instance.data["expectedFiles"][0]
        # Each key in that dict is a list of files.
        # Combine lists of files into one big list.
        all_frames = []
        for value in expected_files.values():
            if isinstance(value, str):
                all_frames.append(value)
            elif isinstance(value, list):
                all_frames.extend(value)

        # Check missing frames.
        # Frames won't exist if user cancels the render.
        missing_frames = [
            frame for frame in all_frames
            if not os.path.exists(frame)
        ]
        if missing_frames:
            # TODO: Use user friendly error reporting.
            raise RuntimeError("Failed to complete render extraction. "
                               "Missing output files: {}".format(
                                   missing_frames))

View file

@ -1,150 +0,0 @@
import os
import hou
import pyblish.api
from ayon_core.pipeline import publish
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop, splitext
class ExtractROP(plugin.HoudiniExtractorPlugin):
    """Generic Extractor for any ROP node.

    Renders the ROP locally, validates the expected frames exist in the
    staging directory and registers them as a single representation.
    """
    label = "Extract ROP"
    order = pyblish.api.ExtractorOrder

    families = ["abc", "camera", "bgeo", "pointcache", "fbx",
                "vdbcache", "ass", "redshiftproxy", "mantraifd"]
    targets = ["local", "remote"]

    def process(self, instance: pyblish.api.Instance):
        if instance.data.get("farm"):
            self.log.debug("Should be processed on farm, skipping.")
            return
        rop_node = hou.node(instance.data["instance_node"])

        # `frames` may be a single filename or a list of filenames
        files = instance.data["frames"]
        first_file = files[0] if isinstance(files, (list, tuple)) else files
        _, ext = splitext(
            first_file, allowed_multidot_extensions=[
                ".ass.gz", ".bgeo.sc", ".bgeo.gz",
                ".bgeo.lzma", ".bgeo.bz2"]
        )
        ext = ext.lstrip(".")

        self.log.debug(f"Rendering {rop_node.path()} to {first_file}..")

        render_rop(rop_node)
        self.validate_expected_frames(instance)

        # In some cases representation name is not the extension
        # TODO: Preferably we remove this very specific naming
        product_type = instance.data["productType"]
        name = {
            "bgeo": "bgeo",
            "rs": "rs",
            "ass": "ass"
        }.get(product_type, ext)

        representation = {
            "name": name,
            "ext": ext,
            "files": instance.data["frames"],
            "stagingDir": instance.data["stagingDir"],
            "frameStart": instance.data["frameStartHandle"],
            "frameEnd": instance.data["frameEndHandle"],
        }
        self.update_representation_data(instance, representation)
        instance.data.setdefault("representations", []).append(representation)

    def validate_expected_frames(self, instance: pyblish.api.Instance):
        """
        Validate all expected files in `instance.data["frames"]` exist in
        the staging directory.

        Raises:
            RuntimeError: If any expected frame file is missing on disk.
        """
        filenames = instance.data["frames"]
        staging_dir = instance.data["stagingDir"]
        if isinstance(filenames, str):
            # Single frame
            filenames = [filenames]

        missing_filenames = [
            filename for filename in filenames
            if not os.path.isfile(os.path.join(staging_dir, filename))
        ]
        if missing_filenames:
            raise RuntimeError(f"Missing frames: {missing_filenames}")

    def update_representation_data(self,
                                   instance: pyblish.api.Instance,
                                   representation: dict):
        """Allow subclass to override the representation data in-place"""
        pass
class ExtractOpenGL(ExtractROP,
                    publish.ColormanagedPyblishPluginMixin):
    """Extract OpenGL review renders.

    Runs slightly before the generic ROP extractor and only acts on
    instances whose instance node is an ``opengl`` ROP.
    """
    order = pyblish.api.ExtractorOrder - 0.01
    label = "Extract OpenGL"
    families = ["review"]
    def process(self, instance):
        # This plugin is triggered when marking render as reviewable.
        # Therefore, this plugin will run over wrong instances.
        # TODO: Don't run this plugin on wrong instances.
        # This plugin should run only on review product type
        # with instance node of opengl type.
        instance_node = instance.data.get("instance_node")
        if not instance_node:
            self.log.debug("Skipping instance without instance node.")
            return
        rop_node = hou.node(instance_node)
        if rop_node.type().name() != "opengl":
            self.log.debug("Skipping OpenGl extraction. Rop node {} "
                           "is not an OpenGl node.".format(rop_node.path()))
            return
        super(ExtractOpenGL, self).process(instance)
    def update_representation_data(self,
                                   instance: pyblish.api.Instance,
                                   representation: dict):
        # Tag for review; unless the user opted to keep the intermediate
        # images they are deleted after integration.
        tags = ["review"]
        if not instance.data.get("keepImages"):
            tags.append("delete")
        representation.update({
            # TODO: Avoid this override?
            "name": instance.data["imageFormat"],
            "ext": instance.data["imageFormat"],
            "tags": tags,
            "preview": True,
            "camera_name": instance.data.get("review_camera")
        })
class ExtractComposite(ExtractROP,
                       publish.ColormanagedPyblishPluginMixin):
    """Extract image sequences rendered from a COP network."""
    label = "Extract Composite (Image Sequence)"
    families = ["imagesequence"]

    def update_representation_data(self,
                                   instance: pyblish.api.Instance,
                                   representation: dict):
        # Only OpenEXR output gets an explicit colorspace injected.
        if representation["ext"].lower() == "exr":
            # Houdini's default working colorspace is 'scene_linear' and
            # all extracted OpenEXR images are written in that space.
            # https://www.sidefx.com/docs/houdini/render/linear.html#image-formats
            self.set_representation_colorspace(
                representation, instance.context,
                colorspace="scene_linear"
            )

View file

@ -1,104 +0,0 @@
import os
from typing import List, AnyStr
import pyblish.api
from ayon_core.pipeline.publish.lib import get_instance_expected_output_path
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
from ayon_houdini.api.usd import remap_paths
import hou
class ExtractUSD(plugin.HoudiniExtractorPlugin):
    """Extract USD output from a USD ROP node.

    Renders the ROP with asset paths remapped to their publish locations
    so the written USD file references published resources instead of
    work files.
    """
    order = pyblish.api.ExtractorOrder
    label = "Extract USD"
    families = ["usdrop"]
    def process(self, instance):
        ropnode = hou.node(instance.data.get("instance_node"))
        # Get the filename from the filename parameter
        output = ropnode.evalParm("lopoutput")
        staging_dir = os.path.dirname(output)
        instance.data["stagingDir"] = staging_dir
        file_name = os.path.basename(output)
        self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir))
        mapping = self.get_source_to_publish_paths(instance.context)
        # Allow instance-specific path remapping overrides, e.g. changing
        # paths on used resources/textures for looks
        instance_mapping = instance.data.get("assetRemap", {})
        if instance_mapping:
            self.log.debug("Instance-specific asset path remapping:\n"
                           f"{instance_mapping}")
        mapping.update(instance_mapping)
        # Render while the remapping is active so saved layer paths point
        # to publish locations.
        with remap_paths(ropnode, mapping):
            render_rop(ropnode)
        # NOTE(review): assert is stripped under `python -O`; consider an
        # explicit raise for this output-exists check.
        assert os.path.exists(output), "Output does not exist: %s" % output
        if "representations" not in instance.data:
            instance.data["representations"] = []
        representation = {
            'name': 'usd',
            'ext': 'usd',
            'files': file_name,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)
    def get_source_to_publish_paths(self, context):
        """Define a mapping of all current instances in context from source
        file to publish file so this can be used on the USD save to remap
        asset layer paths on publish via AyonRemapPaths output processor"""
        mapping = {}
        for instance in context:
            # Skip inactive or non-publishing instances.
            if not instance.data.get("active", True):
                continue
            if not instance.data.get("publish", True):
                continue
            for repre in instance.data.get("representations", []):
                name = repre.get("name")
                ext = repre.get("ext")
                # TODO: The remapping might need to get more involved if the
                # asset paths that are set use e.g. $F
                # TODO: If the representation has multiple files we might need
                # to define the path remapping per file of the sequence
                path = get_instance_expected_output_path(
                    instance, representation_name=name, ext=ext
                )
                for source_path in get_source_paths(instance, repre):
                    source_path = os.path.normpath(source_path)
                    mapping[source_path] = path
        return mapping
def get_source_paths(
    instance: pyblish.api.Instance,
    repre: dict
) -> List[str]:
    """Return the full source filepaths for an instance's representations.

    Args:
        instance: Publish instance, used for its default staging directory
            when the representation defines none itself.
        repre: Representation dict; "files" may be a single filename (str)
            or a list of filenames.

    Returns:
        List of filepaths joined onto the staging directory.

    Raises:
        TypeError: If "files" is neither a list nor a string.
    """
    staging = repre.get("stagingDir", instance.data.get("stagingDir"))
    files = repre.get("files", [])
    if isinstance(files, list):
        return [os.path.join(staging, fname) for fname in files]
    elif isinstance(files, str):
        # Single file
        return [os.path.join(staging, files)]
    raise TypeError(f"Unsupported type for representation files: {files} "
                    "(supports list or str)")

View file

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Invalid VDB</title>
<description>
## Invalid VDB output
    All primitives of the output geometry must be VDBs; no other primitive
    types are allowed. As a result, the geometry will always contain an
    equal number of VDBs, points, primitives and vertices, because each
    VDB primitive consists of exactly one point, one vertex and one VDB.
This validation only checks the geometry on the first frame of the export
frame range.
</description>
<detail>
### Detailed Info
ROP node `{rop_path}` is set to export SOP path `{sop_path}`.
{message}
</detail>
</error>
</root>

View file

@ -1,54 +0,0 @@
import pyblish.api
from ayon_core.lib import version_up
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.publish import (
get_errored_plugins_from_context,
KnownPublishError
)
from ayon_houdini.api import plugin
class IncrementCurrentFile(plugin.HoudiniContextPlugin):
    """Increment the current file.
    Saves the current scene with an increased version number.
    """
    label = "Increment current file"
    order = pyblish.api.IntegratorOrder + 9.0
    families = ["workfile",
                "usdrender",
                "mantra_rop",
                "karma_rop",
                "redshift_rop",
                "arnold_rop",
                "vray_rop",
                "render.local.hou",
                "publish.hou"]
    optional = True
    def process(self, context):
        # If the Deadline submission plugin errored we must not version up,
        # otherwise the workfile no longer matches the submitted job.
        errored_plugins = get_errored_plugins_from_context(context)
        if any(
            plugin.__name__ == "HoudiniSubmitPublishDeadline"
            for plugin in errored_plugins
        ):
            raise KnownPublishError(
                "Skipping incrementing current file because "
                "submission to deadline failed."
            )
        # Filename must not have changed since collecting
        # NOTE(review): sibling plugin uses `host.get_current_workfile()`;
        # confirm `current_file()` is the intended host API here.
        host = registered_host()
        current_file = host.current_file()
        if context.data["currentFile"] != current_file:
            raise KnownPublishError(
                "Collected filename mismatches from current scene name."
            )
        new_filepath = version_up(current_file)
        host.save_workfile(new_filepath)

View file

@ -1,27 +0,0 @@
import pyblish.api
from ayon_core.pipeline import registered_host
from ayon_houdini.api import plugin
class SaveCurrentScene(plugin.HoudiniContextPlugin):
    """Save the current workfile before extraction starts.

    Ensures extractors operate on a saved scene. Aborts when the scene
    name changed since collection, because the collected publish data
    would then be stale.
    """
    label = "Save current file"
    order = pyblish.api.ExtractorOrder - 0.49

    def process(self, context):
        # Filename must not have changed since collecting
        host = registered_host()
        current_file = host.get_current_workfile()
        if context.data['currentFile'] != current_file:
            # Explicit raise instead of `assert` so the check also runs
            # under `python -O`; the previous message was also garbled
            # ("Collected filename from current scene name.").
            raise RuntimeError(
                "Collected filename mismatches from current scene name."
            )
        if host.workfile_has_unsaved_changes():
            self.log.info("Saving current file: {}".format(current_file))
            host.save_workfile(current_file)
        else:
            self.log.debug("No unsaved changes, skipping file save..")

View file

@ -1,150 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import plugin
class ValidateAbcPrimitiveToDetail(plugin.HoudiniInstancePlugin):
    """Validate Alembic ROP Primitive to Detail attribute is consistent.
    The Alembic ROP crashes Houdini whenever an attribute in the "Primitive to
    Detail" parameter exists on only a part of the primitives that belong to
    the same hierarchy path. Whenever it encounters inconsistent values,
    specifically where some are empty as opposed to others then Houdini
    crashes. (Tested in Houdini 17.5.229)
    """
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["abc"]
    label = "Validate Primitive to Detail (Abc)"
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                ("Primitives found with inconsistent primitive "
                 "to detail attributes. See log."),
                title=self.label
            )
    @classmethod
    def get_invalid(cls, instance):
        import hou # noqa
        output_node = instance.data.get("output_node")
        rop_node = hou.node(instance.data["instance_node"])
        if output_node is None:
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % rop_node.path()
            )
            return [rop_node.path()]
        # Nothing to validate when no Primitive to Detail pattern is set.
        pattern = rop_node.parm("prim_to_detail_pattern").eval().strip()
        if not pattern:
            cls.log.debug(
                "Alembic ROP has no 'Primitive to Detail' pattern. "
                "Validation is ignored.."
            )
            return
        # The crash only occurs when building hierarchy from an attribute.
        build_from_path = rop_node.parm("build_from_path").eval()
        if not build_from_path:
            cls.log.debug(
                "Alembic ROP has 'Build from Path' disabled. "
                "Validation is ignored.."
            )
            return
        path_attr = rop_node.parm("path_attrib").eval()
        if not path_attr:
            cls.log.error(
                "The Alembic ROP node has no Path Attribute"
                "value set, but 'Build Hierarchy from Attribute'"
                "is enabled."
            )
            return [rop_node.path()]
        # Let's assume each attribute is explicitly named for now and has no
        # wildcards for Primitive to Detail. This simplifies the check.
        cls.log.debug("Checking Primitive to Detail pattern: %s" % pattern)
        cls.log.debug("Checking with path attribute: %s" % path_attr)
        if not hasattr(output_node, "geometry"):
            # In the case someone has explicitly set an Object
            # node instead of a SOP node in Geometry context
            # then for now we ignore - this allows us to also
            # export object transforms.
            cls.log.warning("No geometry output node found, skipping check..")
            return
        # Check if the primitive attribute exists
        frame = instance.data.get("frameStart", 0)
        geo = output_node.geometryAtFrame(frame)
        # If there are no primitives on the start frame then it might be
        # something that is emitted over time. As such we can't actually
        # validate whether the attributes exist, because they won't exist
        # yet. In that case, just warn the user and allow it.
        if len(geo.iterPrims()) == 0:
            cls.log.warning(
                "No primitives found on current frame. Validation"
                " for Primitive to Detail will be skipped."
            )
            return
        attrib = geo.findPrimAttrib(path_attr)
        if not attrib:
            cls.log.info(
                "Geometry Primitives are missing "
                "path attribute: `%s`" % path_attr
            )
            return [output_node.path()]
        # Ensure at least a single string value is present
        if not attrib.strings():
            cls.log.info(
                "Primitive path attribute has no "
                "string values: %s" % path_attr
            )
            return [output_node.path()]
        # Lazily fetched once on first pattern attribute that has values.
        paths = None
        for attr in pattern.split(" "):
            if not attr.strip():
                # Ignore empty values
                continue
            # Check if the primitive attribute exists
            attrib = geo.findPrimAttrib(attr)
            if not attrib:
                # It is allowed to not have the attribute at all
                continue
            # The issue can only happen if at least one string attribute is
            # present. So we ignore cases with no values whatsoever.
            if not attrib.strings():
                continue
            check = defaultdict(set)
            values = geo.primStringAttribValues(attr)
            if paths is None:
                paths = geo.primStringAttribValues(path_attr)
            for path, value in zip(paths, values):
                check[path].add(value)
            for path, values in check.items():
                # Whenever a single path has multiple values for the
                # Primitive to Detail attribute then we consider it
                # inconsistent and invalidate the ROP node's content.
                if len(values) > 1:
                    cls.log.warning(
                        "Path has multiple values: %s (path: %s)"
                        % (list(values), path)
                    )
                    return [output_node.path()]

View file

@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
import hou
import pyblish.api
from ayon_houdini.api import plugin
class ValidateAlembicROPFaceSets(plugin.HoudiniInstancePlugin):
    """Warn when the Alembic ROP exports groups as Face Sets.

    Face Sets show up as shadingEngine connections when the Alembic is
    loaded in Maya. For animated groups those connections do not update
    per frame, and they can break shader assignments in cases where the
    connection must first be broken before a shader can be assigned.

    Exporting Face Sets is still allowed; this validator only logs a
    warning so potential downstream issues can be identified early.
    """
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["abc"]
    label = "Validate Alembic ROP Face Sets"

    def process(self, instance):
        rop_node = hou.node(instance.data["instance_node"])
        # Parameter values:
        #   0 = No Face Sets
        #   1 = Save Non-Empty Groups as Face Sets
        #   2 = Save All Groups As Face Sets
        facesets_mode = rop_node.parm("facesets").eval()
        if facesets_mode == 0:
            return
        self.log.warning(
            "Alembic ROP saves 'Face Sets' for Geometry. "
            "Are you sure you want this?"
        )

View file

@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
import hou
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import plugin
class ValidateAlembicInputNode(plugin.HoudiniInstancePlugin):
    """Validate that the node connected to the output is correct.
    The connected node cannot be of the following types for Alembic:
    - VDB
    - Volume
    """
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["abc"]
    label = "Validate Input Node (Abc)"
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                ("Primitive types found that are not supported "
                 "for Alembic output."),
                title=self.label
            )
    @classmethod
    def get_invalid(cls, instance):
        # Primitive types the Alembic ROP cannot export.
        invalid_prim_types = ["VDB", "Volume"]
        output_node = instance.data.get("output_node")
        if output_node is None:
            node = hou.node(instance.data["instance_node"])
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()
            )
            return [node.path()]
        if not hasattr(output_node, "geometry"):
            # In the case someone has explicitly set an Object
            # node instead of a SOP node in Geometry context
            # then for now we ignore - this allows us to also
            # export object transforms.
            cls.log.warning("No geometry output node found, skipping check..")
            return
        # Inspect the geometry on the first frame of the export range.
        frame = instance.data.get("frameStart", 0)
        geo = output_node.geometryAtFrame(frame)
        invalid = False
        for prim_type in invalid_prim_types:
            if geo.countPrimType(prim_type) > 0:
                cls.log.error(
                    "Found a primitive which is of type '%s' !" % prim_type
                )
                invalid = True
        if invalid:
            # NOTE(review): returns the instance rather than node paths,
            # unlike sibling validators — confirm this is intended.
            return [instance]

View file

@ -1,53 +0,0 @@
import hou
import pyblish.api
from ayon_core.pipeline.publish import PublishValidationError
from ayon_houdini.api import lib, plugin
class ValidateAnimationSettings(plugin.HoudiniInstancePlugin):
    """Validate if the unexpanded string contains the frame ('$F') token.

    This validator will only check the output parameter of the node if
    the Valid Frame Range is not set to 'Render Current Frame'.

    Rules:
        If you render out a frame range it is mandatory to have the
        frame token - '$F4' or similar - to ensure that each frame gets
        written. If this is not the case you will override the same file
        every time a frame is written out.

    Examples:
        Good: 'my_vbd_cache.$F4.vdb'
        Bad: 'my_vbd_cache.vdb'
    """
    order = pyblish.api.ValidatorOrder
    label = "Validate Frame Settings"
    families = ["vdbcache"]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            # Fix: message previously read "do no match"
            raise PublishValidationError(
                "Output settings do not match for '%s'" % instance
            )

    @classmethod
    def get_invalid(cls, instance):
        node = hou.node(instance.data["instance_node"])
        # Check trange parm, 0 means Render Current Frame
        frame_range = node.evalParm("trange")
        if frame_range == 0:
            # A single-frame render needs no $F token.
            return []
        output_parm = lib.get_output_parameter(node)
        unexpanded_str = output_parm.unexpandedString()
        if "$F" not in unexpanded_str:
            cls.log.error("No frame token found in '%s'" % node.path())
            return [instance]

View file

@ -1,47 +0,0 @@
# -*- coding: utf-8 -*-
import hou
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import plugin
class ValidateBypassed(plugin.HoudiniInstancePlugin):
    """Validate the instance ROP node is not set to bypass.

    A bypassed ROP node renders nothing, so publishing cannot continue
    with it. (The previous docstring was copy-pasted from the Alembic
    path-attribute validator and did not describe this check.)
    """
    order = pyblish.api.ValidatorOrder - 0.1
    families = ["*"]
    label = "Validate ROP Bypass"
    def process(self, instance):
        if not instance.data.get("instance_node"):
            # Ignore instances without an instance node
            # e.g. in memory bootstrap instances
            self.log.debug(
                "Skipping instance without instance node: {}".format(instance)
            )
            return
        invalid = self.get_invalid(instance)
        if invalid:
            rop = invalid[0]
            raise PublishValidationError(
                ("ROP node {} is set to bypass, publishing cannot "
                 "continue.".format(rop.path())),
                title=self.label
            )
    @classmethod
    def get_invalid(cls, instance):
        rop = hou.node(instance.data["instance_node"])
        # Not every ROP-like node exposes a bypass flag; guard with hasattr.
        if hasattr(rop, "isBypassed") and rop.isBypassed():
            return [rop]

View file

@ -1,61 +0,0 @@
# -*- coding: utf-8 -*-
"""Validator plugin for Houdini Camera ROP settings."""
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import plugin
class ValidateCameraROP(plugin.HoudiniInstancePlugin):
    """Validate Camera ROP settings.

    Ensures the Alembic ROP exports a single existing camera object via
    the object hierarchy (not via a SOP path).
    """
    order = pyblish.api.ValidatorOrder
    families = ["camera"]
    label = "Camera ROP"
    def process(self, instance):
        import hou
        node = hou.node(instance.data.get("instance_node"))
        # Camera export must go through the object hierarchy; a SOP path
        # export would not carry the camera object itself.
        if node.parm("use_sop_path").eval():
            raise PublishValidationError(
                ("Alembic ROP for Camera export should not be "
                 "set to 'Use Sop Path'. Please disable."),
                title=self.label
            )
        # Get the root and objects parameter of the Alembic ROP node
        root = node.parm("root").eval()
        objects = node.parm("objects").eval()
        errors = []
        if not root:
            errors.append("Root parameter must be set on Alembic ROP")
        if not root.startswith("/"):
            errors.append("Root parameter must start with slash /")
        if not objects:
            errors.append("Objects parameter must be set on Alembic ROP")
        if len(objects.split(" ")) != 1:
            errors.append("Must have only a single object.")
        if errors:
            for error in errors:
                self.log.error(error)
            raise PublishValidationError(
                "Some checks failed, see validator log.",
                title=self.label)
        # Check if the object exists and is a camera
        path = root + "/" + objects
        camera = hou.node(path)
        if not camera:
            raise PublishValidationError(
                "Camera path does not exist: %s" % path,
                title=self.label)
        if camera.type().name() != "cam":
            raise PublishValidationError(
                ("Object set in Alembic ROP is not a camera: "
                 "{} (type: {})").format(camera, camera.type().name()),
                title=self.label)

View file

@ -1,69 +0,0 @@
# -*- coding: utf-8 -*-
import hou
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import plugin
class ValidateCopOutputNode(plugin.HoudiniInstancePlugin):
    """Validate the instance COP Output Node.
    This will ensure:
    - The COP Path is set.
    - The COP Path refers to an existing object.
    - The COP Path node is a COP node.
    """
    order = pyblish.api.ValidatorOrder
    families = ["imagesequence"]
    label = "Validate COP Output Node"
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                "Output node '{}' is incorrect. "
                "See plug-in log for details.".format(invalid),
                title=self.label,
                description=(
                    "### Invalid COP output node\n\n"
                    "The output node path for the instance must be set to a "
                    "valid COP node path.\n\nSee the log for more details."
                )
            )
    @classmethod
    def get_invalid(cls, instance):
        output_node = instance.data.get("output_node")
        if not output_node:
            node = hou.node(instance.data.get("instance_node"))
            cls.log.error(
                "COP Output node in '%s' does not exist. "
                "Ensure a valid COP output path is set." % node.path()
            )
            return [node.path()]
        # Output node must be a COP node.
        if not isinstance(output_node, hou.CopNode):
            cls.log.error(
                "Output node %s is not a COP node. "
                "COP Path must point to a COP node, "
                "instead found category type: %s",
                output_node.path(), output_node.type().category().name()
            )
            return [output_node.path()]
        # For the sake of completeness also assert the category type
        # is Cop2 to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        if output_node.type().category().name() != "Cop2":
            cls.log.error(
                "Output node %s is not of category Cop2.", output_node.path()
            )
            return [output_node.path()]

View file

@ -1,59 +0,0 @@
# -*- coding: utf-8 -*-
"""Validator for checking that export is a single frame."""
from ayon_core.pipeline import (
PublishValidationError,
OptionalPyblishPluginMixin
)
from ayon_core.pipeline.publish import ValidateContentsOrder
from ayon_houdini.api.action import SelectInvalidAction
from ayon_houdini.api import plugin
class ValidateSingleFrame(plugin.HoudiniInstancePlugin,
                          OptionalPyblishPluginMixin):
    """Validate Export is a Single Frame.

    Checks that the ROP node exports exactly one frame, i.e. start and
    end frame (including handles) are equal. This is mainly for the
    Model product type.
    """
    families = ["model"]
    label = "Validate Single Frame"
    order = ValidateContentsOrder + 0.1
    actions = [SelectInvalidAction]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            # `invalid` already holds node path strings; do not call
            # `.path()` on them (that used to raise AttributeError).
            raise PublishValidationError(
                "See log for details. "
                "Invalid nodes: {0}".format(invalid)
            )

    @classmethod
    def get_invalid(cls, instance):
        invalid = []
        frame_start = instance.data.get("frameStartHandle")
        frame_end = instance.data.get("frameEndHandle")
        # This happens if instance node has no 'trange' parameter.
        if frame_start is None or frame_end is None:
            cls.log.debug(
                "No frame data, skipping check.."
            )
            return
        if frame_start != frame_end:
            # 'instance_node' is stored as a node path string, so report
            # it directly instead of calling `.path()` on it (which
            # raised AttributeError in the original implementation).
            invalid.append(instance.data["instance_node"])
            cls.log.error(
                "Invalid frame range on '%s'."
                "You should use the same frame number for 'f1' "
                "and 'f2' parameters.",
                instance.data["instance_node"]
            )
        return invalid

View file

@ -1,140 +0,0 @@
# -*- coding: utf-8 -*-
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api.action import (
SelectInvalidAction,
SelectROPAction,
)
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import get_obj_node_output
import hou
class ValidateFBXOutputNode(plugin.HoudiniInstancePlugin):
    """Validate the instance Output Node.

    This will ensure:
        - The Output Node Path is set.
        - The Output Node Path refers to an existing object.
        - The Output Node is a Sop or Obj node.
        - The Output Node has geometry data.
        - The Output Node doesn't include invalid primitive types.
    """
    order = pyblish.api.ValidatorOrder
    families = ["fbx"]
    label = "Validate FBX Output Node"
    actions = [SelectROPAction, SelectInvalidAction]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            nodes = [n.path() for n in invalid]
            raise PublishValidationError(
                "See log for details. "
                "Invalid nodes: {0}".format(nodes),
                title="Invalid output node(s)"
            )

    @classmethod
    def get_invalid(cls, instance):
        output_node = instance.data.get("output_node")
        # Check if The Output Node Path is set and
        # refers to an existing object.
        if output_node is None:
            rop_node = hou.node(instance.data["instance_node"])
            cls.log.error(
                "Output node in '%s' does not exist. "
                "Ensure a valid output path is set.", rop_node.path()
            )
            return [rop_node]
        # Check if the Output Node is a Sop or an Obj node
        # also, list all sop output nodes inside as well as
        # invalid empty nodes.
        all_out_sops = []
        invalid = []
        # if output_node is an ObjSubnet or an ObjNetwork
        if output_node.childTypeCategory() == hou.objNodeTypeCategory():
            for node in output_node.allSubChildren():
                if node.type().name() == "geo":
                    out = get_obj_node_output(node)
                    if out:
                        all_out_sops.append(out)
                    else:
                        invalid.append(node)  # empty_objs
                        cls.log.error(
                            "Geo Obj Node '%s' is empty!",
                            node.path()
                        )
            if not all_out_sops:
                invalid.append(output_node)  # empty_objs
                # Fix: previously logged `node.path()`, which is unbound
                # when the subnet has no children (NameError) and pointed
                # at the wrong node otherwise.
                cls.log.error(
                    "Output Node '%s' is empty!",
                    output_node.path()
                )
        # elif output_node is an ObjNode
        elif output_node.type().name() == "geo":
            out = get_obj_node_output(output_node)
            if out:
                all_out_sops.append(out)
            else:
                # Fix: previously appended and logged the unbound name
                # `node`, which raised NameError in this branch.
                invalid.append(output_node)  # empty_objs
                cls.log.error(
                    "Output Node '%s' is empty!",
                    output_node.path()
                )
        # elif output_node is a SopNode
        elif output_node.type().category().name() == "Sop":
            all_out_sops.append(output_node)
        # Then it's a wrong node type
        else:
            cls.log.error(
                "Output node %s is not a SOP or OBJ Geo or OBJ SubNet node. "
                "Instead found category type: %s %s",
                output_node.path(), output_node.type().category().name(),
                output_node.type().name()
            )
            return [output_node]
        # Check if all output sop nodes have geometry
        # and don't contain invalid prims
        invalid_prim_types = ["VDB", "Volume"]
        for sop_node in all_out_sops:
            # Empty Geometry test
            if not hasattr(sop_node, "geometry"):
                invalid.append(sop_node)  # empty_geometry
                cls.log.error(
                    "Sop node '%s' doesn't include any prims.",
                    sop_node.path()
                )
                continue
            frame = instance.data.get("frameStart", 0)
            geo = sop_node.geometryAtFrame(frame)
            if len(geo.iterPrims()) == 0:
                invalid.append(sop_node)  # empty_geometry
                cls.log.error(
                    "Sop node '%s' doesn't include any prims.",
                    sop_node.path()
                )
                continue
            # Invalid Prims test
            for prim_type in invalid_prim_types:
                if geo.countPrimType(prim_type) > 0:
                    invalid.append(sop_node)  # invalid_prims
                    cls.log.error(
                        "Sop node '%s' includes invalid prims of type '%s'.",
                        sop_node.path(), prim_type
                    )
        if invalid:
            return invalid

View file

@ -1,64 +0,0 @@
# -*- coding: utf-8 -*-
import os
import hou
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_houdini.api import lib, plugin
class ValidateFileExtension(plugin.HoudiniInstancePlugin):
    """Validate that the configured output extension matches the family.

    Expected file extensions:
        - Pointcache must be .abc
        - Camera must be .abc
        - VDB must be .vdb
    """
    order = pyblish.api.ValidatorOrder
    families = ["camera", "vdbcache"]
    label = "Output File Extension"
    # Expected output extension per product type.
    family_extensions = {
        "camera": ".abc",
        "vdbcache": ".vdb",
    }

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                "ROP node has incorrect file extension: {}".format(invalid),
                title=self.label
            )

    @classmethod
    def get_invalid(cls, instance):
        # The ROP node that writes the output file.
        rop_node = hou.node(instance.data["instance_node"])
        # Collect the product types to check for this instance.
        product_types = set()
        product_type = instance.data.get("productType")
        if product_type:
            product_types.add(product_type)
        # Compare the configured output extension against the expectation.
        output_path = lib.get_output_parameter(rop_node).eval()
        output_extension = os.path.splitext(output_path)[-1]
        for family in product_types:
            extension = cls.family_extensions.get(family, None)
            if extension is None:
                raise PublishValidationError(
                    "Unsupported family: {}".format(family),
                    title=cls.label)
            if output_extension != extension:
                return [rop_node.path()]

View file

@ -1,108 +0,0 @@
# -*- coding: utf-8 -*-
import hou
import pyblish.api
from ayon_core.pipeline import PublishValidationError
from ayon_core.pipeline.publish import RepairAction
from ayon_houdini.api.action import SelectInvalidAction
from ayon_houdini.api import plugin
class DisableUseFolderHandlesAction(RepairAction):
    """Repair action shown for invalid frame ranges.

    Triggers the owning plugin's ``repair`` classmethod, which disables
    'use folder handles' on the publish instance.
    """
    label = "Disable use folder handles"
    icon = "mdi.toggle-switch-off"
class ValidateFrameRange(plugin.HoudiniInstancePlugin):
    """Validate Frame Range.

    Due to the usage of start and end handles the frame range must be
    >= (start handle + end handle), otherwise the effective frameStart
    ends up higher than frameEnd.
    """
    order = pyblish.api.ValidatorOrder - 0.1
    label = "Validate Frame Range"
    actions = [DisableUseFolderHandlesAction, SelectInvalidAction]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                title="Invalid Frame Range",
                message=(
                    "Invalid frame range because the instance "
                    "start frame ({0[frameStart]}) is higher than "
                    "the end frame ({0[frameEnd]})"
                    .format(instance.data)
                ),
                description=(
                    "## Invalid Frame Range\n"
                    "The frame range for the instance is invalid because "
                    "the start frame is higher than the end frame.\n\nThis "
                    "is likely due to folder handles being applied to your "
                    "instance or the ROP node's start frame "
                    "is set higher than the end frame.\n\nIf your ROP frame "
                    "range is correct and you do not want to apply folder "
                    "handles make sure to disable Use folder handles on the "
                    "publish instance."
                )
            )

    @classmethod
    def get_invalid(cls, instance):
        if not instance.data.get("instance_node"):
            return
        rop_node = hou.node(instance.data["instance_node"])
        frame_start = instance.data.get("frameStart")
        frame_end = instance.data.get("frameEnd")
        if frame_start is None or frame_end is None:
            cls.log.debug(
                "Skipping frame range validation for "
                "instance without frame data: {}".format(rop_node.path())
            )
            return
        if frame_start > frame_end:
            cls.log.info(
                "The ROP node render range is set to "
                "{0[frameStartHandle]} - {0[frameEndHandle]} "
                "The folder handles applied to the instance are start handle "
                "{0[handleStart]} and end handle {0[handleEnd]}"
                .format(instance.data)
            )
            return [rop_node]

    @classmethod
    def repair(cls, instance):
        """Disable 'use folder handles' on the publish instance."""
        if not cls.get_invalid(instance):
            # Already fixed
            return
        # Disable use folder handles
        context = instance.context
        create_context = context.data["create_context"]
        instance_id = instance.data.get("instance_id")
        if not instance_id:
            cls.log.debug("'{}' must have instance id"
                          .format(instance))
            return
        created_instance = create_context.get_instance_by_id(instance_id)
        # Fix: this previously re-checked `instance_id` instead of the
        # lookup result, so a failed lookup crashed below.
        if not created_instance:
            cls.log.debug("Unable to find instance '{}' by id"
                          .format(instance))
            return
        created_instance.publish_attributes["CollectAssetHandles"]["use_handles"] = False  # noqa
        create_context.save_changes()
        cls.log.debug("use folder handles is turned off for '{}'"
                      .format(instance))

View file

@ -1,52 +0,0 @@
import hou
import pyblish.api
from ayon_houdini.api import lib, plugin
class ValidateFrameToken(plugin.HoudiniInstancePlugin):
    """Validate if the unexpanded string contains the frame ('$F') token.

    This validator will *only* check the output parameter of the node if
    the Valid Frame Range is not set to 'Render Current Frame'.

    Rules:
        If you render out a frame range it is mandatory to have the
        frame token - '$F4' or similar - to ensure that each frame gets
        written. If this is not the case you will override the same file
        every time a frame is written out.

    Examples:
        Good: 'my_vbd_cache.$F4.vdb'
        Bad: 'my_vbd_cache.vdb'
    """
    order = pyblish.api.ValidatorOrder
    label = "Validate Frame Token"
    families = ["vdbcache"]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            # Fix: message previously read "do no match"
            raise RuntimeError(
                "Output settings do not match for '%s'" % instance
            )

    @classmethod
    def get_invalid(cls, instance):
        node = hou.node(instance.data["instance_node"])
        # Check trange parm, 0 means Render Current Frame
        frame_range = node.evalParm("trange")
        if frame_range == 0:
            # A single-frame render needs no $F token.
            return []
        output_parm = lib.get_output_parameter(node)
        unexpanded_str = output_parm.unexpandedString()
        if "$F" not in unexpanded_str:
            cls.log.error("No frame token found in '%s'" % node.path())
            return [instance]

Some files were not shown because too many files have changed in this diff Show more