Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

[Automated] Merged develop into main

This commit is contained in: commit 213b3c1198

78 changed files with 1920 additions and 1029 deletions
@@ -72,17 +72,19 @@ def get_errored_plugins_from_data(context):
    return get_errored_plugins_from_context(context)


# 'RepairAction' and 'RepairContextAction' were moved to
# 'openpype.pipeline.publish', please change your imports.
# There is no "reasonable" way to mark these classes as deprecated to show
# a warning on wrong import.
# Deprecated since 3.14.*, will be removed in 3.16.*
class RepairAction(pyblish.api.Action):
    """Repair action.

    To process the repair this requires a static `repair(instance)` method
    to be available on the plugin.

    Deprecated:
        'RepairAction' and 'RepairContextAction' were moved to
        'openpype.pipeline.publish', please change your imports.
        There is no "reasonable" way to mark these classes as deprecated
        to show a warning on wrong import. Deprecated since 3.14.*, will
        be removed in 3.16.*

    """
    label = "Repair"
    on = "failed"  # This action is only available on a failed plug-in
@@ -103,13 +105,19 @@ class RepairAction(pyblish.api.Action):
            plugin.repair(instance)


# Deprecated since 3.14.*, will be removed in 3.16.*
class RepairContextAction(pyblish.api.Action):
    """Repair action.

    To process the repair this requires a static `repair(instance)` method
    to be available on the plugin.

    Deprecated:
        'RepairAction' and 'RepairContextAction' were moved to
        'openpype.pipeline.publish', please change your imports.
        There is no "reasonable" way to mark these classes as deprecated
        to show a warning on wrong import. Deprecated since 3.14.*, will
        be removed in 3.16.*

    """
    label = "Repair"
    on = "failed"  # This action is only available on a failed plug-in
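Both actions assume the failing plug-in implements the repair hook itself. A minimal sketch of such a plug-in, using only the pyblish API (the validator name and failure condition are hypothetical, not from this commit):

    import pyblish.api


    class ValidateNames(pyblish.api.InstancePlugin):
        """Hypothetical validator failing on names with spaces."""
        order = pyblish.api.ValidatorOrder
        label = "Validate Names"
        actions = [RepairAction]  # offered in the UI once this plug-in fails

        def process(self, instance):
            invalid = [name for name in instance if " " in name]
            if invalid:
                raise RuntimeError("Invalid names: {}".format(invalid))

        @staticmethod
        def repair(instance):
            # RepairAction calls this for every errored instance.
            fixed = [name.replace(" ", "_") for name in instance]
            instance.data["fixedNames"] = fixed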
@@ -252,7 +252,7 @@ class IWorkfileHost:
        Remove when all usages are replaced.
        """

        self.save_workfile()
        self.save_workfile(dst_path)

    def open_file(self, filepath):
        """Deprecated variant of 'open_workfile'.
@@ -1,24 +1,13 @@
from .pipeline import (
    install,
    uninstall,

    HoudiniHost,
    ls,
    containerise,
    containerise
)

from .plugin import (
    Creator,
)

from .workio import (
    open_file,
    save_file,
    current_file,
    has_unsaved_changes,
    file_extensions,
    work_root
)

from .lib import (
    lsattr,
    lsattrs,
@@ -29,22 +18,13 @@ from .lib import (


__all__ = [
    "install",
    "uninstall",
    "HoudiniHost",

    "ls",
    "containerise",

    "Creator",

    # Workfiles API
    "open_file",
    "save_file",
    "current_file",
    "has_unsaved_changes",
    "file_extensions",
    "work_root",

    # Utility functions
    "lsattr",
    "lsattrs",
@@ -52,7 +32,3 @@ __all__ = [

    "maintained_selection"
]

# Backwards API compatibility
open = open_file
save = save_file
@@ -1,6 +1,10 @@
# -*- coding: utf-8 -*-
import sys
import os
import uuid
import logging
from contextlib import contextmanager
import json

import six
@@ -8,10 +12,13 @@ from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.context_tools import get_current_project_asset


import hou


self = sys.modules[__name__]
self._parent = None
log = logging.getLogger(__name__)
JSON_PREFIX = "JSON:::"


def get_asset_fps():
@@ -29,23 +36,18 @@ def set_id(node, unique_id, overwrite=False):


def get_id(node):
    """
    Get the `cbId` attribute of the given node
    """Get the `cbId` attribute of the given node.

    Args:
        node (hou.Node): the node to retrieve the attribute from

    Returns:
        str
        str: cbId attribute of the node.

    """

    if node is None:
        return

    id = node.parm("id")
    if node is None:
        return
    return id
    if node is not None:
        return node.parm("id")


def generate_ids(nodes, asset_id=None):
@@ -281,7 +283,7 @@ def render_rop(ropnode):
        raise RuntimeError("Render failed: {0}".format(exc))


def imprint(node, data):
def imprint(node, data, update=False):
    """Store attributes with value on a node

    Depending on the type of attribute it creates the correct parameter
@@ -290,49 +292,76 @@ def imprint(node, data):

    http://www.sidefx.com/docs/houdini/hom/hou/ParmTemplate.html

    Because of an update glitch where you cannot overwrite existing
    ParmTemplates on a node using `setParmTemplates()` and
    `parmTuplesInFolder()`, the update is done in another pass.

    Args:
        node (hou.Node): node object from Houdini
        data (dict): collection of attributes and their value
        update (bool, optional): flag if imprint should update
            already existing data or leave it untouched and only
            add new.

    Returns:
        None

    """
    if not data:
        return
    if not node:
        self.log.error("Node is not set, calling imprint on invalid data.")
        return

    parm_group = node.parmTemplateGroup()
    current_parms = {p.name(): p for p in node.spareParms()}
    update_parms = []
    templates = []

    parm_folder = hou.FolderParmTemplate("folder", "Extra")
    for key, value in data.items():
        if value is None:
            continue

        if isinstance(value, float):
            parm = hou.FloatParmTemplate(name=key,
                                         label=key,
                                         num_components=1,
                                         default_value=(value,))
        elif isinstance(value, bool):
            parm = hou.ToggleParmTemplate(name=key,
                                          label=key,
                                          default_value=value)
        elif isinstance(value, int):
            parm = hou.IntParmTemplate(name=key,
                                       label=key,
                                       num_components=1,
                                       default_value=(value,))
        elif isinstance(value, six.string_types):
            parm = hou.StringParmTemplate(name=key,
                                          label=key,
                                          num_components=1,
                                          default_value=(value,))
        else:
            raise TypeError("Unsupported type: %r" % type(value))
        parm = get_template_from_value(key, value)

        parm_folder.addParmTemplate(parm)
        if key in current_parms:
            if node.evalParm(key) == data[key]:
                continue
            if not update:
                log.debug(f"{key} already exists on {node}")
            else:
                log.debug(f"replacing {key}")
                update_parms.append(parm)
            continue

        templates.append(parm)

    parm_group = node.parmTemplateGroup()
    parm_folder = parm_group.findFolder("Extra")

    # if the folder doesn't exist yet, create one and append to it,
    # else append to the existing one
    if not parm_folder:
        parm_folder = hou.FolderParmTemplate("folder", "Extra")
        parm_folder.setParmTemplates(templates)
        parm_group.append(parm_folder)
    else:
        for template in templates:
            parm_group.appendToFolder(parm_folder, template)
            # this is needed because the pointer to the folder
            # is for some reason lost with every call to `appendToFolder()`
            parm_folder = parm_group.findFolder("Extra")

    parm_group.append(parm_folder)
    node.setParmTemplateGroup(parm_group)

    # TODO: Updating is done here by calling probably deprecated functions.
    #       This needs to be addressed in the future.
    if not update_parms:
        return

    for parm in update_parms:
        node.replaceSpareParmTuple(parm.name(), parm)


def lsattr(attr, value=None, root="/"):
    """Return nodes that have `attr`
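As a usage sketch, the new `update` flag decides what happens when a key already exists as a spare parm (node path hypothetical; assumed to run inside a Houdini session):

    import hou

    node = hou.node("/out/pointcacheMain")  # hypothetical instance node

    # First call creates spare parms grouped under an "Extra" folder.
    imprint(node, {"family": "pointcache", "active": True, "chunkSize": 10})

    # A second call without update=True leaves existing keys untouched and
    # only logs them; with update=True the spare parm template is replaced.
    imprint(node, {"chunkSize": 20}, update=True)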
@@ -397,8 +426,22 @@ def read(node):

    """
    # `spareParms` returns a tuple of hou.Parm objects
    return {parameter.name(): parameter.eval() for
            parameter in node.spareParms()}
    data = {}
    if not node:
        return data
    for parameter in node.spareParms():
        value = parameter.eval()
        # test if value is a json encoded dict
        if isinstance(value, six.string_types) and \
                value.startswith(JSON_PREFIX):
            try:
                value = json.loads(value[len(JSON_PREFIX):])
            except json.JSONDecodeError:
                # not json
                pass
        data[parameter.name()] = value

    return data


@contextmanager
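Combined with `get_template_from_value()` below, `imprint()` and `read()` round-trip container values through JSON-prefixed string parms. A sketch, with a hypothetical node:

    import hou

    node = hou.node(CONTEXT_CONTAINER)  # e.g. the OpenPype context node

    # dict values are stored as 'JSON:::...' strings by imprint() ...
    imprint(node, {"publish_attributes": {"ValidateScene": {"active": True}}})

    # ... and decoded back into Python objects by read().
    data = read(node)
    print(data["publish_attributes"])  # {'ValidateScene': {'active': True}}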
@@ -460,3 +503,89 @@ def reset_framerange():
    hou.playbar.setFrameRange(frame_start, frame_end)
    hou.playbar.setPlaybackRange(frame_start, frame_end)
    hou.setFrame(frame_start)


def get_main_window():
    """Acquire Houdini's main window"""
    if self._parent is None:
        self._parent = hou.ui.mainQtWindow()
    return self._parent


def get_template_from_value(key, value):
    if isinstance(value, float):
        parm = hou.FloatParmTemplate(name=key,
                                     label=key,
                                     num_components=1,
                                     default_value=(value,))
    elif isinstance(value, bool):
        parm = hou.ToggleParmTemplate(name=key,
                                      label=key,
                                      default_value=value)
    elif isinstance(value, int):
        parm = hou.IntParmTemplate(name=key,
                                   label=key,
                                   num_components=1,
                                   default_value=(value,))
    elif isinstance(value, six.string_types):
        parm = hou.StringParmTemplate(name=key,
                                      label=key,
                                      num_components=1,
                                      default_value=(value,))
    elif isinstance(value, (dict, list, tuple)):
        parm = hou.StringParmTemplate(name=key,
                                      label=key,
                                      num_components=1,
                                      default_value=(
                                          JSON_PREFIX + json.dumps(value),))
    else:
        raise TypeError("Unsupported type: %r" % type(value))

    return parm


def get_frame_data(node):
    """Get the frame data: start frame, end frame and steps.

    Args:
        node(hou.Node)

    Returns:
        dict: frame data for start, end and steps.

    """
    data = {}

    if node.parm("trange") is None:
        return data

    if node.evalParm("trange") == 0:
        self.log.debug("trange is 0")
        return data

    data["frameStart"] = node.evalParm("f1")
    data["frameEnd"] = node.evalParm("f2")
    data["steps"] = node.evalParm("f3")

    return data


def splitext(name, allowed_multidot_extensions):
    # type: (str, list) -> tuple
    """Split file name to name and extension.

    Args:
        name (str): File name to split.
        allowed_multidot_extensions (list of str): List of allowed multidot
            extensions.

    Returns:
        tuple: Name and extension.
    """

    for ext in allowed_multidot_extensions:
        if name.endswith(ext):
            return name[:-len(ext)], ext

    return os.path.splitext(name)
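For example, `splitext()` keeps whitelisted multi-dot extensions together where `os.path.splitext` would split at the last dot only:

    print(splitext("cache.ass.gz", [".ass.gz"]))  # ('cache', '.ass.gz')
    print(splitext("cache.ass.gz", []))           # ('cache.ass', '.gz')
    print(splitext("scene.hip", [".ass.gz"]))     # ('scene', '.hip')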
@@ -1,9 +1,13 @@
# -*- coding: utf-8 -*-
"""Pipeline tools for OpenPype Houdini integration."""
import os
import sys
import logging
import contextlib

import hou
import hou  # noqa

from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher

import pyblish.api
@@ -26,6 +30,7 @@ from .lib import get_asset_fps
log = logging.getLogger("openpype.hosts.houdini")

AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS"
CONTEXT_CONTAINER = "/obj/OpenPypeContext"
IS_HEADLESS = not hasattr(hou, "ui")

PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins")
@@ -35,71 +40,139 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


self = sys.modules[__name__]
self._has_been_setup = False
self._parent = None
self._events = dict()
class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher):
    name = "houdini"

    def __init__(self):
        super(HoudiniHost, self).__init__()
        self._op_events = {}
        self._has_been_setup = False

def install():
    _register_callbacks()
    def install(self):
        pyblish.api.register_host("houdini")
        pyblish.api.register_host("hython")
        pyblish.api.register_host("hpython")

    pyblish.api.register_host("houdini")
    pyblish.api.register_host("hython")
    pyblish.api.register_host("hpython")
        pyblish.api.register_plugin_path(PUBLISH_PATH)
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)

    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
    register_creator_plugin_path(CREATE_PATH)
        log.info("Installing callbacks ... ")
        # register_event_callback("init", on_init)
        self._register_callbacks()
        register_event_callback("before.save", before_save)
        register_event_callback("save", on_save)
        register_event_callback("open", on_open)
        register_event_callback("new", on_new)

    log.info("Installing callbacks ... ")
    # register_event_callback("init", on_init)
    register_event_callback("before.save", before_save)
    register_event_callback("save", on_save)
    register_event_callback("open", on_open)
    register_event_callback("new", on_new)
        pyblish.api.register_callback(
            "instanceToggled", on_pyblish_instance_toggled
        )

    pyblish.api.register_callback(
        "instanceToggled", on_pyblish_instance_toggled
    )
        self._has_been_setup = True
        # add houdini vendor packages
        hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor")

    self._has_been_setup = True
    # add houdini vendor packages
    hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor")
        sys.path.append(hou_pythonpath)

    sys.path.append(hou_pythonpath)
        # Set asset settings for the empty scene directly after launch of
        # Houdini so it initializes into the correct scene FPS,
        # Frame Range, etc.
        # TODO: make sure this doesn't trigger when
        #       opening with last workfile.
        _set_context_settings()
        shelves.generate_shelves()

    # Set asset settings for the empty scene directly after launch of Houdini
    # so it initializes into the correct scene FPS, Frame Range, etc.
    # todo: make sure this doesn't trigger when opening with last workfile
    _set_context_settings()
    shelves.generate_shelves()
    def has_unsaved_changes(self):
        return hou.hipFile.hasUnsavedChanges()

    def get_workfile_extensions(self):
        return [".hip", ".hiplc", ".hipnc"]

def uninstall():
    """Uninstall Houdini-specific functionality of avalon-core.
    def save_workfile(self, dst_path=None):
        # Force forwards slashes to avoid segfault
        if dst_path:
            dst_path = dst_path.replace("\\", "/")
        hou.hipFile.save(file_name=dst_path,
                         save_to_recent_files=True)
        return dst_path

    This function is called automatically on calling `api.uninstall()`.
    """
    def open_workfile(self, filepath):
        # Force forwards slashes to avoid segfault
        filepath = filepath.replace("\\", "/")

    pyblish.api.deregister_host("hython")
    pyblish.api.deregister_host("hpython")
    pyblish.api.deregister_host("houdini")
        hou.hipFile.load(filepath,
                         suppress_save_prompt=True,
                         ignore_load_warnings=False)

        return filepath

def _register_callbacks():
    for event in self._events.copy().values():
        if event is None:
            continue
    def get_current_workfile(self):
        current_filepath = hou.hipFile.path()
        if (os.path.basename(current_filepath) == "untitled.hip" and
                not os.path.exists(current_filepath)):
            # By default a new scene in houdini is saved in the current
            # working directory as "untitled.hip" so we need to capture
            # that and consider it 'not saved' when it's in that state.
            return None

        try:
            hou.hipFile.removeEventCallback(event)
        except RuntimeError as e:
            log.info(e)
        return current_filepath

    self._events[on_file_event_callback] = hou.hipFile.addEventCallback(
        on_file_event_callback
    )
    def get_containers(self):
        return ls()

    def _register_callbacks(self):
        for event in self._op_events.copy().values():
            if event is None:
                continue

            try:
                hou.hipFile.removeEventCallback(event)
            except RuntimeError as e:
                log.info(e)

        self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback(
            on_file_event_callback
        )

    @staticmethod
    def create_context_node():
        """Helper for creating context holding node.

        Returns:
            hou.Node: context node

        """
        obj_network = hou.node("/obj")
        op_ctx = obj_network.createNode(
            "null", node_name="OpenPypeContext")
        op_ctx.moveToGoodPosition()
        op_ctx.setBuiltExplicitly(False)
        op_ctx.setCreatorState("OpenPype")
        op_ctx.setComment("OpenPype node to hold context metadata")
        op_ctx.setColor(hou.Color((0.081, 0.798, 0.810)))
        op_ctx.hide(True)
        return op_ctx

    def update_context_data(self, data, changes):
        op_ctx = hou.node(CONTEXT_CONTAINER)
        if not op_ctx:
            op_ctx = self.create_context_node()

        lib.imprint(op_ctx, data)

    def get_context_data(self):
        op_ctx = hou.node(CONTEXT_CONTAINER)
        if not op_ctx:
            op_ctx = self.create_context_node()
        return lib.read(op_ctx)

    def save_file(self, dst_path=None):
        # Force forwards slashes to avoid segfault
        dst_path = dst_path.replace("\\", "/")

        hou.hipFile.save(file_name=dst_path,
                         save_to_recent_files=True)


def on_file_event_callback(event):
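For context, the class-based host replaces the old module-level install()/uninstall() pair and is activated through the generic entry point. A minimal sketch, assuming `install_host` from `openpype.pipeline` (not shown in this diff):

    from openpype.pipeline import install_host
    from openpype.hosts.houdini.api import HoudiniHost

    # Registers the pyblish hosts, plug-in paths and hip-file event
    # callbacks defined in HoudiniHost.install().
    install_host(HoudiniHost())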
@@ -113,22 +186,6 @@ def on_file_event_callback(event):
        emit_event("new")


def get_main_window():
    """Acquire Houdini's main window"""
    if self._parent is None:
        self._parent = hou.ui.mainQtWindow()
    return self._parent


def teardown():
    """Remove integration"""
    if not self._has_been_setup:
        return

    self._has_been_setup = False
    print("pyblish: Integration torn down successfully")


def containerise(name,
                 namespace,
                 nodes,
@@ -251,7 +308,7 @@ def on_open():
        log.warning("Scene has outdated content.")

        # Get main window
        parent = get_main_window()
        parent = lib.get_main_window()
        if parent is None:
            log.info("Skipping outdated content pop-up "
                     "because Houdini window can't be found.")
@@ -1,14 +1,19 @@
# -*- coding: utf-8 -*-
"""Houdini specific Avalon/Pyblish plugin definitions."""
import sys
from abc import (
    ABCMeta
)
import six

import hou
from openpype.pipeline import (
    CreatorError,
    LegacyCreator
    LegacyCreator,
    Creator as NewCreator,
    CreatedInstance
)
from .lib import imprint
from openpype.lib import BoolDef
from .lib import imprint, read, lsattr


class OpenPypeCreatorError(CreatorError):
@@ -30,12 +35,15 @@ class Creator(LegacyCreator):
    when hovering over a node. The information is visible under the name of
    the node.

    Deprecated:
        This creator is deprecated and will be removed in a future version.

    """
    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(Creator, self).__init__(*args, **kwargs)
        self.nodes = list()
        self.nodes = []

    def process(self):
        """This is the base functionality to create instances in Houdini
@@ -84,3 +92,187 @@ class Creator(LegacyCreator):
                OpenPypeCreatorError,
                OpenPypeCreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2])


class HoudiniCreatorBase(object):
    @staticmethod
    def cache_subsets(shared_data):
        """Cache instances for Creators to shared data.

        Create `houdini_cached_subsets` key when needed in shared data and
        fill it with all collected instances from the scene under their
        respective creator identifiers.

        If legacy instances are detected in the scene, create
        `houdini_cached_legacy_subsets` there and fill it with
        all legacy subsets under family as a key.

        Args:
            shared_data (Dict[str, Any]): Shared data.

        Returns:
            Dict[str, Any]: Shared data dictionary.

        """
        if shared_data.get("houdini_cached_subsets") is None:
            shared_data["houdini_cached_subsets"] = {}
        if shared_data.get("houdini_cached_legacy_subsets") is None:
            shared_data["houdini_cached_legacy_subsets"] = {}
        cached_instances = lsattr("id", "pyblish.avalon.instance")
        for i in cached_instances:
            if not i.parm("creator_identifier"):
                # we have a legacy instance
                family = i.parm("family").eval()
                if family not in shared_data[
                        "houdini_cached_legacy_subsets"]:
                    shared_data["houdini_cached_legacy_subsets"][
                        family] = [i]
                else:
                    shared_data[
                        "houdini_cached_legacy_subsets"][family].append(i)
                continue

            creator_id = i.parm("creator_identifier").eval()
            if creator_id not in shared_data["houdini_cached_subsets"]:
                shared_data["houdini_cached_subsets"][creator_id] = [i]
            else:
                shared_data[
                    "houdini_cached_subsets"][creator_id].append(i)  # noqa
        return shared_data

    @staticmethod
    def create_instance_node(
            node_name, parent,
            node_type="geometry"):
        # type: (str, str, str) -> hou.Node
        """Create node representing instance.

        Arguments:
            node_name (str): Name of the new node.
            parent (str): Path of the parent node.
            node_type (str, optional): Type of the node.

        Returns:
            hou.Node: Newly created instance node.

        """
        parent_node = hou.node(parent)
        instance_node = parent_node.createNode(
            node_type, node_name=node_name)
        instance_node.moveToGoodPosition()
        return instance_node


@six.add_metaclass(ABCMeta)
class HoudiniCreator(NewCreator, HoudiniCreatorBase):
    """Base class for most of the Houdini creator plugins."""
    selected_nodes = []

    def create(self, subset_name, instance_data, pre_create_data):
        try:
            if pre_create_data.get("use_selection"):
                self.selected_nodes = hou.selectedNodes()

            # Get the node type and remove it from the data, not needed
            node_type = instance_data.pop("node_type", None)
            if node_type is None:
                node_type = "geometry"

            instance_node = self.create_instance_node(
                subset_name, "/out", node_type)

            self.customize_node_look(instance_node)

            instance_data["instance_node"] = instance_node.path()
            instance = CreatedInstance(
                self.family,
                subset_name,
                instance_data,
                self)
            self._add_instance_to_context(instance)
            imprint(instance_node, instance.data_to_store())
            return instance

        except hou.Error as er:
            six.reraise(
                OpenPypeCreatorError,
                OpenPypeCreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2])

    def lock_parameters(self, node, parameters):
        """Lock list of specified parameters on the node.

        Args:
            node (hou.Node): Houdini node to lock parameters on.
            parameters (list of str): List of parameter names.

        """
        for name in parameters:
            try:
                parm = node.parm(name)
                parm.lock(True)
            except AttributeError:
                self.log.debug("missing lock pattern {}".format(name))

    def collect_instances(self):
        # cache instances if missing
        self.cache_subsets(self.collection_shared_data)
        for instance in self.collection_shared_data[
                "houdini_cached_subsets"].get(self.identifier, []):
            created_instance = CreatedInstance.from_existing(
                read(instance), self
            )
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        for created_inst, _changes in update_list:
            instance_node = hou.node(created_inst.get("instance_node"))

            new_values = {
                key: new_value
                for key, (_old_value, new_value) in _changes.items()
            }
            imprint(
                instance_node,
                new_values,
                update=True
            )

    def remove_instances(self, instances):
        """Remove specified instances from the scene.

        This is only removing the `id` parameter so the instance is no
        longer an instance, because it might contain valuable data for
        the artist.

        """
        for instance in instances:
            instance_node = hou.node(instance.data.get("instance_node"))
            if instance_node:
                instance_node.destroy()

            self._remove_instance_from_context(instance)

    def get_pre_create_attr_defs(self):
        return [
            BoolDef("use_selection", label="Use selection")
        ]

    @staticmethod
    def customize_node_look(
            node, color=None,
            shape="chevron_down"):
        """Set custom look for instance nodes.

        Args:
            node (hou.Node): Node to set look.
            color (hou.Color, Optional): Color of the node.
            shape (str, Optional): Shape name of the node.

        Returns:
            None

        """
        if not color:
            color = hou.Color((0.616, 0.871, 0.769))
        node.setUserData('nodeshape', shape)
        node.setColor(color)
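Every creator plug-in updated below follows the same shape on top of this base class. A minimal hypothetical subclass (identifier, family and output path invented for illustration):

    import hou

    from openpype.hosts.houdini.api import plugin


    class CreateExampleCache(plugin.HoudiniCreator):
        """Hypothetical creator writing a geometry ROP."""
        identifier = "io.openpype.creators.houdini.examplecache"
        label = "Example Cache"
        family = "examplecache"
        icon = "gears"

        def create(self, subset_name, instance_data, pre_create_data):
            instance_data.pop("active", None)
            instance_data.update({"node_type": "geometry"})

            instance = super(CreateExampleCache, self).create(
                subset_name, instance_data, pre_create_data)

            instance_node = hou.node(instance.get("instance_node"))
            instance_node.setParms({
                "sopoutput": "$HIP/pyblish/{}.$F4.bgeo".format(subset_name)})
            return instance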
@@ -1,57 +0,0 @@
"""Host API required Work Files tool"""
import os

import hou


def file_extensions():
    return [".hip", ".hiplc", ".hipnc"]


def has_unsaved_changes():
    return hou.hipFile.hasUnsavedChanges()


def save_file(filepath):

    # Force forwards slashes to avoid segfault
    filepath = filepath.replace("\\", "/")

    hou.hipFile.save(file_name=filepath,
                     save_to_recent_files=True)

    return filepath


def open_file(filepath):

    # Force forwards slashes to avoid segfault
    filepath = filepath.replace("\\", "/")

    hou.hipFile.load(filepath,
                     suppress_save_prompt=True,
                     ignore_load_warnings=False)

    return filepath


def current_file():

    current_filepath = hou.hipFile.path()
    if (os.path.basename(current_filepath) == "untitled.hip" and
            not os.path.exists(current_filepath)):
        # By default a new scene in houdini is saved in the current
        # working directory as "untitled.hip" so we need to capture
        # that and consider it 'not saved' when it's in that state.
        return None

    return current_filepath


def work_root(session):
    work_dir = session["AVALON_WORKDIR"]
    scene_dir = session.get("AVALON_SCENEDIR")
    if scene_dir:
        return os.path.join(work_dir, scene_dir)
    else:
        return work_dir

openpype/hosts/houdini/plugins/create/convert_legacy.py (new file, 74 lines)
@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
"""Convertor for legacy Houdini subsets."""
from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin
from openpype.hosts.houdini.api.lib import imprint


class HoudiniLegacyConvertor(SubsetConvertorPlugin):
    """Find and convert any legacy subsets in the scene.

    This convertor will find all legacy subsets in the scene and will
    transform them to the current system. Since the old subsets don't
    retain any information about their original creators, the only mapping
    we can do is based on their families.

    Its limitation is that you can have multiple creators creating subsets
    of the same family and there is no way to handle that. This code should
    nevertheless cover all creators that came with OpenPype.

    """
    identifier = "io.openpype.creators.houdini.legacy"
    family_to_id = {
        "camera": "io.openpype.creators.houdini.camera",
        "ass": "io.openpype.creators.houdini.ass",
        "imagesequence": "io.openpype.creators.houdini.imagesequence",
        "hda": "io.openpype.creators.houdini.hda",
        "pointcache": "io.openpype.creators.houdini.pointcache",
        "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy",
        "redshift_rop": "io.openpype.creators.houdini.redshift_rop",
        "usd": "io.openpype.creators.houdini.usd",
        "usdrender": "io.openpype.creators.houdini.usdrender",
        "vdbcache": "io.openpype.creators.houdini.vdbcache"
    }

    def __init__(self, *args, **kwargs):
        super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs)
        self.legacy_subsets = {}

    def find_instances(self):
        """Find legacy subsets in the scene.

        Legacy subsets are the ones that don't have the
        `creator_identifier` parameter on them.

        This is using cached entries done in
        :py:meth:`~HoudiniCreatorBase.cache_subsets()`

        """
        self.legacy_subsets = self.collection_shared_data.get(
            "houdini_cached_legacy_subsets")
        if not self.legacy_subsets:
            return
        self.add_convertor_item("Found {} incompatible subset{}.".format(
            len(self.legacy_subsets),
            "s" if len(self.legacy_subsets) > 1 else "")
        )

    def convert(self):
        """Convert all legacy subsets to current.

        It is enough to add `creator_identifier` and `instance_node`.

        """
        if not self.legacy_subsets:
            return

        for family, subsets in self.legacy_subsets.items():
            if family in self.family_to_id:
                for subset in subsets:
                    data = {
                        "creator_identifier": self.family_to_id[family],
                        "instance_node": subset.path()
                    }
                    self.log.info("Converting {} to {}".format(
                        subset.path(), self.family_to_id[family]))
                    imprint(subset, data)
@@ -1,46 +1,49 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating alembic camera subsets."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance, CreatorError


class CreateAlembicCamera(plugin.Creator):
    """Single baked camera from Alembic ROP"""
class CreateAlembicCamera(plugin.HoudiniCreator):
    """Single baked camera from Alembic ROP."""

    name = "camera"
    identifier = "io.openpype.creators.houdini.camera"
    label = "Camera (Abc)"
    family = "camera"
    icon = "camera"

    def __init__(self, *args, **kwargs):
        super(CreateAlembicCamera, self).__init__(*args, **kwargs)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_data.pop("active", None)
        instance_data.update({"node_type": "alembic"})

        # Set node type to create for output
        self.data.update({"node_type": "alembic"})
        instance = super(CreateAlembicCamera, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        instance_node = hou.node(instance.get("instance_node"))
        parms = {
            "filename": "$HIP/pyblish/%s.abc" % self.name,
            "filename": hou.text.expandString(
                "$HIP/pyblish/{}.abc".format(subset_name)),
            "use_sop_path": False,
        }

        if self.nodes:
            node = self.nodes[0]
            path = node.path()
        if self.selected_nodes:
            if len(self.selected_nodes) > 1:
                raise CreatorError("More than one item selected.")
            path = self.selected_nodes[0].path()
            # Split the node path into the first root and the remainder
            # so we can set the root and objects parameters correctly
            _, root, remainder = path.split("/", 2)
            parms.update({"root": "/" + root, "objects": remainder})

        instance.setParms(parms)
        instance_node.setParms(parms)

        # Lock the Use Sop Path setting so the
        # user doesn't accidentally enable it.
        instance.parm("use_sop_path").lock(True)
        instance.parm("trange").set(1)
        to_lock = ["use_sop_path"]
        self.lock_parameters(instance_node, to_lock)

        instance_node.parm("trange").set(1)
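The root/objects split above relies on `str.split` with a maxsplit of 2; for a hypothetical selected camera path:

    path = "/obj/camera1/cam"  # hypothetical node path
    _, root, remainder = path.split("/", 2)
    print("/" + root)  # '/obj'         -> the Alembic ROP 'root' parm
    print(remainder)   # 'camera1/cam'  -> the 'objects' parm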
@@ -1,9 +1,12 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating Arnold ASS files."""
from openpype.hosts.houdini.api import plugin


class CreateArnoldAss(plugin.Creator):
class CreateArnoldAss(plugin.HoudiniCreator):
    """Arnold .ass Archive"""

    identifier = "io.openpype.creators.houdini.ass"
    label = "Arnold ASS"
    family = "ass"
    icon = "magic"
@@ -12,42 +15,39 @@ class CreateArnoldAss(plugin.Creator):
    # Default extension: `.ass` or `.ass.gz`
    ext = ".ass"

    def __init__(self, *args, **kwargs):
        super(CreateArnoldAss, self).__init__(*args, **kwargs)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_data.pop("active", None)
        instance_data.update({"node_type": "arnold"})

        self.data.update({"node_type": "arnold"})
        instance = super(CreateArnoldAss, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: plugin.CreatedInstance

    def process(self):
        node = super(CreateArnoldAss, self).process()

        basename = node.name()
        node.setName(basename + "_ASS", unique_name=True)
        instance_node = hou.node(instance.get("instance_node"))

        # Hide Properties Tab on Arnold ROP since that's used
        # for rendering instead of .ass Archive Export
        parm_template_group = node.parmTemplateGroup()
        parm_template_group = instance_node.parmTemplateGroup()
        parm_template_group.hideFolder("Properties", True)
        node.setParmTemplateGroup(parm_template_group)
        instance_node.setParmTemplateGroup(parm_template_group)

        filepath = '$HIP/pyblish/`chs("subset")`.$F4{}'.format(self.ext)
        filepath = "{}{}".format(
            hou.text.expandString("$HIP/pyblish/"),
            "{}.$F4{}".format(subset_name, self.ext)
        )
        parms = {
            # Render frame range
            "trange": 1,

            # Arnold ROP settings
            "ar_ass_file": filepath,
            "ar_ass_export_enable": 1
        }
        node.setParms(parms)

        # Lock the ASS export attribute
        node.parm("ar_ass_export_enable").lock(True)
        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = node.parm(name)
            parm.lock(True)
        # Lock any parameters in this list
        to_lock = ["ar_ass_export_enable", "family", "id"]
        self.lock_parameters(instance_node, to_lock)
@@ -1,44 +1,42 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating composite sequences."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance


class CreateCompositeSequence(plugin.Creator):
class CreateCompositeSequence(plugin.HoudiniCreator):
    """Composite ROP to Image Sequence"""

    identifier = "io.openpype.creators.houdini.imagesequence"
    label = "Composite (Image Sequence)"
    family = "imagesequence"
    icon = "gears"

    def __init__(self, *args, **kwargs):
        super(CreateCompositeSequence, self).__init__(*args, **kwargs)
    ext = ".exr"

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou  # noqa

        # Type of ROP node to create
        self.data.update({"node_type": "comp"})
        instance_data.pop("active", None)
        instance_data.update({"node_type": "comp"})

    def _process(self, instance):
        """Creator main entry point.
        instance = super(CreateCompositeSequence, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

        Args:
            instance (hou.Node): Created Houdini instance.
        instance_node = hou.node(instance.get("instance_node"))
        filepath = "{}{}".format(
            hou.text.expandString("$HIP/pyblish/"),
            "{}.$F4{}".format(subset_name, self.ext)
        )
        parms = {
            "trange": 1,
            "copoutput": filepath
        }

        """
        parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name}

        if self.nodes:
            node = self.nodes[0]
            parms.update({"coppath": node.path()})

        instance.setParms(parms)
        instance_node.setParms(parms)

        # Lock any parameters in this list
        to_lock = ["prim_to_detail_pattern"]
        for name in to_lock:
            try:
                parm = instance.parm(name)
                parm.lock(True)
            except AttributeError:
                # missing lock pattern
                self.log.debug(
                    "missing lock pattern {}".format(name))
        self.lock_parameters(instance_node, to_lock)
@@ -1,28 +1,22 @@
# -*- coding: utf-8 -*-
import hou

"""Creator plugin for creating publishable Houdini Digital Assets."""
from openpype.client import (
    get_asset_by_name,
    get_subsets,
)
from openpype.pipeline import legacy_io
from openpype.hosts.houdini.api import lib
from openpype.hosts.houdini.api import plugin


class CreateHDA(plugin.Creator):
class CreateHDA(plugin.HoudiniCreator):
    """Publish Houdini Digital Asset file."""

    name = "hda"
    identifier = "io.openpype.creators.houdini.hda"
    label = "Houdini Digital Asset (Hda)"
    family = "hda"
    icon = "gears"
    maintain_selection = False

    def __init__(self, *args, **kwargs):
        super(CreateHDA, self).__init__(*args, **kwargs)
        self.data.pop("active", None)

    def _check_existing(self, subset_name):
        # type: (str) -> bool
        """Check if versions of the subset name already exist."""
@@ -40,55 +34,51 @@ class CreateHDA(plugin.Creator):
        }
        return subset_name.lower() in existing_subset_names_low

    def _process(self, instance):
        subset_name = self.data["subset"]
        # get selected nodes
        out = hou.node("/obj")
        self.nodes = hou.selectedNodes()
    def _create_instance_node(
            self, node_name, parent, node_type="geometry"):
        import hou

        if (self.options or {}).get("useSelection") and self.nodes:
            # if we have `use selection` enabled and we have some
        parent_node = hou.node("/obj")
        if self.selected_nodes:
            # if we have `use selection` enabled, and we have some
            # selected nodes ...
            subnet = out.collapseIntoSubnet(
                self.nodes,
                subnet_name="{}_subnet".format(self.name))
            subnet = parent_node.collapseIntoSubnet(
                self.selected_nodes,
                subnet_name="{}_subnet".format(node_name))
            subnet.moveToGoodPosition()
            to_hda = subnet
        else:
            to_hda = out.createNode(
                "subnet", node_name="{}_subnet".format(self.name))
            to_hda = parent_node.createNode(
                "subnet", node_name="{}_subnet".format(node_name))
        if not to_hda.type().definition():
            # if the node type has no definition, it is not a user-created
            # hda. We test whether an hda can be created from the node.
            if not to_hda.canCreateDigitalAsset():
                raise Exception(
                raise plugin.OpenPypeCreatorError(
                    "cannot create hda from node {}".format(to_hda))

            hda_node = to_hda.createDigitalAsset(
                name=subset_name,
                hda_file_name="$HIP/{}.hda".format(subset_name)
                name=node_name,
                hda_file_name="$HIP/{}.hda".format(node_name)
            )
            hda_node.layoutChildren()
        elif self._check_existing(subset_name):
        elif self._check_existing(node_name):
            raise plugin.OpenPypeCreatorError(
                ("subset {} is already published with a different HDA "
                 "definition.").format(subset_name))
                 "definition.").format(node_name))
        else:
            hda_node = to_hda

        hda_node.setName(subset_name)

        # delete the node created by Avalon in /out;
        # this needs to be addressed in a future Houdini workflow refactor.

        hou.node("/out/{}".format(subset_name)).destroy()

        try:
            lib.imprint(hda_node, self.data)
        except hou.OperationFailed:
            raise plugin.OpenPypeCreatorError(
                ("Cannot set metadata on asset. Might be that it already is "
                 "an OpenPype asset.")
            )

        hda_node.setName(node_name)
        self.customize_node_look(hda_node)
        return hda_node

    def create(self, subset_name, instance_data, pre_create_data):
        instance_data.pop("active", None)

        instance = super(CreateHDA, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: plugin.CreatedInstance

        return instance
@@ -1,48 +1,51 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance


class CreatePointCache(plugin.Creator):
class CreatePointCache(plugin.HoudiniCreator):
    """Alembic ROP to pointcache"""

    name = "pointcache"
    identifier = "io.openpype.creators.houdini.pointcache"
    label = "Point Cache"
    family = "pointcache"
    icon = "gears"

    def __init__(self, *args, **kwargs):
        super(CreatePointCache, self).__init__(*args, **kwargs)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_data.pop("active", None)
        instance_data.update({"node_type": "alembic"})

        self.data.update({"node_type": "alembic"})
        instance = super(CreatePointCache, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        instance_node = hou.node(instance.get("instance_node"))
        parms = {
            "use_sop_path": True,     # Export single node from SOP Path
            "build_from_path": True,  # Direct path of primitive in output
            "path_attrib": "path",    # Pass path attribute for output
            "use_sop_path": True,
            "build_from_path": True,
            "path_attrib": "path",
            "prim_to_detail_pattern": "cbId",
            "format": 2,              # Set format to Ogawa
            "facesets": 0,            # No face sets (by default exclude them)
            "filename": "$HIP/pyblish/%s.abc" % self.name,
            "format": 2,
            "facesets": 0,
            "filename": hou.text.expandString(
                "$HIP/pyblish/{}.abc".format(subset_name))
        }

        if self.nodes:
            node = self.nodes[0]
            parms.update({"sop_path": node.path()})
        if self.selected_nodes:
            parms["sop_path"] = self.selected_nodes[0].path()

        instance.setParms(parms)
        instance.parm("trange").set(1)
            # try to find an output node
            for child in self.selected_nodes[0].children():
                if child.type().name() == "output":
                    parms["sop_path"] = child.path()
                    break

        instance_node.setParms(parms)
        instance_node.parm("trange").set(1)

        # Lock any parameters in this list
        to_lock = ["prim_to_detail_pattern"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
        self.lock_parameters(instance_node, to_lock)
@@ -1,18 +1,20 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating Redshift proxies."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance


class CreateRedshiftProxy(plugin.Creator):
class CreateRedshiftProxy(plugin.HoudiniCreator):
    """Redshift Proxy"""

    identifier = "io.openpype.creators.houdini.redshiftproxy"
    label = "Redshift Proxy"
    family = "redshiftproxy"
    icon = "magic"

    def __init__(self, *args, **kwargs):
        super(CreateRedshiftProxy, self).__init__(*args, **kwargs)

    def create(self, subset_name, instance_data, pre_create_data):
        import hou  # noqa
        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_data.pop("active", None)

        # Redshift provides a `Redshift_Proxy_Output` node type which shows
        # a limited set of parameters by default and is set to extract a
@@ -21,28 +23,24 @@ class CreateRedshiftProxy(plugin.Creator):
        # why this happens.
        # TODO: Somehow enforce so that it only shows the original limited
        #       attributes of the Redshift_Proxy_Output node type
        self.data.update({"node_type": "Redshift_Proxy_Output"})
        instance_data.update({"node_type": "Redshift_Proxy_Output"})

    def _process(self, instance):
        """Creator main entry point.
        instance = super(CreateRedshiftProxy, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

        Args:
            instance (hou.Node): Created Houdini instance.
        instance_node = hou.node(instance.get("instance_node"))

        """
        parms = {
            "RS_archive_file": '$HIP/pyblish/`chs("subset")`.$F4.rs',
            "RS_archive_file": '$HIP/pyblish/{}.$F4.rs'.format(subset_name),
        }

        if self.nodes:
            node = self.nodes[0]
            path = node.path()
            parms["RS_archive_sopPath"] = path
        if self.selected_nodes:
            parms["RS_archive_sopPath"] = self.selected_nodes[0].path()

        instance.setParms(parms)
        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
        to_lock = ["family", "id", "prim_to_detail_pattern"]
        self.lock_parameters(instance_node, to_lock)
@@ -1,41 +1,40 @@
import hou
# -*- coding: utf-8 -*-
"""Creator plugin to create Redshift ROP."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance


class CreateRedshiftROP(plugin.Creator):
class CreateRedshiftROP(plugin.HoudiniCreator):
    """Redshift ROP"""

    identifier = "io.openpype.creators.houdini.redshift_rop"
    label = "Redshift ROP"
    family = "redshift_rop"
    icon = "magic"
    defaults = ["master"]

    def __init__(self, *args, **kwargs):
        super(CreateRedshiftROP, self).__init__(*args, **kwargs)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou  # noqa

        instance_data.pop("active", None)
        instance_data.update({"node_type": "Redshift_ROP"})
        # Add chunk size attribute
        instance_data["chunkSize"] = 10

        # Clear the family prefix from the subset
        subset = self.data["subset"]
        subset = subset_name
        subset_no_prefix = subset[len(self.family):]
        subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:]
        self.data["subset"] = subset_no_prefix
        subset_name = subset_no_prefix

        # Add chunk size attribute
        self.data["chunkSize"] = 10
        instance = super(CreateRedshiftROP, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_node = hou.node(instance.get("instance_node"))

        self.data.update({"node_type": "Redshift_ROP"})

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        basename = instance.name()
        instance.setName(basename + "_ROP", unique_name=True)
        basename = instance_node.name()
        instance_node.setName(basename + "_ROP", unique_name=True)

        # Also create the linked Redshift IPR Rop
        try:
@@ -43,11 +42,12 @@ class CreateRedshiftROP(plugin.Creator):
                "Redshift_IPR", node_name=basename + "_IPR"
            )
        except hou.OperationFailed:
            raise Exception(("Cannot create Redshift node. Is Redshift "
                             "installed and enabled?"))
            raise plugin.OpenPypeCreatorError(
                ("Cannot create Redshift node. Is Redshift "
                 "installed and enabled?"))

        # Move it to directly under the Redshift ROP
        ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1))
        ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1))

        # Set the linked rop to the Redshift ROP
        ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance))
@@ -61,10 +61,8 @@ class CreateRedshiftROP(plugin.Creator):
            "RS_outputMultilayerMode": 0,  # no multi-layered exr
            "RS_outputBeautyAOVSuffix": "beauty",
        }
        instance.setParms(parms)
        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
        self.lock_parameters(instance_node, to_lock)
@@ -1,39 +1,39 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USDs."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance


class CreateUSD(plugin.Creator):
class CreateUSD(plugin.HoudiniCreator):
    """Universal Scene Description"""

    identifier = "io.openpype.creators.houdini.usd"
    label = "USD (experimental)"
    family = "usd"
    icon = "gears"
    enabled = False

    def __init__(self, *args, **kwargs):
        super(CreateUSD, self).__init__(*args, **kwargs)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou  # noqa

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_data.pop("active", None)
        instance_data.update({"node_type": "usd"})

        self.data.update({"node_type": "usd"})
        instance = super(CreateUSD, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

    def _process(self, instance):
        """Creator main entry point.
        instance_node = hou.node(instance.get("instance_node"))

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            "lopoutput": "$HIP/pyblish/%s.usd" % self.name,
            "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name),
            "enableoutputprocessor_simplerelativepaths": False,
        }

        if self.nodes:
            node = self.nodes[0]
            parms.update({"loppath": node.path()})
        if self.selected_nodes:
            parms["loppath"] = self.selected_nodes[0].path()

        instance.setParms(parms)
        instance_node.setParms(parms)

        # Lock any parameters in this list
        to_lock = [
@@ -42,6 +42,4 @@ class CreateUSD(plugin.Creator):
            "family",
            "id",
        ]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
        self.lock_parameters(instance_node, to_lock)
@@ -1,42 +1,41 @@
import hou
# -*- coding: utf-8 -*-
"""Creator plugin for creating USD renders."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance


class CreateUSDRender(plugin.Creator):
class CreateUSDRender(plugin.HoudiniCreator):
    """USD Render ROP in /stage"""

    identifier = "io.openpype.creators.houdini.usdrender"
    label = "USD Render (experimental)"
    family = "usdrender"
    icon = "magic"

    def __init__(self, *args, **kwargs):
        super(CreateUSDRender, self).__init__(*args, **kwargs)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou  # noqa

        self.parent = hou.node("/stage")
        instance_data["parent"] = hou.node("/stage")

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_data.pop("active", None)
        instance_data.update({"node_type": "usdrender"})

        self.data.update({"node_type": "usdrender"})
        instance = super(CreateUSDRender, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

    def _process(self, instance):
        """Creator main entry point.
        instance_node = hou.node(instance.get("instance_node"))

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        parms = {
            # Render frame range
            "trange": 1
        }
        if self.nodes:
            node = self.nodes[0]
            parms.update({"loppath": node.path()})
        instance.setParms(parms)
        if self.selected_nodes:
            parms["loppath"] = self.selected_nodes[0].path()
        instance_node.setParms(parms)

        # Lock some Avalon attributes
        to_lock = ["family", "id"]
        for name in to_lock:
            parm = instance.parm(name)
            parm.lock(True)
        self.lock_parameters(instance_node, to_lock)
@@ -1,38 +1,36 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating VDB Caches."""
from openpype.hosts.houdini.api import plugin
from openpype.pipeline import CreatedInstance


class CreateVDBCache(plugin.Creator):
class CreateVDBCache(plugin.HoudiniCreator):
    """OpenVDB from Geometry ROP"""

    identifier = "io.openpype.creators.houdini.vdbcache"
    name = "vbdcache"
    label = "VDB Cache"
    family = "vdbcache"
    icon = "cloud"

    def __init__(self, *args, **kwargs):
        super(CreateVDBCache, self).__init__(*args, **kwargs)
    def create(self, subset_name, instance_data, pre_create_data):
        import hou

        # Remove the active, we are checking the bypass flag of the nodes
        self.data.pop("active", None)
        instance_data.pop("active", None)
        instance_data.update({"node_type": "geometry"})

        # Set node type to create for output
        self.data["node_type"] = "geometry"
        instance = super(CreateVDBCache, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        instance_node = hou.node(instance.get("instance_node"))
        parms = {
            "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name,
            "sopoutput": "$HIP/pyblish/{}.$F4.vdb".format(subset_name),
            "initsim": True,
            "trange": 1
        }

        if self.nodes:
            node = self.nodes[0]
            parms.update({"soppath": node.path()})
        if self.selected_nodes:
            parms["soppath"] = self.selected_nodes[0].path()

        instance.setParms(parms)
        instance_node.setParms(parms)
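Note: the three creator hunks above all follow the same migration pattern. The legacy `Creator` subclass with `__init__`/`_process` becomes a `plugin.HoudiniCreator` whose `create()` receives `subset_name`, `instance_data` and `pre_create_data`, and the ROP node is resolved from `instance.get("instance_node")` afterwards. A minimal sketch of that shape, with a hypothetical family name and with `HoudiniCreator` assumed to behave as the hunks imply (node creation driven by `instance_data["node_type"]`, plus the `lock_parameters` helper):

    import hou
    from openpype.hosts.houdini.api import plugin
    from openpype.pipeline import CreatedInstance


    class CreateExampleCache(plugin.HoudiniCreator):
        """Hypothetical geometry cache creator using the new-style API."""
        identifier = "io.openpype.creators.houdini.examplecache"
        label = "Example Cache"
        family = "examplecache"
        icon = "cloud"

        def create(self, subset_name, instance_data, pre_create_data):
            # Bypass flags drive activeness, so drop the 'active' key
            instance_data.pop("active", None)
            instance_data.update({"node_type": "geometry"})

            instance = super(CreateExampleCache, self).create(
                subset_name,
                instance_data,
                pre_create_data)  # type: CreatedInstance

            # Resolve the ROP node created for this instance by its path
            instance_node = hou.node(instance.get("instance_node"))
            instance_node.setParms({
                "sopoutput": "$HIP/pyblish/{}.$F4.bgeo".format(subset_name)
            })
            self.lock_parameters(instance_node, ["family", "id"])
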
93 openpype/hosts/houdini/plugins/create/create_workfile.py Normal file

@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating workfiles."""
from openpype.hosts.houdini.api import plugin
from openpype.hosts.houdini.api.lib import read, imprint
from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER
from openpype.pipeline import CreatedInstance, AutoCreator
from openpype.pipeline import legacy_io
from openpype.client import get_asset_by_name
import hou


class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator):
    """Workfile auto-creator."""
    identifier = "io.openpype.creators.houdini.workfile"
    label = "Workfile"
    family = "workfile"
    icon = "document"

    default_variant = "Main"

    def create(self):
        variant = self.default_variant
        current_instance = next(
            (
                instance for instance in self.create_context.instances
                if instance.creator_identifier == self.identifier
            ), None)

        project_name = self.project_name
        asset_name = legacy_io.Session["AVALON_ASSET"]
        task_name = legacy_io.Session["AVALON_TASK"]
        host_name = legacy_io.Session["AVALON_APP"]

        if current_instance is None:
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            data = {
                "asset": asset_name,
                "task": task_name,
                "variant": variant
            }
            data.update(
                self.get_dynamic_data(
                    variant, task_name, asset_doc,
                    project_name, host_name, current_instance)
            )
            self.log.info("Auto-creating workfile instance...")
            current_instance = CreatedInstance(
                self.family, subset_name, data, self
            )
            self._add_instance_to_context(current_instance)
        elif (
            current_instance["asset"] != asset_name
            or current_instance["task"] != task_name
        ):
            # Update instance context if is not the same
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            current_instance["asset"] = asset_name
            current_instance["task"] = task_name
            current_instance["subset"] = subset_name

        # write workfile information to context container.
        op_ctx = hou.node(CONTEXT_CONTAINER)
        if not op_ctx:
            op_ctx = self.create_context_node()

        workfile_data = {"workfile": current_instance.data_to_store()}
        imprint(op_ctx, workfile_data)

    def collect_instances(self):
        op_ctx = hou.node(CONTEXT_CONTAINER)
        instance = read(op_ctx)
        if not instance:
            return
        workfile = instance.get("workfile")
        if not workfile:
            return
        created_instance = CreatedInstance.from_existing(
            workfile, self
        )
        self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        op_ctx = hou.node(CONTEXT_CONTAINER)
        for created_inst, _changes in update_list:
            if created_inst["creator_identifier"] == self.identifier:
                workfile_data = {"workfile": created_inst.data_to_store()}
                imprint(op_ctx, workfile_data, update=True)
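Note: the auto-creator persists its instance by imprinting the serialized data onto the context container node and reading it back in `collect_instances()`. A rough sketch of that round trip, assuming `imprint`/`read` store and return plain key/value data on the node, as the usage in this file implies:

    import hou
    from openpype.hosts.houdini.api.lib import read, imprint
    from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER

    # Assumed round trip: 'imprint' writes the data onto the context node
    # and 'read' returns it as a dict on the next collection pass.
    op_ctx = hou.node(CONTEXT_CONTAINER)
    imprint(op_ctx, {"workfile": {"subset": "workfileMain"}})

    stored = read(op_ctx)
    workfile_data = stored.get("workfile")  # -> {"subset": "workfileMain"}
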
@@ -1,4 +1,5 @@
import pyblish.api
import hou


class CollectInstanceActiveState(pyblish.api.InstancePlugin):
@@ -24,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin):

        # Check bypass state and reverse
        active = True
        node = instance[0]
        node = hou.node(instance.get("instance_node"))
        if hasattr(node, "isBypassed"):
            active = not node.isBypassed()

@@ -5,19 +5,20 @@ from openpype.pipeline import legacy_io
import pyblish.api


class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder - 0.01
    label = "Houdini Current File"
    hosts = ["houdini"]
    family = ["workfile"]

    def process(self, context):
    def process(self, instance):
        """Inject the current working file"""

        current_file = hou.hipFile.path()
        if not os.path.exists(current_file):
            # By default Houdini will even point a new scene to a path.
            # By default, Houdini will even point a new scene to a path.
            # However if the file is not saved at all and does not exist,
            # we assume the user never set it.
            filepath = ""
@@ -34,43 +35,26 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
                "saved correctly."
            )

        context.data["currentFile"] = current_file
        instance.context.data["currentFile"] = current_file

        folder, file = os.path.split(current_file)
        filename, ext = os.path.splitext(file)

        task = legacy_io.Session["AVALON_TASK"]

        data = {}

        # create instance
        instance = context.create_instance(name=filename)
        subset = 'workfile' + task.capitalize()

        data.update({
            "subset": subset,
            "asset": os.getenv("AVALON_ASSET", None),
            "label": subset,
            "publish": True,
            "family": 'workfile',
            "families": ['workfile'],
        instance.data.update({
            "setMembers": [current_file],
            "frameStart": context.data['frameStart'],
            "frameEnd": context.data['frameEnd'],
            "handleStart": context.data['handleStart'],
            "handleEnd": context.data['handleEnd']
            "frameStart": instance.context.data['frameStart'],
            "frameEnd": instance.context.data['frameEnd'],
            "handleStart": instance.context.data['handleStart'],
            "handleEnd": instance.context.data['handleEnd']
        })

        data['representations'] = [{
        instance.data['representations'] = [{
            'name': ext.lstrip("."),
            'ext': ext.lstrip("."),
            'files': file,
            "stagingDir": folder,
        }]

        instance.data.update(data)

        self.log.info('Collected instance: {}'.format(file))
        self.log.info('Scene path: {}'.format(current_file))
        self.log.info('staging Dir: {}'.format(folder))
        self.log.info('subset: {}'.format(subset))

@@ -1,19 +1,13 @@
# -*- coding: utf-8 -*-
"""Collector plugin for frames data on ROP instances."""
import os
import re

import hou
import hou  # noqa
import pyblish.api
from openpype.hosts.houdini.api import lib


def splitext(name, allowed_multidot_extensions):

    for ext in allowed_multidot_extensions:
        if name.endswith(ext):
            return name[:-len(ext)], ext

    return os.path.splitext(name)


class CollectFrames(pyblish.api.InstancePlugin):
    """Collect all frames which would be saved from the ROP nodes"""

@@ -24,7 +18,9 @@ class CollectFrames(pyblish.api.InstancePlugin):

    def process(self, instance):

        ropnode = instance[0]
        ropnode = hou.node(instance.data["instance_node"])
        frame_data = lib.get_frame_data(ropnode)
        instance.data.update(frame_data)

        start_frame = instance.data.get("frameStart", None)
        end_frame = instance.data.get("frameEnd", None)

@@ -38,13 +34,13 @@ class CollectFrames(pyblish.api.InstancePlugin):
        self.log.warning("Using current frame: {}".format(hou.frame()))
        output = output_parm.eval()

        _, ext = splitext(output,
        _, ext = lib.splitext(output,
                          allowed_multidot_extensions=[".ass.gz"])
        file_name = os.path.basename(output)
        result = file_name

        # Get the filename pattern match from the output
        # path so we can compute all frames that would
        # path, so we can compute all frames that would
        # come out from rendering the ROP node if there
        # is a frame pattern in the name
        pattern = r"\w+\.(\d+)" + re.escape(ext)

@@ -63,8 +59,9 @@ class CollectFrames(pyblish.api.InstancePlugin):
        # for a custom frame list. So this should be refactored.
        instance.data.update({"frames": result})

    def create_file_list(self, match, start_frame, end_frame):
        """Collect files based on frame range and regex.match
    @staticmethod
    def create_file_list(match, start_frame, end_frame):
        """Collect files based on frame range and `regex.match`

        Args:
            match(re.match): match object
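Note: `create_file_list` expands a single matched filename into the full per-frame sequence. A standalone sketch of how such an expansion can work, using the same `\w+\.(\d+)` plus extension pattern as above; the plugin's actual implementation may differ in detail:

    import re

    def create_file_list(match, start_frame, end_frame):
        """Rebuild one file name per frame from a regex match on a sample."""
        # e.g. "cache.1001.vdb" -> prefix "cache.", padding 4, suffix ".vdb"
        frame = match.group(1)
        padding = len(frame)
        name = match.string
        prefix = name[:match.start(1)]
        suffix = name[match.end(1):]
        return [
            "{}{}{}".format(prefix, str(f).zfill(padding), suffix)
            for f in range(int(start_frame), int(end_frame) + 1)
        ]

    # create_file_list(re.match(r"\w+\.(\d+)\.vdb", "cache.1001.vdb"), 1001, 1003)
    # -> ['cache.1001.vdb', 'cache.1002.vdb', 'cache.1003.vdb']
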
@@ -47,6 +47,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
            if node.evalParm("id") != "pyblish.avalon.instance":
                continue

            # instance was created by new creator code, skip it as
            # it is already collected.
            if node.parm("creator_identifier"):
                continue

            has_family = node.evalParm("family")
            assert has_family, "'%s' is missing 'family'" % node.name()

@@ -58,7 +63,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
            data.update({"active": not node.isBypassed()})

            # temporarily translation of `active` to `publish` till issue has
            # been resolved, https://github.com/pyblish/pyblish-base/issues/307
            # been resolved.
            # https://github.com/pyblish/pyblish-base/issues/307
            if "active" in data:
                data["publish"] = data["active"]

@@ -78,6 +84,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
            instance.data["families"] = [instance.data["family"]]

            instance[:] = [node]
            instance.data["instance_node"] = node.path()
            instance.data.update(data)

    def sort_by_family(instance):
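Note: storing the node path (a plain string) in `instance.data["instance_node"]` instead of appending the `hou.Node` object keeps the instance data serializable and lets every downstream plugin resolve the live node lazily. A minimal sketch of the consumer side of that pattern, as used throughout the collectors, extractors and validators below (plugin name is illustrative):

    import hou
    import pyblish.api


    class ExampleInstancePlugin(pyblish.api.InstancePlugin):
        """Hypothetical plugin resolving the ROP node from its stored path."""
        order = pyblish.api.CollectorOrder

        def process(self, instance):
            # The collector stored only the path; resolve the live node here.
            rop_node = hou.node(instance.data["instance_node"])
            if rop_node is None:
                raise RuntimeError(
                    "Instance node no longer exists: "
                    "{}".format(instance.data["instance_node"]))
            self.log.debug("Processing ROP: %s" % rop_node.path())
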
@@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):

        import hou

        node = instance[0]
        node = hou.node(instance.data["instance_node"])

        # Get sop path
        node_type = node.type().name()

@@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):

    def process(self, instance):

        rop = instance[0]
        rop = hou.node(instance.get("instance_node"))

        # Collect chunkSize
        chunk_size_parm = rop.parm("chunkSize")

@@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin):

        node = instance.data.get("output_node")
        if not node:
            rop_path = instance[0].path()
            rop_path = instance.data["instance_node"].path()
            raise RuntimeError(
                "No output node found. Make sure to connect an "
                "input to the USD ROP: %s" % rop_path

@@ -1,6 +1,6 @@
import pyblish.api

from openyppe.client import get_subset_by_name, get_asset_by_name
from openpype.client import get_subset_by_name, get_asset_by_name
from openpype.pipeline import legacy_io
import openpype.lib.usdlib as usdlib

@@ -3,6 +3,8 @@ import os
import pyblish.api
import openpype.hosts.houdini.api.usd as usdlib

import hou


class CollectUsdLayers(pyblish.api.InstancePlugin):
    """Collect the USD Layers that have configured save paths."""

@@ -19,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
            self.log.debug("No output node found..")
            return

        rop_node = instance[0]
        rop_node = hou.node(instance.get("instance_node"))

        save_layers = []
        for layer in usdlib.get_configured_save_layers(rop_node):

@@ -54,8 +56,10 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
            layer_inst.data["subset"] = "__stub__"
            layer_inst.data["label"] = label
            layer_inst.data["asset"] = instance.data["asset"]
            layer_inst.append(instance[0])  # include same USD ROP
            layer_inst.append((layer, save_path))  # include layer data
            # include same USD ROP
            layer_inst.append(rop_node)
            # include layer data
            layer_inst.append((layer, save_path))

            # Allow this subset to be grouped into a USD Layer on creation
            layer_inst.data["subsetGroup"] = "USD Layer"

@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop

import hou


class ExtractAlembic(publish.Extractor):

@@ -15,7 +17,7 @@ class ExtractAlembic(publish.Extractor):

    def process(self, instance):

        ropnode = instance[0]
        ropnode = hou.node(instance.data["instance_node"])

        # Get the filename from the filename parameter
        output = ropnode.evalParm("filename")

@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop

import hou


class ExtractAss(publish.Extractor):

@@ -15,7 +17,7 @@ class ExtractAss(publish.Extractor):

    def process(self, instance):

        ropnode = instance[0]
        ropnode = hou.node(instance.data["instance_node"])

        # Get the filename from the filename parameter
        # `.evalParm(parameter)` will make sure all tokens are resolved

@@ -33,8 +35,12 @@ class ExtractAss(publish.Extractor):
        # error and thus still continues to the integrator. To capture that
        # we make sure all files exist
        files = instance.data["frames"]
        missing = [fname for fname in files
                   if not os.path.exists(os.path.join(staging_dir, fname))]
        missing = []
        for file_name in files:
            full_path = os.path.normpath(os.path.join(staging_dir, file_name))
            if not os.path.exists(full_path):
                missing.append(full_path)

        if missing:
            raise RuntimeError("Failed to complete Arnold ass extraction. "
                               "Missing output files: {}".format(missing))

@@ -1,9 +1,10 @@
import os

import pyblish.api

from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop
from openpype.hosts.houdini.api.lib import render_rop, splitext

import hou


class ExtractComposite(publish.Extractor):

@@ -15,7 +16,7 @@ class ExtractComposite(publish.Extractor):

    def process(self, instance):

        ropnode = instance[0]
        ropnode = hou.node(instance.data["instance_node"])

        # Get the filename from the copoutput parameter
        # `.evalParm(parameter)` will make sure all tokens are resolved

@@ -28,8 +29,24 @@ class ExtractComposite(publish.Extractor):

        render_rop(ropnode)

        if "files" not in instance.data:
            instance.data["files"] = []
        output = instance.data["frames"]
        _, ext = splitext(output[0], [])
        ext = ext.lstrip(".")

        frames = instance.data["frames"]
        instance.data["files"].append(frames)
        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            "name": ext,
            "ext": ext,
            "files": output,
            "stagingDir": staging_dir,
            "frameStart": instance.data["frameStart"],
            "frameEnd": instance.data["frameEnd"],
        }

        from pprint import pformat

        self.log.info(pformat(representation))

        instance.data["representations"].append(representation)
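Note: the extractor now registers its output through a representation dict instead of the bare "files" list; the keys used above are the ones the integrator consumes. A minimal example payload for a three-frame EXR sequence, with a plain dict standing in for `instance.data` (file names and the staging path are illustrative):

    # Illustrative stand-in for 'instance.data' of a composite instance.
    data = {"frameStart": 1001, "frameEnd": 1003}

    representation = {
        "name": "exr",
        "ext": "exr",
        "files": ["comp.1001.exr", "comp.1002.exr", "comp.1003.exr"],
        "stagingDir": "/tmp/pyblish/comp",  # illustrative path
        "frameStart": data["frameStart"],
        "frameEnd": data["frameEnd"],
    }
    data.setdefault("representations", []).append(representation)
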
@@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
import os

from pprint import pformat

import pyblish.api

from openpype.pipeline import publish
import hou


class ExtractHDA(publish.Extractor):

@@ -17,7 +15,7 @@ class ExtractHDA(publish.Extractor):

    def process(self, instance):
        self.log.info(pformat(instance.data))
        hda_node = instance[0]
        hda_node = hou.node(instance.data.get("instance_node"))
        hda_def = hda_node.type().definition()
        hda_options = hda_def.options()
        hda_options.setSaveInitialParmsAndContents(True)

@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop

import hou


class ExtractRedshiftProxy(publish.Extractor):

@@ -15,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor):

    def process(self, instance):

        ropnode = instance[0]
        ropnode = hou.node(instance.get("instance_node"))

        # Get the filename from the filename parameter
        # `.evalParm(parameter)` will make sure all tokens are resolved

@@ -5,6 +5,7 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop

import hou

class ExtractUSD(publish.Extractor):

@@ -17,7 +18,7 @@ class ExtractUSD(publish.Extractor):

    def process(self, instance):

        ropnode = instance[0]
        ropnode = hou.node(instance.get("instance_node"))

        # Get the filename from the filename parameter
        output = ropnode.evalParm("lopoutput")

@@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor):

        # Main ROP node, either a USD Rop or ROP network with
        # multiple USD ROPs
        node = instance[0]
        node = hou.node(instance.get("instance_node"))

        # Collect any output dependencies that have not been processed yet
        # during extraction of other instances

@@ -5,6 +5,8 @@ import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.houdini.api.lib import render_rop

import hou


class ExtractVDBCache(publish.Extractor):

@@ -15,7 +17,7 @@ class ExtractVDBCache(publish.Extractor):

    def process(self, instance):

        ropnode = instance[0]
        ropnode = hou.node(instance.get("instance_node"))

        # Get the filename from the filename parameter
        # `.evalParm(parameter)` will make sure all tokens are resolved

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Scene setting</title>
        <description>
## Invalid input node

VDB input must have the same number of VDBs, points, primitives and vertices as output.

        </description>
        <detail>
### __Detailed Info__ (optional)

A VDB is an inherited type of Prim, holds the following data:
- Primitives: 1
- Points: 1
- Vertices: 1
- VDBs: 1
        </detail>
    </error>
</root>

@@ -2,7 +2,7 @@ import pyblish.api

from openpype.lib import version_up
from openpype.pipeline import registered_host

from openpype.hosts.houdini.api import HoudiniHost

class IncrementCurrentFile(pyblish.api.ContextPlugin):
    """Increment the current file.

@@ -20,11 +20,11 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):
    def process(self, context):

        # Filename must not have changed since collecting
        host = registered_host()
        host = registered_host()  # type: HoudiniHost
        current_file = host.current_file()
        assert (
            context.data["currentFile"] == current_file
        ), "Collected filename from current scene name."

        new_filepath = version_up(current_file)
        host.save(new_filepath)
        host.save_workfile(new_filepath)

@@ -14,13 +14,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin):

        # Filename must not have changed since collecting
        host = registered_host()
        current_file = host.current_file()
        current_file = host.get_current_workfile()
        assert context.data['currentFile'] == current_file, (
            "Collected filename from current scene name."
        )

        if host.has_unsaved_changes():
            self.log.info("Saving current file..")
            host.save_file(current_file)
            self.log.info("Saving current file {}...".format(current_file))
            host.save_workfile(current_file)
        else:
            self.log.debug("No unsaved changes, skipping file save..")
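Note: both scene-saving plugins now go through the host's workfile API (`get_current_workfile`, `has_unsaved_changes`, `save_workfile`) instead of the deprecated `current_file`/`save_file` pair. A sketch of how a context plugin drives that interface, assuming a registered host implementing the workfile methods shown in these hunks (plugin name and order are illustrative):

    import pyblish.api
    from openpype.pipeline import registered_host


    class SaveSceneExample(pyblish.api.ContextPlugin):
        """Hypothetical plugin saving through the workfile host API."""
        order = pyblish.api.ExtractorOrder - 0.49

        def process(self, context):
            host = registered_host()
            current_file = host.get_current_workfile()
            if host.has_unsaved_changes():
                self.log.info("Saving current file {}...".format(current_file))
                host.save_workfile(current_file)
            else:
                self.log.debug("No unsaved changes, skipping file save..")
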
@@ -1,47 +0,0 @@
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder


class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    """Validate that the node connected to the output node is of type VDB.

    Regardless of the amount of VDBs create the output will need to have an
    equal amount of VDBs, points, primitives and vertices

    A VDB is an inherited type of Prim, holds the following data:
        - Primitives: 1
        - Points: 1
        - Vertices: 1
        - VDBs: 1

    """

    order = ValidateContentsOrder + 0.1
    families = ["vdbcache"]
    hosts = ["houdini"]
    label = "Validate Input Node (VDB)"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Node connected to the output node is not" "of type VDB!"
            )

    @classmethod
    def get_invalid(cls, instance):

        node = instance.data["output_node"]

        prims = node.geometry().prims()
        nr_of_prims = len(prims)

        nr_of_points = len(node.geometry().points())
        if nr_of_points != nr_of_prims:
            cls.log.error("The number of primitives and points do not match")
            return [instance]

        for prim in prims:
            if prim.numVertices() != 1:
                cls.log.error("Found primitive with more than 1 vertex!")
                return [instance]

@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
import pyblish.api

from collections import defaultdict

from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError


class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):

@@ -16,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):

    """

    order = ValidateContentsOrder + 0.1
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Primitive to Detail (Abc)"

@@ -24,18 +24,26 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Primitives found with inconsistent primitive "
                "to detail attributes. See log."
            raise PublishValidationError(
                ("Primitives found with inconsistent primitive "
                 "to detail attributes. See log."),
                title=self.label
            )

    @classmethod
    def get_invalid(cls, instance):
        import hou  # noqa
        output_node = instance.data.get("output_node")
        rop_node = hou.node(instance.data["instance_node"])
        if output_node is None:
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % rop_node.path()
            )

        output = instance.data["output_node"]
            return [rop_node.path()]

        rop = instance[0]
        pattern = rop.parm("prim_to_detail_pattern").eval().strip()
        pattern = rop_node.parm("prim_to_detail_pattern").eval().strip()
        if not pattern:
            cls.log.debug(
                "Alembic ROP has no 'Primitive to Detail' pattern. "

@@ -43,7 +51,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
            )
            return

        build_from_path = rop.parm("build_from_path").eval()
        build_from_path = rop_node.parm("build_from_path").eval()
        if not build_from_path:
            cls.log.debug(
                "Alembic ROP has 'Build from Path' disabled. "

@@ -51,14 +59,14 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
            )
            return

        path_attr = rop.parm("path_attrib").eval()
        path_attr = rop_node.parm("path_attrib").eval()
        if not path_attr:
            cls.log.error(
                "The Alembic ROP node has no Path Attribute"
                "value set, but 'Build Hierarchy from Attribute'"
                "is enabled."
            )
            return [rop.path()]
            return [rop_node.path()]

        # Let's assume each attribute is explicitly named for now and has no
        # wildcards for Primitive to Detail. This simplifies the check.

@@ -67,7 +75,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):

        # Check if the primitive attribute exists
        frame = instance.data.get("frameStart", 0)
        geo = output.geometryAtFrame(frame)
        geo = output_node.geometryAtFrame(frame)

        # If there are no primitives on the start frame then it might be
        # something that is emitted over time. As such we can't actually

@@ -86,7 +94,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
                "Geometry Primitives are missing "
                "path attribute: `%s`" % path_attr
            )
            return [output.path()]
            return [output_node.path()]

        # Ensure at least a single string value is present
        if not attrib.strings():

@@ -94,7 +102,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
                "Primitive path attribute has no "
                "string values: %s" % path_attr
            )
            return [output.path()]
            return [output_node.path()]

        paths = None
        for attr in pattern.split(" "):

@@ -130,4 +138,4 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
                "Path has multiple values: %s (path: %s)"
                % (list(values), path)
            )
            return [output.path()]
            return [output_node.path()]

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
import pyblish.api

from openpype.pipeline.publish import ValidateContentsOrder

import hou

class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin):
    """Validate Face Sets are disabled for extraction to pointcache.

@@ -18,14 +17,14 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin):

    """

    order = ValidateContentsOrder + 0.1
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Alembic ROP Face Sets"

    def process(self, instance):

        rop = instance[0]
        rop = hou.node(instance.data["instance_node"])
        facesets = rop.parm("facesets").eval()

        # 0 = No Face Sets

@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import pyblish.api

from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError
import hou


class ValidateAlembicInputNode(pyblish.api.InstancePlugin):

@@ -12,7 +13,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):

    """

    order = ValidateContentsOrder + 0.1
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Input Node (Abc)"

@@ -20,18 +21,28 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Primitive types found that are not supported"
                "for Alembic output."
            raise PublishValidationError(
                ("Primitive types found that are not supported"
                 "for Alembic output."),
                title=self.label
            )

    @classmethod
    def get_invalid(cls, instance):

        invalid_prim_types = ["VDB", "Volume"]
        node = instance.data["output_node"]
        output_node = instance.data.get("output_node")

        if not hasattr(node, "geometry"):
        if output_node is None:
            node = hou.node(instance.data["instance_node"])
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()
            )

            return [node.path()]

        if not hasattr(output_node, "geometry"):
            # In the case someone has explicitly set an Object
            # node instead of a SOP node in Geometry context
            # then for now we ignore - this allows us to also

@@ -40,7 +51,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
            return

        frame = instance.data.get("frameStart", 0)
        geo = node.geometryAtFrame(frame)
        geo = output_node.geometryAtFrame(frame)

        invalid = False
        for prim_type in invalid_prim_types:

@@ -1,6 +1,7 @@
import pyblish.api

from openpype.hosts.houdini.api import lib
import hou


class ValidateAnimationSettings(pyblish.api.InstancePlugin):

@@ -36,7 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin):
    @classmethod
    def get_invalid(cls, instance):

        node = instance[0]
        node = hou.node(instance.get("instance_node"))

        # Check trange parm, 0 means Render Current Frame
        frame_range = node.evalParm("trange")

@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError

import hou

class ValidateBypassed(pyblish.api.InstancePlugin):
    """Validate all primitives build hierarchy from attribute when enabled.

@@ -11,7 +13,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin):

    """

    order = ValidateContentsOrder - 0.1
    order = pyblish.api.ValidatorOrder - 0.1
    families = ["*"]
    hosts = ["houdini"]
    label = "Validate ROP Bypass"

@@ -26,14 +28,15 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
        invalid = self.get_invalid(instance)
        if invalid:
            rop = invalid[0]
            raise RuntimeError(
                "ROP node %s is set to bypass, publishing cannot continue.."
                % rop.path()
            raise PublishValidationError(
                ("ROP node {} is set to bypass, publishing cannot "
                 "continue.".format(rop.path())),
                title=self.label
            )

    @classmethod
    def get_invalid(cls, instance):

        rop = instance[0]
        rop = hou.node(instance.get("instance_node"))
        if hasattr(rop, "isBypassed") and rop.isBypassed():
            return [rop]

@@ -1,11 +1,13 @@
# -*- coding: utf-8 -*-
"""Validator plugin for Houdini Camera ROP settings."""
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError


class ValidateCameraROP(pyblish.api.InstancePlugin):
    """Validate Camera ROP settings."""

    order = ValidateContentsOrder
    order = pyblish.api.ValidatorOrder
    families = ["camera"]
    hosts = ["houdini"]
    label = "Camera ROP"

@@ -14,30 +16,45 @@ class ValidateCameraROP(pyblish.api.InstancePlugin):

        import hou

        node = instance[0]
        node = hou.node(instance.data.get("instance_node"))
        if node.parm("use_sop_path").eval():
            raise RuntimeError(
                "Alembic ROP for Camera export should not be "
                "set to 'Use Sop Path'. Please disable."
            raise PublishValidationError(
                ("Alembic ROP for Camera export should not be "
                 "set to 'Use Sop Path'. Please disable."),
                title=self.label
            )

        # Get the root and objects parameter of the Alembic ROP node
        root = node.parm("root").eval()
        objects = node.parm("objects").eval()
        assert root, "Root parameter must be set on Alembic ROP"
        assert root.startswith("/"), "Root parameter must start with slash /"
        assert objects, "Objects parameter must be set on Alembic ROP"
        assert len(objects.split(" ")) == 1, "Must have only a single object."
        errors = []
        if not root:
            errors.append("Root parameter must be set on Alembic ROP")
        if not root.startswith("/"):
            errors.append("Root parameter must start with slash /")
        if not objects:
            errors.append("Objects parameter must be set on Alembic ROP")
        if len(objects.split(" ")) != 1:
            errors.append("Must have only a single object.")

        if errors:
            for error in errors:
                self.log.error(error)
            raise PublishValidationError(
                "Some checks failed, see validator log.",
                title=self.label)

        # Check if the object exists and is a camera
        path = root + "/" + objects
        camera = hou.node(path)

        if not camera:
            raise ValueError("Camera path does not exist: %s" % path)
            raise PublishValidationError(
                "Camera path does not exist: %s" % path,
                title=self.label)

        if camera.type().name() != "cam":
            raise ValueError(
                "Object set in Alembic ROP is not a camera: "
                "%s (type: %s)" % (camera, camera.type().name())
            )
            raise PublishValidationError(
                ("Object set in Alembic ROP is not a camera: "
                 "{} (type: {})").format(camera, camera.type().name()),
                title=self.label)
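Note: the camera validator replaces one-shot asserts with an accumulated error list, so the artist sees every failed check in a single run before one `PublishValidationError` is raised. The same accumulate-then-raise shape reduced to its core, with the checks rearranged slightly for the standalone sketch (function name is illustrative):

    def validate_settings(root, objects):
        """Collect every failed check instead of stopping at the first."""
        errors = []
        if not root:
            errors.append("Root parameter must be set on Alembic ROP")
        elif not root.startswith("/"):
            errors.append("Root parameter must start with slash /")
        if not objects:
            errors.append("Objects parameter must be set on Alembic ROP")
        elif len(objects.split(" ")) != 1:
            errors.append("Must have only a single object.")
        return errors

    # validate_settings("", "cam1 cam2")
    # -> ['Root parameter must be set on Alembic ROP',
    #     'Must have only a single object.']
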
@@ -1,4 +1,9 @@
# -*- coding: utf-8 -*-
import sys
import pyblish.api
import six

from openpype.pipeline import PublishValidationError


class ValidateCopOutputNode(pyblish.api.InstancePlugin):

@@ -20,9 +25,10 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            raise PublishValidationError(
                ("Output node(s) `{}` are incorrect. "
                 "See plug-in log for details.").format(invalid),
                title=self.label
            )

    @classmethod

@@ -30,10 +36,19 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):

        import hou

        output_node = instance.data["output_node"]
        try:
            output_node = instance.data["output_node"]
        except KeyError:
            six.reraise(
                PublishValidationError,
                PublishValidationError(
                    "Can't determine COP output node.",
                    title=cls.__name__),
                sys.exc_info()[2]
            )

        if output_node is None:
            node = instance[0]
            node = hou.node(instance.get("instance_node"))
            cls.log.error(
                "COP Output node in '%s' does not exist. "
                "Ensure a valid COP output path is set." % node.path()

@@ -54,7 +69,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
        # For the sake of completeness also assert the category type
        # is Cop2 to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Cop2", (
            "Output node %s is not of category Cop2. This is a bug.."
            % output_node.path()
        )
        if output_node.type().category().name() != "Cop2":
            raise PublishValidationError(
                ("Output node %s is not of category Cop2. "
                 "This is a bug...").format(output_node.path()),
                title=cls.label)
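Note: the `six.reraise` call above converts the `KeyError` into a user-facing `PublishValidationError` while keeping the original traceback, which matters on Python 2 where exception chaining is unavailable. A minimal sketch of the idiom, with a stand-in exception class so it runs on its own:

    import sys
    import six


    class PublishValidationError(Exception):
        """Stand-in for openpype.pipeline.PublishValidationError."""
        def __init__(self, message, title=None):
            super(PublishValidationError, self).__init__(message)
            self.title = title


    def get_output_node(data):
        try:
            return data["output_node"]
        except KeyError:
            # Re-raise as a user-facing error, preserving the traceback
            # of the original KeyError (Python 2/3 compatible).
            six.reraise(
                PublishValidationError,
                PublishValidationError("Can't determine COP output node."),
                sys.exc_info()[2]
            )
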
@@ -1,7 +1,11 @@
# -*- coding: utf-8 -*-
import os
import pyblish.api

from openpype.hosts.houdini.api import lib
from openpype.pipeline import PublishValidationError

import hou


class ValidateFileExtension(pyblish.api.InstancePlugin):

@@ -29,15 +33,16 @@ class ValidateFileExtension(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "ROP node has incorrect " "file extension: %s" % invalid
            raise PublishValidationError(
                "ROP node has incorrect file extension: {}".format(invalid),
                title=self.label
            )

    @classmethod
    def get_invalid(cls, instance):

        # Get ROP node from instance
        node = instance[0]
        node = hou.node(instance.data["instance_node"])

        # Create lookup for current family in instance
        families = []

@@ -53,7 +58,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin):
        for family in families:
            extension = cls.family_extensions.get(family, None)
            if extension is None:
                raise RuntimeError("Unsupported family: %s" % family)
                raise PublishValidationError(
                    "Unsupported family: {}".format(family),
                    title=cls.label)

            if output_extension != extension:
                return [node.path()]

@@ -1,6 +1,7 @@
import pyblish.api

from openpype.hosts.houdini.api import lib
import hou


class ValidateFrameToken(pyblish.api.InstancePlugin):

@@ -36,7 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin):
    @classmethod
    def get_invalid(cls, instance):

        node = instance[0]
        node = hou.node(instance.get("instance_node"))

        # Check trange parm, 0 means Render Current Frame
        frame_range = node.evalParm("trange")

@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin):

@@ -24,7 +26,7 @@ class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin):

        license = hou.licenseCategory()
        if license != hou.licenseCategoryType.Commercial:
            raise RuntimeError(
                "USD Publishing requires a full Commercial "
                "license. You are on: %s" % license
            )
            raise PublishValidationError(
                ("USD Publishing requires a full Commercial "
                 "license. You are on: {}").format(license),
                title=self.label)

@@ -1,11 +1,12 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError


class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
    """Validate Create Intermediate Directories is enabled on ROP node."""

    order = ValidateContentsOrder
    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "camera", "vdbcache"]
    hosts = ["houdini"]
    label = "Create Intermediate Directories Checked"

@@ -14,10 +15,10 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Found ROP node with Create Intermediate "
                "Directories turned off: %s" % invalid
            )
            raise PublishValidationError(
                ("Found ROP node with Create Intermediate "
                 "Directories turned off: {}".format(invalid)),
                title=self.label)

    @classmethod
    def get_invalid(cls, instance):

@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import pyblish.api
import hou
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError


def cook_in_range(node, start, end):

@@ -28,7 +29,7 @@ def get_errors(node):
class ValidateNoErrors(pyblish.api.InstancePlugin):
    """Validate the Instance has no current cooking errors."""

    order = ValidateContentsOrder
    order = pyblish.api.ValidatorOrder
    hosts = ["houdini"]
    label = "Validate no errors"

@@ -37,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin):
        validate_nodes = []

        if len(instance) > 0:
            validate_nodes.append(instance[0])
            validate_nodes.append(hou.node(instance.get("instance_node")))
        output_node = instance.data.get("output_node")
        if output_node:
            validate_nodes.append(output_node)

@@ -62,4 +63,6 @@ class ValidateNoErrors(pyblish.api.InstancePlugin):
            errors = get_errors(node)
            if errors:
                self.log.error(errors)
                raise RuntimeError("Node has errors: %s" % node.path())
                raise PublishValidationError(
                    "Node has errors: {}".format(node.path()),
                    title=self.label)

@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError
import hou


class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):

@@ -19,19 +22,26 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "See log for details. " "Invalid nodes: {0}".format(invalid)
            raise PublishValidationError(
                "See log for details. " "Invalid nodes: {0}".format(invalid),
                title=self.label
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou
        output_node = instance.data.get("output_node")
        rop_node = hou.node(instance.data["instance_node"])

        output = instance.data["output_node"]
        if output_node is None:
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % rop_node.path()
            )

        rop = instance[0]
        build_from_path = rop.parm("build_from_path").eval()
            return [rop_node.path()]

        build_from_path = rop_node.parm("build_from_path").eval()
        if not build_from_path:
            cls.log.debug(
                "Alembic ROP has 'Build from Path' disabled. "

@@ -39,20 +49,20 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
            )
            return

        path_attr = rop.parm("path_attrib").eval()
        path_attr = rop_node.parm("path_attrib").eval()
        if not path_attr:
            cls.log.error(
                "The Alembic ROP node has no Path Attribute"
                "value set, but 'Build Hierarchy from Attribute'"
                "is enabled."
            )
            return [rop.path()]
            return [rop_node.path()]

        cls.log.debug("Checking for attribute: %s" % path_attr)

        # Check if the primitive attribute exists
        frame = instance.data.get("frameStart", 0)
        geo = output.geometryAtFrame(frame)
        geo = output_node.geometryAtFrame(frame)

        # If there are no primitives on the current frame then we can't
        # check whether the path names are correct. So we'll just issue a

@@ -73,7 +83,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
                "Geometry Primitives are missing "
                "path attribute: `%s`" % path_attr
            )
            return [output.path()]
            return [output_node.path()]

        # Ensure at least a single string value is present
        if not attrib.strings():

@@ -81,7 +91,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
                "Primitive path attribute has no "
                "string values: %s" % path_attr
            )
            return [output.path()]
            return [output_node.path()]

        paths = geo.primStringAttribValues(path_attr)
        # Ensure all primitives are set to a valid path

@@ -93,4 +103,4 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
                "Prims have no value for attribute `%s` "
                "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims)
            )
            return [output.path()]
            return [output_node.path()]

@@ -1,7 +1,9 @@
# -*-coding: utf-8 -*-
import pyblish.api

from openpype.hosts.houdini.api import lib
from openpype.pipeline.publish import RepairContextAction
from openpype.pipeline import PublishValidationError

import hou

@@ -27,17 +29,24 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin):
        # We ensure it's a shell node and that it has the pre-render script
        # set correctly. Plus the shell script it will trigger should be
        # completely empty (doing nothing)
        assert node.type().name() == "shell", "Must be shell ROP node"
        assert node.parm("command").eval() == "", "Must have no command"
        assert not node.parm("shellexec").eval(), "Must not execute in shell"
        assert (
            node.parm("prerender").eval() == cmd
        ), "REMOTE_PUBLISH node does not have correct prerender script."
        assert (
            node.parm("lprerender").eval() == "python"
        ), "REMOTE_PUBLISH node prerender script type not set to 'python'"
        if node.type().name() != "shell":
            self.raise_error("Must be shell ROP node")
        if node.parm("command").eval() != "":
            self.raise_error("Must have no command")
        if node.parm("shellexec").eval():
            self.raise_error("Must not execute in shell")
        if node.parm("prerender").eval() != cmd:
            self.raise_error(("REMOTE_PUBLISH node does not have "
                              "correct prerender script."))
        if node.parm("lprerender").eval() != "python":
            self.raise_error(("REMOTE_PUBLISH node prerender script "
                              "type not set to 'python'"))

    @classmethod
    def repair(cls, context):
        """(Re)create the node if it fails to pass validation."""
        lib.create_remote_publish_node(force=True)

    def raise_error(self, message):
        self.log.error(message)
        raise PublishValidationError(message, title=self.label)

@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
import pyblish.api

import hou
from openpype.pipeline.publish import RepairContextAction
from openpype.pipeline import PublishValidationError


class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):

@@ -18,10 +20,12 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")
            raise PublishValidationError(
                "Missing REMOTE_PUBLISH node.", title=self.label)

        if node.isBypassed():
            raise RuntimeError("REMOTE_PUBLISH must not be bypassed.")
            raise PublishValidationError(
                "REMOTE_PUBLISH must not be bypassed.", title=self.label)

    @classmethod
    def repair(cls, context):

@@ -29,7 +33,8 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin):

        node = hou.node("/out/REMOTE_PUBLISH")
        if not node:
            raise RuntimeError("Missing REMOTE_PUBLISH node.")
            raise PublishValidationError(
                "Missing REMOTE_PUBLISH node.", title=cls.label)

        cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH")
        node.bypass(False)

@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateSopOutputNode(pyblish.api.InstancePlugin):

@@ -22,9 +24,9 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            raise PublishValidationError(
                "Output node(s) are incorrect",
                title="Invalid output node(s)"
            )

    @classmethod

@@ -32,10 +34,10 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):

        import hou

        output_node = instance.data["output_node"]
        output_node = instance.data.get("output_node")

        if output_node is None:
            node = instance[0]
            node = hou.node(instance.data["instance_node"])
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()

@@ -56,10 +58,11 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
        # For the sake of completeness also assert the category type
        # is Sop to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Sop", (
            "Output node %s is not of category Sop. This is a bug.."
            % output_node.path()
        )
        if output_node.type().category().name() != "Sop":
            raise PublishValidationError(
                ("Output node {} is not of category Sop. "
                 "This is a bug.").format(output_node.path()),
                title=cls.label)

        # Ensure the node is cooked and succeeds to cook so we can correctly
        # check for its geometry data.

@@ -1,6 +1,10 @@
# -*- coding: utf-8 -*-
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib
from openpype.pipeline import PublishValidationError

import hou


class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):

@@ -24,7 +28,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):

    def process(self, instance):

        rop = instance[0]
        rop = hou.node(instance.get("instance_node"))
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)


@@ -44,7 +48,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
            invalid.append(layer)

        if invalid:
            raise RuntimeError(
            raise PublishValidationError((
                "Loaded layers have backslashes. "
                "This is invalid for HUSK USD rendering."
            )
                "This is invalid for HUSK USD rendering."),
                title=self.label)

@@ -1,10 +1,13 @@
# -*- coding: utf-8 -*-
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib

from openpype.pipeline import PublishValidationError

from pxr import UsdShade, UsdRender, UsdLux

import hou


def fullname(o):
    """Get fully qualified class name"""

@@ -37,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin):

    def process(self, instance):

        rop = instance[0]
        rop = hou.node(instance.get("instance_node"))
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)


@@ -55,7 +58,8 @@ class ValidateUsdModel(pyblish.api.InstancePlugin):

        if invalid:
            prim_paths = sorted([str(prim.GetPath()) for prim in invalid])
            raise RuntimeError("Found invalid primitives: %s" % prim_paths)
            raise PublishValidationError(
                "Found invalid primitives: {}".format(prim_paths))


class ValidateUsdShade(ValidateUsdModel):

@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateUSDOutputNode(pyblish.api.InstancePlugin):

@@ -20,9 +22,10 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            raise PublishValidationError(
                ("Output node(s) `{}` are incorrect. "
                 "See plug-in log for details.").format(invalid),
                title=self.label
            )

    @classmethod

@@ -33,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            node = hou.node(instance.get("instance_node"))
            cls.log.error(
                "USD node '%s' LOP path does not exist. "
                "Ensure a valid LOP path is set." % node.path()

@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
import os
import pyblish.api

import os
from openpype.pipeline import PublishValidationError


class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin):

@@ -28,4 +30,5 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin):
        if invalid:
            for message in invalid:
                self.log.error(message)
            raise RuntimeError("USD Render Paths are invalid.")
            raise PublishValidationError(
                "USD Render Paths are invalid.", title=self.label)

@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
import pyblish.api

import openpype.hosts.houdini.api.usd as hou_usdlib
from openpype.pipeline import PublishValidationError


class ValidateUsdSetDress(pyblish.api.InstancePlugin):

@@ -20,8 +22,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin):
    def process(self, instance):

        from pxr import UsdGeom
        import hou

        rop = instance[0]
        rop = hou.node(instance.get("instance_node"))
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)


@@ -47,8 +50,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin):
            invalid.append(node)

        if invalid:
            raise RuntimeError(
            raise PublishValidationError((
                "SetDress contains local geometry. "
                "This is not allowed, it must be an assembly "
                "of referenced assets."
                "of referenced assets."),
                title=self.label
            )


@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import re

import pyblish.api

@@ -5,6 +6,7 @@ import pyblish.api
from openpype.client import get_subset_by_name
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError


class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):

@@ -32,7 +34,8 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
            project_name, model_subset, asset_doc["_id"], fields=["_id"]
        )
        if not subset_doc:
            raise RuntimeError(
                "USD Model subset not found: "
                "%s (%s)" % (model_subset, asset_name)
            raise PublishValidationError(
                ("USD Model subset not found: "
                 "{} ({})").format(model_subset, asset_name),
                title=self.label
            )

@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError

import hou


@@ -12,14 +13,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):

    """

    order = ValidateContentsOrder
    order = pyblish.api.ValidatorOrder
    hosts = ["houdini"]
    families = ["usdShade"]
    label = "USD Shade Workspace"

    def process(self, instance):

        rop = instance[0]
        rop = hou.node(instance.get("instance_node"))
        workspace = rop.parent()

        definition = workspace.type().definition()

@@ -39,13 +40,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
            if node_type != other_node_type:
                continue

            # Get highest version
            # Get the highest version
            highest = max(highest, other_version)

        if version != highest:
            raise RuntimeError(
                "Shading Workspace is not the latest version."
                " Found %s. Latest is %s." % (version, highest)
            raise PublishValidationError(
                ("Shading Workspace is not the latest version."
                 " Found {}. Latest is {}.").format(version, highest),
                title=self.label
            )

        # There were some issues with the editable node not having the right

@@ -56,8 +58,9 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
        )
        rop_value = rop.parm("lopoutput").rawValue()
        if rop_value != value:
            raise RuntimeError(
                "Shading Workspace has invalid 'lopoutput'"
                " parameter value. The Shading Workspace"
                " needs to be reset to its default values."
            raise PublishValidationError(
                ("Shading Workspace has invalid 'lopoutput'"
                 " parameter value. The Shading Workspace"
                 " needs to be reset to its default values."),
                title=self.label
            )

@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import (
    PublishValidationError
)


class ValidateVDBInputNode(pyblish.api.InstancePlugin):

@@ -16,7 +19,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):

    """

    order = ValidateContentsOrder + 0.1
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["vdbcache"]
    hosts = ["houdini"]
    label = "Validate Input Node (VDB)"

@@ -24,8 +27,10 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Node connected to the output node is not" "of type VDB!"
            raise PublishValidationError(
                "Node connected to the output node is not of type VDB",
                title=self.label
            )

    @classmethod

@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import pyblish.api
import hou
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError


class ValidateVDBOutputNode(pyblish.api.InstancePlugin):

@@ -17,7 +18,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):

    """

    order = ValidateContentsOrder + 0.1
    order = pyblish.api.ValidatorOrder + 0.1
    families = ["vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node (VDB)"

@@ -25,8 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Node connected to the output node is not" " of type VDB!"
            raise PublishValidationError(
                "Node connected to the output node is not of type VDB!",
                title=self.label
            )

    @classmethod

@@ -36,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
        if node is None:
            cls.log.error(
                "SOP path is not correctly set on "
                "ROP node '%s'." % instance[0].path()
                "ROP node '%s'." % instance.get("instance_node")
            )
            return [instance]

@@ -1,11 +1,17 @@
# -*- coding: utf-8 -*-
import pyblish.api
import hou
from openpype.pipeline import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)
from openpype.pipeline.publish import RepairAction

from openpype.pipeline.publish import RepairAction


class ValidateWorkfilePaths(pyblish.api.InstancePlugin):
class ValidateWorkfilePaths(
        pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
    """Validate workfile paths so they are absolute."""

    order = pyblish.api.ValidatorOrder

@@ -19,6 +25,8 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin):
    prohibited_vars = ["$HIP", "$JOB"]

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        invalid = self.get_invalid()
        self.log.info(
            "node types to check: {}".format(", ".join(self.node_types)))

@@ -30,15 +38,16 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin):
                self.log.error(
                    "{}: {}".format(param.path(), param.unexpandedString()))

            raise RuntimeError("Invalid paths found")
            raise PublishValidationError(
                "Invalid paths found", title=self.label)

    @classmethod
    def get_invalid(cls):
        invalid = []
        for param, _ in hou.fileReferences():
            if param is None:
            # it might return None for some reason
            if not param:
                continue

            # skip nodes we are not interested in
            if param.node().type().name() not in cls.node_types:
                continue

@@ -1,10 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<mainMenu>
  <menuBar>
    <subMenu id="avalon_menu">
    <subMenu id="openpype_menu">
      <label>OpenPype</label>

      <scriptItem id="avalon_create">
      <scriptItem id="openpype_create">
        <label>Create...</label>
        <scriptCode><![CDATA[
import hou

@@ -14,7 +14,7 @@ host_tools.show_creator(parent)
]]></scriptCode>
      </scriptItem>

      <scriptItem id="avalon_load">
      <scriptItem id="openpype_load">
        <label>Load...</label>
        <scriptCode><![CDATA[
import hou

@@ -30,11 +30,11 @@ host_tools.show_loader(parent=parent, use_context=True)
import hou
from openpype.tools.utils import host_tools
parent = hou.qt.mainWindow()
host_tools.show_publish(parent)
host_tools.show_publisher(parent)
]]></scriptCode>
      </scriptItem>

      <scriptItem id="avalon_manage">
      <scriptItem id="openpype_manage">
        <label>Manage...</label>
        <scriptCode><![CDATA[
import hou

@@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
"""OpenPype startup script."""
from openpype.pipeline import install_host
from openpype.hosts.houdini import api
from openpype.hosts.houdini.api import HoudiniHost


def main():
    print("Installing OpenPype ...")
    install_host(api)
    install_host(HoudiniHost())


main()

@@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
"""OpenPype startup script."""
from openpype.pipeline import install_host
from openpype.hosts.houdini import api
from openpype.hosts.houdini.api import HoudiniHost


def main():
    print("Installing OpenPype ...")
    install_host(api)
    install_host(HoudiniHost())


main()

@@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
"""OpenPype startup script."""
from openpype.pipeline import install_host
from openpype.hosts.houdini import api
from openpype.hosts.houdini.api import HoudiniHost


def main():
    print("Installing OpenPype ...")
    install_host(api)
    install_host(HoudiniHost())


main()

@@ -403,13 +403,13 @@ class CollectLook(pyblish.api.InstancePlugin):
        # history = cmds.listHistory(look_sets)
        history = []
        for material in materials:
            history.extend(cmds.listHistory(material))
            history.extend(cmds.listHistory(material, ac=True))

        # handle VrayPluginNodeMtl node - see #1397
        vray_plugin_nodes = cmds.ls(
            history, type="VRayPluginNodeMtl", long=True)
        for vray_node in vray_plugin_nodes:
            history.extend(cmds.listHistory(vray_node))
            history.extend(cmds.listHistory(vray_node, ac=True))

        # handling render attribute sets
        render_set_types = [

@@ -90,7 +90,7 @@ def maketx(source, destination, args, logger):

    maketx_path = get_oiio_tools_path("maketx")

    if not os.path.exists(maketx_path):
    if not maketx_path:
        print(
            "OIIO tool not found in {}".format(maketx_path))
        raise AssertionError("OIIO tool not found")

@@ -60,9 +60,10 @@ def find_executable(executable):
            path to file.

    Returns:
        str: Full path to executable with extension (is file).
        None: When the executable was not found.
        Union[str, None]: Full path to executable with extension which was
            found otherwise None.
    """

    # Skip if passed path is file
    if is_file_executable(executable):
        return executable

@@ -70,24 +71,36 @@ def find_executable(executable):
    low_platform = platform.system().lower()
    _, ext = os.path.splitext(executable)

    # Prepare variants for which it will be looked
    variants = [executable]
    # Add other extension variants only if passed executable does not have one
    if not ext:
        if low_platform == "windows":
            exts = [".exe", ".ps1", ".bat"]
            for ext in os.getenv("PATHEXT", "").split(os.pathsep):
                ext = ext.lower()
                if ext and ext not in exts:
                    exts.append(ext)
        else:
            exts = [".sh"]
    # Prepare extensions to check
    exts = set()
    if ext:
        exts.add(ext.lower())

        for ext in exts:
            variant = executable + ext
            if is_file_executable(variant):
                return variant
            variants.append(variant)
    else:
        # Add other possible extension variants only if passed executable
        # does not have any
        if low_platform == "windows":
            exts |= {".exe", ".ps1", ".bat"}
            for ext in os.getenv("PATHEXT", "").split(os.pathsep):
                exts.add(ext.lower())

        else:
            exts |= {".sh"}

    # Executable is a path but there may be missing extension
    # - this can happen primarily on windows where
    #   e.g. "ffmpeg" should be "ffmpeg.exe"
    exe_dir, exe_filename = os.path.split(executable)
    if exe_dir and os.path.isdir(exe_dir):
        for filename in os.listdir(exe_dir):
            filepath = os.path.join(exe_dir, filename)
            basename, ext = os.path.splitext(filename)
            if (
                basename == exe_filename
                and ext.lower() in exts
                and is_file_executable(filepath)
            ):
                return filepath

    # Get paths where to look for executable
    path_str = os.environ.get("PATH", None)

@@ -97,13 +110,27 @@ def find_executable(executable):
    elif hasattr(os, "defpath"):
        path_str = os.defpath

    if path_str:
        paths = path_str.split(os.pathsep)
        for path in paths:
            for variant in variants:
                filepath = os.path.abspath(os.path.join(path, variant))
                if is_file_executable(filepath):
                    return filepath
    if not path_str:
        return None

    paths = path_str.split(os.pathsep)
    for path in paths:
        if not os.path.isdir(path):
            continue
        for filename in os.listdir(path):
            filepath = os.path.abspath(os.path.join(path, filename))
            # Filename matches executable exactly
            if filename == executable and is_file_executable(filepath):
                return filepath

            basename, ext = os.path.splitext(filename)
            if (
                basename == executable
                and ext.lower() in exts
                and is_file_executable(filepath)
            ):
                return filepath

    return None
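
The rework above changes the lookup strategy: instead of precomputing filename variants and probing each one, it now scans directories and compares basenames against the known extension set. A minimal usage sketch; the import path is an assumption based on where this helper appears to live:

    # Hypothetical usage; the import path is assumed from context.
    from openpype.lib import find_executable

    # On Windows this may resolve "ffmpeg" to e.g. "ffmpeg.exe" via the
    # PATHEXT-derived extension set; elsewhere ".sh" is also considered.
    exe = find_executable("ffmpeg")
    if exe is None:
        print("ffmpeg not found on PATH")
    else:
        print("Resolved executable:", exe)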

@@ -272,8 +299,8 @@ def get_oiio_tools_path(tool="oiiotool"):
    oiio_dir = get_vendor_bin_path("oiio")
    if platform.system().lower() == "linux":
        oiio_dir = os.path.join(oiio_dir, "bin")
    default_path = os.path.join(oiio_dir, tool)
    if _oiio_executable_validation(default_path):
    default_path = find_executable(os.path.join(oiio_dir, tool))
    if default_path and _oiio_executable_validation(default_path):
        tool_executable_path = default_path

    # Look to PATH for the tool

@@ -14,6 +14,137 @@ from Deadline.Scripting import (
    ProcessUtils,
)

VERSION_REGEX = re.compile(
    r"(?P<major>0|[1-9]\d*)"
    r"\.(?P<minor>0|[1-9]\d*)"
    r"\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
    r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
)


class OpenPypeVersion:
    """Fake semver version class for OpenPype version purposes.

    The version is parsed from a string and supports the comparisons
    needed to pick a matching or release-compatible build.
    """
    def __init__(self, major, minor, patch, prerelease, origin=None):
        self.major = major
        self.minor = minor
        self.patch = patch
        self.prerelease = prerelease

        is_valid = True
        if not major or not minor or not patch:
            is_valid = False
        self.is_valid = is_valid

        if origin is None:
            base = "{}.{}.{}".format(str(major), str(minor), str(patch))
            if not prerelease:
                origin = base
            else:
                origin = "{}-{}".format(base, str(prerelease))

        self.origin = origin

    @classmethod
    def from_string(cls, version):
        """Create an object of version from string.

        Args:
            version (str): Version as a string.

        Returns:
            Union[OpenPypeVersion, None]: Version object if input is nonempty
                string otherwise None.
        """

        if not version:
            return None
        valid_parts = VERSION_REGEX.findall(version)
        if len(valid_parts) != 1:
            # Return invalid version with filled 'origin' attribute
            return cls(None, None, None, None, origin=str(version))

        # Unpack found version
        major, minor, patch, pre, post = valid_parts[0]
        prerelease = pre
        # Post release is not important anymore and should be considered as
        # part of prerelease
        # - comparison is implemented to find suitable build and builds should
        #   never contain prerelease part so "not proper" parsing is
        #   acceptable for this use case.
        if post:
            prerelease = "{}+{}".format(pre, post)

        return cls(
            int(major), int(minor), int(patch), prerelease, origin=version
        )

    def has_compatible_release(self, other):
        """Version has compatible release as other version.

        Both major and minor versions must be exactly the same. In that case
        a build can be considered as release compatible with any version.

        Args:
            other (OpenPypeVersion): Other version.

        Returns:
            bool: Version is release compatible with other version.
        """

        if self.is_valid and other.is_valid:
            return self.major == other.major and self.minor == other.minor
        return False

    def __bool__(self):
        return self.is_valid

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self.origin)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return self.origin == other
        return self.origin == other.origin

    def __lt__(self, other):
        if not isinstance(other, self.__class__):
            return None

        if not self.is_valid:
            return True

        if not other.is_valid:
            return False

        if self.origin == other.origin:
            return None

        same_major = self.major == other.major
        if not same_major:
            return self.major < other.major

        same_minor = self.minor == other.minor
        if not same_minor:
            return self.minor < other.minor

        same_patch = self.patch == other.patch
        if not same_patch:
            return self.patch < other.patch

        if not self.prerelease:
            return False

        if not other.prerelease:
            return True

        pres = [self.prerelease, other.prerelease]
        pres.sort()
        return pres[0] == self.prerelease
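
A short sketch of how the helper above behaves with hypothetical version strings (runnable in the same module, after the class definition):

    # Hypothetical values exercising the OpenPypeVersion class above.
    v1 = OpenPypeVersion.from_string("3.14.2")
    v2 = OpenPypeVersion.from_string("3.14.10-nightly.1")
    v3 = OpenPypeVersion.from_string("3.15.0")

    print(v1.has_compatible_release(v2))  # True  - same major and minor
    print(v1.has_compatible_release(v3))  # False - minor differs
    print(v1 < v2)  # True - patch 2 < 10; prerelease only breaks patch ties
    print(bool(OpenPypeVersion.from_string("not-a-version")))  # False
    # Caveat: the truthiness check in __init__ would also mark a version
    # with a 0 component (e.g. "0.1.2") as invalid.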


def get_openpype_version_from_path(path, build=True):
    """Get OpenPype version from provided path.

@@ -21,9 +152,9 @@ def get_openpype_version_from_path(path, build=True):
        build (bool, optional): Get only builds, not sources

    Returns:
        str or None: version of OpenPype if found.

        Union[OpenPypeVersion, None]: version of OpenPype if found.
    """

    # fix path for application bundle on macos
    if platform.system().lower() == "darwin":
        path = os.path.join(path, "Contents", "MacOS", "lib", "Python")

@@ -46,8 +177,10 @@ def get_openpype_version_from_path(path, build=True):
    with open(version_file, "r") as vf:
        exec(vf.read(), version)

    version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"])
    return version_match[1]
    version_str = version.get("__version__")
    if version_str:
        return OpenPypeVersion.from_string(version_str)
    return None


def get_openpype_executable():

@@ -59,6 +192,91 @@ def get_openpype_executable():
    return exe_list, dir_list


def get_openpype_versions(dir_list):
    print(">>> Getting OpenPype executable ...")
    openpype_versions = []

    install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
    if install_dir:
        print("--- Looking for OpenPype at: {}".format(install_dir))
        sub_dirs = [
            f.path for f in os.scandir(install_dir)
            if f.is_dir()
        ]
        for subdir in sub_dirs:
            version = get_openpype_version_from_path(subdir)
            if not version:
                continue
            print(" - found: {} - {}".format(version, subdir))
            openpype_versions.append((version, subdir))
    return openpype_versions


def get_requested_openpype_executable(
    exe, dir_list, requested_version
):
    requested_version_obj = OpenPypeVersion.from_string(requested_version)
    if not requested_version_obj:
        print((
            ">>> Requested version does not match version regex \"{}\""
        ).format(VERSION_REGEX))
        return None

    print((
        ">>> Scanning for compatible requested version {}"
    ).format(requested_version))
    openpype_versions = get_openpype_versions(dir_list)
    if not openpype_versions:
        return None

    # if looking for requested compatible version,
    # add the implicitly specified to the list too.
    if exe:
        exe_dir = os.path.dirname(exe)
        print("Looking for OpenPype at: {}".format(exe_dir))
        version = get_openpype_version_from_path(exe_dir)
        if version:
            print(" - found: {} - {}".format(version, exe_dir))
            openpype_versions.append((version, exe_dir))

    matching_item = None
    compatible_versions = []
    for version_item in openpype_versions:
        version, version_dir = version_item
        if requested_version_obj.has_compatible_release(version):
            compatible_versions.append(version_item)
            if version == requested_version_obj:
                # Store version item if version match exactly
                # - break if is found matching version
                matching_item = version_item
                break

    if not compatible_versions:
        return None

    compatible_versions.sort(key=lambda item: item[0])
    if matching_item:
        version, version_dir = matching_item
        print((
            "*** Found exact match build version {} in {}"
        ).format(version, version_dir))

    else:
        version, version_dir = compatible_versions[-1]

        print((
            "*** Latest compatible version found is {} in {}"
        ).format(version, version_dir))

    # create list of executables for different platform and let
    # Deadline decide.
    exe_list = [
        os.path.join(version_dir, "openpype_console.exe"),
        os.path.join(version_dir, "openpype_console")
    ]
    return FileUtils.SearchFileList(";".join(exe_list))
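
For orientation, the selection flow above reduces to: parse the requested version, gather installed builds, prefer an exact match, otherwise take the newest release-compatible one. A condensed walk-through with hypothetical install paths:

    # Hypothetical builds discovered on a worker.
    found = [
        (OpenPypeVersion.from_string("3.14.2"), "C:/OpenPype/3.14.2"),
        (OpenPypeVersion.from_string("3.14.5"), "C:/OpenPype/3.14.5"),
        (OpenPypeVersion.from_string("3.15.0"), "C:/OpenPype/3.15.0"),
    ]
    requested = OpenPypeVersion.from_string("3.14.3")

    compatible = [
        item for item in found
        if requested.has_compatible_release(item[0])
    ]
    compatible.sort(key=lambda item: item[0])
    # No exact 3.14.3 build exists, so the newest compatible one wins.
    print(compatible[-1][1])  # C:/OpenPype/3.14.5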

def inject_openpype_environment(deadlinePlugin):
    """ Pull env vars from OpenPype and push them to rendering process.

@@ -68,93 +286,29 @@ def inject_openpype_environment(deadlinePlugin):

    print(">>> Injecting OpenPype environments ...")
    try:
        print(">>> Getting OpenPype executable ...")
        exe_list, dir_list = get_openpype_executable()
        openpype_versions = []
        # if the job requires specific OpenPype version,
        # lets go over all available and find compatible build.
        exe = FileUtils.SearchFileList(exe_list)

        requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
        if requested_version:
            print((
                ">>> Scanning for compatible requested version {}"
            ).format(requested_version))
            install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
            if install_dir:
                print("--- Looking for OpenPype at: {}".format(install_dir))
                sub_dirs = [
                    f.path for f in os.scandir(install_dir)
                    if f.is_dir()
                ]
                for subdir in sub_dirs:
                    version = get_openpype_version_from_path(subdir)
                    if not version:
                        continue
                    print(" - found: {} - {}".format(version, subdir))
                    openpype_versions.append((version, subdir))
            exe = get_requested_openpype_executable(
                exe, dir_list, requested_version
            )
            if exe is None:
                raise RuntimeError((
                    "Cannot find compatible version available for version {}"
                    " requested by the job. Please add it through plugin"
                    " configuration in Deadline or install it to configured"
                    " directory."
                ).format(requested_version))

        exe = FileUtils.SearchFileList(exe_list)
        if openpype_versions:
            # if looking for requested compatible version,
            # add the implicitly specified to the list too.
            print("Looking for OpenPype at: {}".format(os.path.dirname(exe)))
            version = get_openpype_version_from_path(
                os.path.dirname(exe))
            if version:
                print(" - found: {} - {}".format(
                    version, os.path.dirname(exe)
                ))
                openpype_versions.append((version, os.path.dirname(exe)))

        if requested_version:
            # sort detected versions
            if openpype_versions:
                # use natural sorting
                openpype_versions.sort(
                    key=lambda ver: [
                        int(t) if t.isdigit() else t.lower()
                        for t in re.split(r"(\d+)", ver[0])
                    ])
                print((
                    "*** Latest available version found is {}"
                ).format(openpype_versions[-1][0]))
            requested_major, requested_minor, _ = requested_version.split(".")[:3]  # noqa: E501
            compatible_versions = []
            for version in openpype_versions:
                v = version[0].split(".")[:3]
                if v[0] == requested_major and v[1] == requested_minor:
                    compatible_versions.append(version)
            if not compatible_versions:
                raise RuntimeError(
                    ("Cannot find compatible version available "
                     "for version {} requested by the job. "
                     "Please add it through plugin configuration "
                     "in Deadline or install it to configured "
                     "directory.").format(requested_version))
            # sort compatible versions and pick the last one
            compatible_versions.sort(
                key=lambda ver: [
                    int(t) if t.isdigit() else t.lower()
                    for t in re.split(r"(\d+)", ver[0])
                ])
            print((
                "*** Latest compatible version found is {}"
            ).format(compatible_versions[-1][0]))
            # create list of executables for different platform and let
            # Deadline decide.
            exe_list = [
                os.path.join(
                    compatible_versions[-1][1], "openpype_console.exe"),
                os.path.join(
                    compatible_versions[-1][1], "openpype_console")
            ]
            exe = FileUtils.SearchFileList(";".join(exe_list))
        if exe == "":
            raise RuntimeError(
                "OpenPype executable was not found " +
                "in the semicolon separated list " +
                "\"" + ";".join(exe_list) + "\". " +
                "The path to the render executable can be configured " +
                "from the Plugin Configuration in the Deadline Monitor.")
        if not exe:
            raise RuntimeError((
                "OpenPype executable was not found in the semicolon "
                "separated list \"{}\". "
                "The path to the render executable can be configured"
                " from the Plugin Configuration in the Deadline Monitor."
            ).format(";".join(exe_list)))

        print("--- OpenPype executable: {}".format(exe))

@@ -172,22 +326,22 @@ def inject_openpype_environment(deadlinePlugin):
            export_url
        ]

        add_args = {}
        add_args['project'] = \
            job.GetJobEnvironmentKeyValue('AVALON_PROJECT')
        add_args['asset'] = job.GetJobEnvironmentKeyValue('AVALON_ASSET')
        add_args['task'] = job.GetJobEnvironmentKeyValue('AVALON_TASK')
        add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME')
        add_args["envgroup"] = "farm"
        add_kwargs = {
            "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"),
            "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"),
            "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"),
            "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"),
            "envgroup": "farm"
        }
        if all(add_kwargs.values()):
            for key, value in add_kwargs.items():
                args.extend(["--{}".format(key), value])

        if all(add_args.values()):
            for key, value in add_args.items():
                args.append("--{}".format(key))
                args.append(value)
        else:
            msg = "Required env vars: AVALON_PROJECT, AVALON_ASSET, " + \
                  "AVALON_TASK, AVALON_APP_NAME"
            raise RuntimeError(msg)
            raise RuntimeError((
                "Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
                " AVALON_TASK, AVALON_APP_NAME"
            ))

        if not os.environ.get("OPENPYPE_MONGO"):
            print(">>> Missing OPENPYPE_MONGO env var, process won't work")
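
The dict-based rewrite above expands each farm context key into a CLI flag pair. A small sketch of just that expansion with hypothetical values (the leading entries of 'args' are elided in the hunk, so an empty list stands in here):

    # Hypothetical job context values.
    add_kwargs = {
        "project": "demo_project",
        "asset": "sh010",
        "task": "comp",
        "app": "nuke/13-2",
        "envgroup": "farm",
    }
    args = []  # the real code appends to a pre-built argument list
    if all(add_kwargs.values()):
        for key, value in add_kwargs.items():
            args.extend(["--{}".format(key), value])
    # args -> ['--project', 'demo_project', '--asset', 'sh010', ...]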

@@ -208,12 +362,12 @@ def inject_openpype_environment(deadlinePlugin):
        print(">>> Loading file ...")
        with open(export_url) as fp:
            contents = json.load(fp)
            for key, value in contents.items():
                deadlinePlugin.SetProcessEnvironmentVariable(key, value)

        for key, value in contents.items():
            deadlinePlugin.SetProcessEnvironmentVariable(key, value)

        script_url = job.GetJobPluginInfoKeyValue("ScriptFilename")
        if script_url:

            script_url = script_url.format(**contents).replace("\\", "/")
            print(">>> Setting script path {}".format(script_url))
            job.SetJobPluginInfoKeyValue("ScriptFilename", script_url)

@@ -7,10 +7,8 @@ import pyblish.api

from openpype.client import get_asset_by_id
from openpype.lib import filter_profiles
from openpype.pipeline import KnownPublishError


# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC`
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
CUST_ATTR_GROUP = "openpype"


@@ -19,7 +17,6 @@ CUST_ATTR_GROUP = "openpype"
def get_pype_attr(session, split_hierarchical=True):
    custom_attributes = []
    hier_custom_attributes = []
    # TODO remove deprecated "avalon" group from query
    cust_attrs_query = (
        "select id, entity_type, object_type_id, is_hierarchical, default"
        " from CustomAttributeConfiguration"

@@ -79,120 +76,284 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
    create_task_status_profiles = []

    def process(self, context):
        self.context = context
        if "hierarchyContext" not in self.context.data:
        if "hierarchyContext" not in context.data:
            return

        hierarchy_context = self._get_active_assets(context)
        self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))

        session = self.context.data["ftrackSession"]
        project_name = self.context.data["projectEntity"]["name"]
        query = 'Project where full_name is "{}"'.format(project_name)
        project = session.query(query).one()
        auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC]
        session = context.data["ftrackSession"]
        project_name = context.data["projectName"]
        project = session.query(
            'select id, full_name from Project where full_name is "{}"'.format(
                project_name
            )
        ).first()
        if not project:
            raise KnownPublishError(
                "Project \"{}\" was not found on ftrack.".format(project_name)
            )

        self.context = context
        self.session = session
        self.ft_project = project
        self.task_types = self.get_all_task_types(project)
        self.task_statuses = self.get_task_statuses(project)

        # temporarily disable ftrack project's autosyncing
        if auto_sync_state:
            self.auto_sync_off(project)
        # import ftrack hierarchy
        self.import_to_ftrack(project_name, hierarchy_context)

        try:
            # import ftrack hierarchy
            self.import_to_ftrack(project_name, hierarchy_context)
        except Exception:
            raise
        finally:
            if auto_sync_state:
                self.auto_sync_on(project)

    def query_ftrack_entitites(self, session, ft_project):
        project_id = ft_project["id"]
        entities = session.query((
            "select id, name, parent_id"
            " from TypedContext where project_id is \"{}\""
        ).format(project_id)).all()

    def import_to_ftrack(self, project_name, input_data, parent=None):
        entities_by_id = {}
        entities_by_parent_id = collections.defaultdict(list)
        for entity in entities:
            entities_by_id[entity["id"]] = entity
            parent_id = entity["parent_id"]
            entities_by_parent_id[parent_id].append(entity)

        ftrack_hierarchy = []
        ftrack_id_queue = collections.deque()
        ftrack_id_queue.append((project_id, ftrack_hierarchy))
        while ftrack_id_queue:
            item = ftrack_id_queue.popleft()
            ftrack_id, parent_list = item
            if ftrack_id == project_id:
                entity = ft_project
                name = entity["full_name"]
            else:
                entity = entities_by_id[ftrack_id]
                name = entity["name"]

            children = []
            parent_list.append({
                "name": name,
                "low_name": name.lower(),
                "entity": entity,
                "children": children,
            })
            for child in entities_by_parent_id[ftrack_id]:
                ftrack_id_queue.append((child["id"], children))
        return ftrack_hierarchy

    def find_matching_ftrack_entities(
        self, hierarchy_context, ftrack_hierarchy
    ):
        walk_queue = collections.deque()
        for entity_name, entity_data in hierarchy_context.items():
            walk_queue.append(
                (entity_name, entity_data, ftrack_hierarchy)
            )

        matching_ftrack_entities = []
        while walk_queue:
            item = walk_queue.popleft()
            entity_name, entity_data, ft_children = item
            matching_ft_child = None
            for ft_child in ft_children:
                if ft_child["low_name"] == entity_name.lower():
                    matching_ft_child = ft_child
                    break

            if matching_ft_child is None:
                continue

            entity = matching_ft_child["entity"]
            entity_data["ft_entity"] = entity
            matching_ftrack_entities.append(entity)

            hierarchy_children = entity_data.get("childs")
            if not hierarchy_children:
                continue

            for child_name, child_data in hierarchy_children.items():
                walk_queue.append(
                    (child_name, child_data, matching_ft_child["children"])
                )
        return matching_ftrack_entities
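
Both helpers above flatten a nested hierarchy with a deque instead of recursion, matching children case-insensitively by name. A standalone sketch of that traversal; the dictionary shape is assumed from the "childs" keys used in this plugin:

    import collections

    # Assumed shape of the hierarchy context handled above.
    hierarchy = {
        "shots": {"childs": {"sh010": {"childs": {}}, "sh020": {"childs": {}}}},
    }

    walk_queue = collections.deque(hierarchy.items())
    visited = []
    while walk_queue:
        name, data = walk_queue.popleft()
        visited.append(name)
        for child_item in (data.get("childs") or {}).items():
            walk_queue.append(child_item)
    print(visited)  # ['shots', 'sh010', 'sh020']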

    def query_custom_attribute_values(self, session, entities, hier_attrs):
        attr_ids = {
            attr["id"]
            for attr in hier_attrs
        }
        entity_ids = {
            entity["id"]
            for entity in entities
        }
        output = {
            entity_id: {}
            for entity_id in entity_ids
        }
        if not attr_ids or not entity_ids:
            return {}

        joined_attr_ids = ",".join(
            ['"{}"'.format(attr_id) for attr_id in attr_ids]
        )

        # Query values in chunks
        chunk_size = int(5000 / len(attr_ids))
        # Make sure entity_ids is `list` for chunk selection
        entity_ids = list(entity_ids)
        results = []
        for idx in range(0, len(entity_ids), chunk_size):
            joined_entity_ids = ",".join([
                '"{}"'.format(entity_id)
                for entity_id in entity_ids[idx:idx + chunk_size]
            ])
            results.extend(
                session.query(
                    (
                        "select value, entity_id, configuration_id"
                        " from CustomAttributeValue"
                        " where entity_id in ({}) and configuration_id in ({})"
                    ).format(
                        joined_entity_ids,
                        joined_attr_ids
                    )
                ).all()
            )

        for result in results:
            attr_id = result["configuration_id"]
            entity_id = result["entity_id"]
            output[entity_id][attr_id] = result["value"]

        return output
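
The chunking above keeps each ftrack query's "in (...)" clause bounded instead of sending thousands of ids at once. The same batching idea in isolation; chunk size and data are illustrative:

    def iter_chunks(items, chunk_size):
        """Yield fixed-size slices of 'items'."""
        items = list(items)
        for idx in range(0, len(items), chunk_size):
            yield items[idx:idx + chunk_size]

    entity_ids = ["id-{}".format(i) for i in range(12000)]
    for chunk in iter_chunks(entity_ids, 5000):
        joined = ",".join('"{}"'.format(eid) for eid in chunk)
        # a session.query(...) call would consume 'joined' here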

    def import_to_ftrack(self, project_name, hierarchy_context):
        # Prequery hierarchical custom attributes
        hier_custom_attributes = get_pype_attr(self.session)[1]
        hier_attrs = get_pype_attr(self.session)[1]
        hier_attr_by_key = {
            attr["key"]: attr
            for attr in hier_custom_attributes
            for attr in hier_attrs
        }
        # Query user entity (for comments)
        user = self.session.query(
            "User where username is \"{}\"".format(self.session.api_user)
        ).first()
        if not user:
            self.log.warning(
                "Was not able to query current User {}".format(
                    self.session.api_user
                )
            )

        # Query ftrack hierarchy with parenting
        ftrack_hierarchy = self.query_ftrack_entitites(
            self.session, self.ft_project)

        # Fill ftrack entities to hierarchy context
        # - there is no need to query entities again
        matching_entities = self.find_matching_ftrack_entities(
            hierarchy_context, ftrack_hierarchy)
        # Query custom attribute values of each entity
        custom_attr_values_by_id = self.query_custom_attribute_values(
            self.session, matching_entities, hier_attrs)

        # Get ftrack api module (as they are different per python version)
        ftrack_api = self.context.data["ftrackPythonModule"]

        for entity_name in input_data:
            entity_data = input_data[entity_name]
        # Use queue of hierarchy items to process
        import_queue = collections.deque()
        for entity_name, entity_data in hierarchy_context.items():
            import_queue.append(
                (entity_name, entity_data, None)
            )

        while import_queue:
            item = import_queue.popleft()
            entity_name, entity_data, parent = item

            entity_type = entity_data['entity_type']
            self.log.debug(entity_data)
            self.log.debug(entity_type)

            if entity_type.lower() == 'project':
                entity = self.ft_project

            elif self.ft_project is None or parent is None:
            entity = entity_data.get("ft_entity")
            if entity is None and entity_type.lower() == "project":
                raise AssertionError(
                    "Collected items are not in right order!"
                )

            # try to find if entity already exists
            else:
                query = (
                    'TypedContext where name is "{0}" and '
                    'project_id is "{1}"'
                ).format(entity_name, self.ft_project["id"])
                try:
                    entity = self.session.query(query).one()
                except Exception:
                    entity = None

            # Create entity if not exists
            if entity is None:
                entity = self.create_entity(
                    name=entity_name,
                    type=entity_type,
                    parent=parent
                )
                entity = self.session.create(entity_type, {
                    "name": entity_name,
                    "parent": parent
                })
                entity_data["ft_entity"] = entity

            # self.log.info('entity: {}'.format(dict(entity)))
            # CUSTOM ATTRIBUTES
            custom_attributes = entity_data.get('custom_attributes', [])
            instances = [
                instance
                for instance in self.context
                if instance.data.get("asset") == entity["name"]
            ]
            custom_attributes = entity_data.get('custom_attributes', {})
            instances = []
            for instance in self.context:
                instance_asset_name = instance.data.get("asset")
                if (
                    instance_asset_name
                    and instance_asset_name.lower() == entity["name"].lower()
                ):
                    instances.append(instance)

            for instance in instances:
                instance.data["ftrackEntity"] = entity

            for key in custom_attributes:
            for key, cust_attr_value in custom_attributes.items():
                if cust_attr_value is None:
                    continue

                hier_attr = hier_attr_by_key.get(key)
                # Use simple method if key is not hierarchical
                if not hier_attr:
                    assert (key in entity['custom_attributes']), (
                        'Missing custom attribute key: `{0}` in attrs: '
                        '`{1}`'.format(key, entity['custom_attributes'].keys())
                    if key not in entity["custom_attributes"]:
                        raise KnownPublishError((
                            "Missing custom attribute in ftrack with name '{}'"
                        ).format(key))

                    entity['custom_attributes'][key] = cust_attr_value
                    continue

                attr_id = hier_attr["id"]
                entity_values = custom_attr_values_by_id.get(entity["id"], {})
                # New value is defined by having id in values
                # - it can be set to 'None' (ftrack allows that using API)
                is_new_value = attr_id not in entity_values
                attr_value = entity_values.get(attr_id)

                # Use ftrack operations method to set hierarchical
                # attribute value.
                # - this is because there may be non-hierarchical custom
                #   attributes with different properties
                entity_key = collections.OrderedDict((
                    ("configuration_id", hier_attr["id"]),
                    ("entity_id", entity["id"])
                ))
                op = None
                if is_new_value:
                    op = ftrack_api.operation.CreateEntityOperation(
                        "CustomAttributeValue",
                        entity_key,
                        {"value": cust_attr_value}
                    )

                    entity['custom_attributes'][key] = custom_attributes[key]

                else:
                    # Use ftrack operations method to set hierarchical
                    # attribute value.
                    # - this is because there may be non-hierarchical custom
                    #   attributes with different properties
                    entity_key = collections.OrderedDict()
                    entity_key["configuration_id"] = hier_attr["id"]
                    entity_key["entity_id"] = entity["id"]
                    self.session.recorded_operations.push(
                        ftrack_api.operation.UpdateEntityOperation(
                            "ContextCustomAttributeValue",
                            entity_key,
                            "value",
                            ftrack_api.symbol.NOT_SET,
                            custom_attributes[key]
                        )
                elif attr_value != cust_attr_value:
                    op = ftrack_api.operation.UpdateEntityOperation(
                        "CustomAttributeValue",
                        entity_key,
                        "value",
                        attr_value,
                        cust_attr_value
                    )

                if op is not None:
                    self.session.recorded_operations.push(op)

            if self.session.recorded_operations:
                try:
                    self.session.commit()
                except Exception:

@@ -206,7 +367,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        for instance in instances:
            task_name = instance.data.get("task")
            if task_name:
                instances_by_task_name[task_name].append(instance)
                instances_by_task_name[task_name.lower()].append(instance)

        tasks = entity_data.get('tasks', [])
        existing_tasks = []

@@ -247,30 +408,28 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                six.reraise(tp, value, tb)

            # Create notes.
            user = self.session.query(
                "User where username is \"{}\"".format(self.session.api_user)
            ).first()
            if user:
                for comment in entity_data.get("comments", []):
            entity_comments = entity_data.get("comments")
            if user and entity_comments:
                for comment in entity_comments:
                    entity.create_note(comment, user)
            else:
                self.log.warning(
                    "Was not able to query current User {}".format(
                        self.session.api_user
                    )
                )
                try:
                    self.session.commit()
                except Exception:
                    tp, value, tb = sys.exc_info()
                    self.session.rollback()
                    self.session._configure_locations()
                    six.reraise(tp, value, tb)

            try:
                self.session.commit()
            except Exception:
                tp, value, tb = sys.exc_info()
                self.session.rollback()
                self.session._configure_locations()
                six.reraise(tp, value, tb)

            # Import children.
            if 'childs' in entity_data:
                self.import_to_ftrack(
                    project_name, entity_data['childs'], entity)
            children = entity_data.get("childs")
            if not children:
                continue

            for entity_name, entity_data in children.items():
                import_queue.append(
                    (entity_name, entity_data, entity)
                )

    def create_links(self, project_name, entity_data, entity):
        # Clear existing links.

@@ -366,48 +525,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

        return task

    def create_entity(self, name, type, parent):
        entity = self.session.create(type, {
            'name': name,
            'parent': parent
        })
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            self.session._configure_locations()
            six.reraise(tp, value, tb)

        return entity

    def auto_sync_off(self, project):
        project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False

        self.log.info("Ftrack autosync switched off")

        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            self.session._configure_locations()
            six.reraise(tp, value, tb)

    def auto_sync_on(self, project):

        project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True

        self.log.info("Ftrack autosync switched on")

        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            self.session._configure_locations()
            six.reraise(tp, value, tb)

    def _get_active_assets(self, context):
        """ Returns only asset dictionary.
            Usually the last part of deep dictionary which

@@ -429,19 +546,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

        hierarchy_context = context.data["hierarchyContext"]

        active_assets = []
        active_assets = set()
        # filter only the active publishing instances
        for instance in context:
            if instance.data.get("publish") is False:
                continue

            if not instance.data.get("asset"):
                continue

            active_assets.append(instance.data["asset"])
            asset_name = instance.data.get("asset")
            if asset_name:
                active_assets.add(asset_name)

        # remove duplicity in list
        active_assets = list(set(active_assets))
        self.log.debug("__ active_assets: {}".format(active_assets))
        self.log.debug("__ active_assets: {}".format(list(active_assets)))

        return get_pure_hierarchy_data(hierarchy_context)

@@ -199,7 +199,7 @@ class InstanceMember:
        })


class AttributeValues:
class AttributeValues(object):
    """Container which keeps values of Attribute definitions.

    Goal is to have one object which holds values of attribute definitions for

@@ -584,6 +584,7 @@ class CreatedInstance:
            if key in data:
                data.pop(key)

        self._data["variant"] = self._data.get("variant") or ""
        # Stored creator specific attribute values
        # {key: value}
        creator_values = copy.deepcopy(orig_creator_attributes)

@@ -188,7 +188,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
        for subset_doc in subset_docs:
            subset_id = subset_doc["_id"]
            last_version_doc = last_version_docs_by_subset_id.get(subset_id)
            if last_version_docs_by_subset_id is None:
            if last_version_doc is None:
                continue

            asset_id = subset_doc["parent"]

@@ -36,7 +36,7 @@ class CreatorsModel(QtGui.QStandardItemModel):
        if not items:
            item = QtGui.QStandardItem("No registered families")
            item.setEnabled(False)
            item.setData(QtCore.Qt.ItemIsEnabled, False)
            item.setData(False, QtCore.Qt.ItemIsEnabled)
            items.append(item)

        self.invisibleRootItem().appendRows(items)