mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-26 13:52:15 +01:00
add pre-launch hook, minor fixes
This commit is contained in:
parent
fdd6ab3a95
commit
e19e4f58ec
15 changed files with 48 additions and 196 deletions
|
|
@ -14,10 +14,10 @@ class OpenPypeCreatorError(Exception):
|
|||
|
||||
class Creator(PypeCreatorMixin, houdini.Creator):
    """Houdini creator that wraps Houdini errors for Avalon.

    Any ``hou.Error`` raised while creating the instance is re-raised as
    ``OpenPypeCreatorError`` (a standard Python exception) so Avalon's
    error handling can catch it, while the original traceback is kept.
    """

    def process(self):
        try:
            # Re-raise as standard Python exception so
            # Avalon can catch it.
            instance = super(Creator, self).process()
            self._process(instance)
        except hou.Error:
            # NOTE: preserve the original traceback via sys.exc_info()[2];
            # the caught exception object itself is not needed.
            six.reraise(
                OpenPypeCreatorError,
                OpenPypeCreatorError("Creator error"),
                sys.exc_info()[2],
            )
|
||||
|
|
|
|||
18
openpype/hosts/houdini/hooks/set_paths.py
Normal file
18
openpype/hosts/houdini/hooks/set_paths.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
from openpype.lib import PreLaunchHook
|
||||
import os
|
||||
|
||||
|
||||
class SetPath(PreLaunchHook):
    """Set current dir to workdir.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = ["houdini"]

    def execute(self):
        # The workdir is expected to be filled in by an earlier hook
        # (`GlobalHostDataHook`); warn and bail out when it is missing.
        target_dir = self.launch_context.env.get("AVALON_WORKDIR", "")
        if not target_dir:
            self.log.warning("BUG: Workdir is not filled.")
            return

        os.chdir(target_dir)
|
||||
|
|
@ -33,7 +33,6 @@ class CreateAlembicCamera(plugin.Creator):
|
|||
if self.nodes:
|
||||
node = self.nodes[0]
|
||||
path = node.path()
|
||||
|
||||
# Split the node path into the first root and the remainder
|
||||
# So we can set the root and objects parameters correctly
|
||||
_, root, remainder = path.split("/", 2)
|
||||
|
|
|
|||
|
|
@ -35,5 +35,10 @@ class CreateCompositeSequence(plugin.Creator):
|
|||
# Lock any parameters in this list
|
||||
to_lock = ["prim_to_detail_pattern"]
|
||||
for name in to_lock:
|
||||
parm = instance.parm(name)
|
||||
parm.lock(True)
|
||||
try:
|
||||
parm = instance.parm(name)
|
||||
parm.lock(True)
|
||||
except AttributeError:
|
||||
# missing lock pattern
|
||||
self.log.debug(
|
||||
"missing lock pattern {}".format(name))
|
||||
|
|
|
|||
|
|
@ -38,9 +38,13 @@ class CreateRedshiftROP(plugin.Creator):
|
|||
instance.setName(basename + "_ROP", unique_name=True)
|
||||
|
||||
# Also create the linked Redshift IPR Rop
|
||||
ipr_rop = self.parent.createNode(
|
||||
"Redshift_IPR", node_name=basename + "_IPR"
|
||||
)
|
||||
try:
|
||||
ipr_rop = self.parent.createNode(
|
||||
"Redshift_IPR", node_name=basename + "_IPR"
|
||||
)
|
||||
except hou.OperationFailed:
|
||||
raise Exception(("Cannot create Redshift node. Is Redshift "
|
||||
"installed and enabled?"))
|
||||
|
||||
# Move it to directly under the Redshift ROP
|
||||
ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1))
|
||||
|
|
|
|||
|
|
@ -1,21 +0,0 @@
|
|||
from openpype.hosts.houdini.api import plugin
|
||||
from openpype.hosts.houdini.api import lib
|
||||
|
||||
|
||||
class CreateRemotePublish(plugin.Creator):
    """Creator for a Remote Publish Submission Settings node."""

    icon = "cloud-upload"
    label = "Remote Publish"
    family = "remotePublish"

    def _process(self, instance):
        """Stub creator entry point.

        Rather than producing a regular instance for the instance
        collector to pick up, this forces creation of the one node we
        explicitly want. The class exists solely so Artists can also
        create the node through the Avalon creator.

        """
        # Force-create the Remote Publish submission settings node.
        lib.create_remote_publish_node(force=True)
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
import re
|
||||
from openpype.hosts.houdini.api import plugin
|
||||
import hou
|
||||
|
||||
|
||||
class CreateUSDModel(plugin.Creator):
    """Author USD Model"""

    label = "USD Model"
    family = "usdModel"
    icon = "gears"

    def _process(self, instance):
        """Creator main entry point.

        Creates an ``op::author_model:1.0`` node under ``/stage`` named
        after the subset, wiring the asset/variant parameters and the
        first selected node (if any) as the geometry path.

        Args:
            instance (hou.Node): Created Houdini instance.

        Raises:
            ValueError: If the subset name is not prefixed with
                ``usdModel`` so no variant can be derived from it.

        """
        node_type = "op::author_model:1.0"

        subset = self.data["subset"]
        name = "author_{}".format(subset)

        # Bug fix: guard the match so a non-conforming subset raises a
        # clear error instead of AttributeError on None.
        match = re.match("usdModel(.*)", subset)
        if match is None:
            raise ValueError(
                "Subset {0!r} does not start with 'usdModel'".format(subset)
            )
        variant = match.group(1)

        # Get stage root and create node
        # (removed leftover debug print of the node type/name)
        stage = hou.node("/stage")
        instance = stage.createNode(node_type, node_name=name)
        instance.moveToGoodPosition(move_unconnected=True)

        parms = {"asset_name": self.data["asset"], "variant_name": variant}

        # Set the Geo Path to the first selected node (if any)
        selection = hou.selectedNodes()
        if selection:
            node = selection[0]
            parms["geo_path"] = node.path()

        instance.setParms(parms)
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
from openpype.hosts.houdini.api import plugin
|
||||
import hou
|
||||
|
||||
|
||||
class _USDWorkspace(plugin.Creator):
    """Base class to create pre-built USD Workspaces"""

    # Subclasses must fill these in; the creator bails out otherwise.
    node_name = None
    node_type = None
    step = None
    icon = "gears"

    def _process(self, instance):
        """Creator main entry point.

        Args:
            instance (hou.Node): Created Houdini instance.

        """
        # Guard: refuse to run with an incompletely configured subclass.
        if not all([self.node_type, self.node_name, self.step]):
            self.log.error("Incomplete USD Workspace parameters")
            return

        # Force the subset name to follow the "usd{step}" convention.
        self.data["subset"] = "usd{step}".format(step=self.step)

        # Create the workspace node under the stage root.
        stage = hou.node("/stage")
        instance = stage.createNode(self.node_type,
                                    node_name=self.node_name)
        instance.moveToGoodPosition()

        # With the Workspace HDAs there is no need to imprint the instance
        # data since this data is pre-built into it. However, we do set the
        # right asset as that can be defined by the user.
        instance.setParms({"asset": self.data["asset"]})

        return instance
|
||||
|
||||
|
||||
class USDCreateShadingWorkspace(_USDWorkspace):
    """USD Shading Workspace"""

    label = "USD Shading Workspace"
    family = "colorbleed.shade.usd"
    defaults = ["Shade"]

    # Pre-built workspace HDA parameters (see _USDWorkspace).
    step = "Shade"
    node_name = "shadingWorkspace"
    node_type = "op::shadingWorkspace::1.0"
|
||||
|
||||
|
||||
# Don't allow the base class to be picked up by Avalon
del _USDWorkspace
|
||||
|
|
@ -10,13 +10,10 @@ class ExtractAlembic(openpype.api.Extractor):
|
|||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract Alembic"
|
||||
hosts = ["houdini"]
|
||||
targets = ["local"]
|
||||
families = ["pointcache", "camera"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
import hou
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
# Get the filename from the filename parameter
|
||||
|
|
|
|||
|
|
@ -11,7 +11,6 @@ class ExtractComposite(openpype.api.Extractor):
|
|||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract Composite (Image Sequence)"
|
||||
hosts = ["houdini"]
|
||||
targets = ["local"]
|
||||
families = ["imagesequence"]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ class ExtractUSD(openpype.api.Extractor):
|
|||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract USD"
|
||||
hosts = ["houdini"]
|
||||
targets = ["local"]
|
||||
families = ["usd",
|
||||
"usdModel",
|
||||
"usdSetDress"]
|
||||
|
|
@ -31,7 +30,13 @@ class ExtractUSD(openpype.api.Extractor):
|
|||
|
||||
assert os.path.exists(output), "Output does not exist: %s" % output
|
||||
|
||||
if "files" not in instance.data:
|
||||
instance.data["files"] = []
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
instance.data["files"].append(file_name)
|
||||
representation = {
|
||||
'name': 'usd',
|
||||
'ext': 'usd',
|
||||
'files': file_name,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
|
|
|||
|
|
@ -155,8 +155,7 @@ class ExtractUSDLayered(openpype.api.Extractor):
|
|||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract Layered USD"
|
||||
hosts = ["houdini"]
|
||||
targets = ["local"]
|
||||
families = ["colorbleed.usd.layered", "usdShade"]
|
||||
families = ["usdLayered", "usdShade"]
|
||||
|
||||
# Force Output Processors so it will always save any file
|
||||
# into our unique staging directory with processed Avalon paths
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import os
|
|||
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
from openpype.hosts.api.houdini.lib import render_rop
|
||||
from openpype.hosts.houdini.api.lib import render_rop
|
||||
|
||||
|
||||
class ExtractVDBCache(openpype.api.Extractor):
|
||||
|
|
@ -10,13 +10,10 @@ class ExtractVDBCache(openpype.api.Extractor):
|
|||
order = pyblish.api.ExtractorOrder + 0.1
|
||||
label = "Extract VDB Cache"
|
||||
families = ["vdbcache"]
|
||||
targets = ["local"]
|
||||
hosts = ["houdini"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
import hou
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
# Get the filename from the filename parameter
|
||||
|
|
@ -36,8 +33,8 @@ class ExtractVDBCache(openpype.api.Extractor):
|
|||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
"name": "mov",
|
||||
"ext": "mov",
|
||||
"name": "vdb",
|
||||
"ext": "vdb",
|
||||
"files": output,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,50 +0,0 @@
|
|||
import pyblish.api
|
||||
import openpype.api
|
||||
|
||||
|
||||
class ValidatOutputNodeExists(pyblish.api.InstancePlugin):
    """Validate that the ROP node's SOP path points at a real output node.

    Rules:
        * The node's SOP path parameter must resolve to an existing node.
        * That node must be an ``output`` (or ``cam``) node so the export
          path is well defined.

    """

    order = openpype.api.ValidateContentsOrder
    families = ["*"]
    hosts = ["houdini"]
    label = "Output Node Exists"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Could not find output node(s)!")

    @classmethod
    def get_invalid(cls, instance):
        """Return paths of ROP nodes whose SOP path is missing/invalid."""

        import hou

        result = set()

        node = instance[0]
        # Alembic ROPs name the parameter differently than geometry ROPs.
        if node.type().name() == "alembic":
            soppath_parm = "sop_path"
        else:
            # Fall back to geometry node
            soppath_parm = "soppath"

        sop_path = node.parm(soppath_parm).eval()
        output_node = hou.node(sop_path)

        if output_node is None:
            cls.log.error("Node at '%s' does not exist" % sop_path)
            result.add(node.path())
            # Bug fix: return early — dereferencing output_node.type()
            # below would raise AttributeError on None.
            return result

        # "cam" is also accepted as this is a legitimate output type
        # (cameras cannot end in an output SOP).
        if output_node.type().name() not in ["output", "cam"]:
            cls.log.error("SOP Path does not end path at output node")
            result.add(node.path())

        return result
|
||||
|
|
@ -1,5 +1,4 @@
|
|||
from avalon import api, houdini
|
||||
import hou
|
||||
|
||||
|
||||
def main():
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue