Merge pull request #2072 from pypeclub/enhancement/houdini-hda-workflow

Houdini: simple HDA workflow
This commit is contained in:
Ondřej Samohel 2021-11-01 11:20:50 +01:00 committed by GitHub
commit ba2af3dd62
9 changed files with 239 additions and 5 deletions

View file

@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""Houdini specific Avalon/Pyblish plugin definitions."""
import sys
from avalon.api import CreatorError
from avalon import houdini
import six
@@ -8,7 +9,7 @@ import hou
from openpype.api import PypeCreatorMixin
class OpenPypeCreatorError(Exception):
class OpenPypeCreatorError(CreatorError):
    pass

View file

@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
from openpype.hosts.houdini.api import plugin
from avalon.houdini import lib
from avalon import io

import hou


class CreateHDA(plugin.Creator):
    """Publish Houdini Digital Asset file."""

    name = "hda"
    label = "Houdini Digital Asset (Hda)"
    family = "hda"
    icon = "gears"
    maintain_selection = False

    def __init__(self, *args, **kwargs):
        super(CreateHDA, self).__init__(*args, **kwargs)
        self.data.pop("active", None)

    def _check_existing(self, subset_name):
        # type: (str) -> bool
        """Check if a subset with the given name already exists."""
        # get all subsets of the current asset
        asset_id = io.find_one({"name": self.data["asset"], "type": "asset"},
                               projection={"_id": True})["_id"]
        subset_docs = io.find(
            {
                "type": "subset",
                "parent": asset_id
            }, {"name": 1}
        )
        existing_subset_names = set(subset_docs.distinct("name"))
        existing_subset_names_low = {
            _name.lower() for _name in existing_subset_names
        }
        return subset_name.lower() in existing_subset_names_low

    def _process(self, instance):
        subset_name = self.data["subset"]

        # get selected nodes
        out = hou.node("/obj")
        self.nodes = hou.selectedNodes()

        if (self.options or {}).get("useSelection") and self.nodes:
            # if `use selection` is enabled and there are some
            # selected nodes ...
            to_hda = self.nodes[0]
            if len(self.nodes) > 1:
                # if there is more than one node, create a subnet first
                subnet = out.createNode(
                    "subnet", node_name="{}_subnet".format(self.name))
                to_hda = subnet
        else:
            # in case of no selection, just create a subnet node
            subnet = out.createNode(
                "subnet", node_name="{}_subnet".format(self.name))
            subnet.moveToGoodPosition()
            to_hda = subnet

        if not to_hda.type().definition():
            # if the node type has no definition, it is not a user-created
            # HDA. Test whether an HDA can be created from the node.
            if not to_hda.canCreateDigitalAsset():
                raise Exception(
                    "Cannot create HDA from node {}".format(to_hda))
            hda_node = to_hda.createDigitalAsset(
                name=subset_name,
                hda_file_name="$HIP/{}.hda".format(subset_name)
            )
            hou.moveNodesTo(self.nodes, hda_node)
            hda_node.layoutChildren()
        else:
            if self._check_existing(subset_name):
                raise plugin.OpenPypeCreatorError(
                    ("Subset {} is already published with a different HDA "
                     "definition.").format(subset_name))
            hda_node = to_hda

        hda_node.setName(subset_name)

        # delete the node created by Avalon in /out;
        # this needs to be addressed in a future Houdini workflow refactor.
        hou.node("/out/{}".format(subset_name)).destroy()

        try:
            lib.imprint(hda_node, self.data)
        except hou.OperationFailed:
            raise plugin.OpenPypeCreatorError(
                ("Cannot set metadata on the asset. It might already be "
                 "an OpenPype asset.")
            )

        return hda_node

View file

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
from avalon import api
from avalon.houdini import pipeline


class HdaLoader(api.Loader):
    """Load Houdini Digital Asset file."""

    families = ["hda"]
    label = "Load Hda"
    representations = ["hda"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        import os
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        obj = hou.node("/obj")

        # Create a unique name
        counter = 1
        namespace = namespace or context["asset"]["name"]
        formatted = "{}_{}".format(namespace, name) if namespace else name
        node_name = "{0}_{1:03d}".format(formatted, counter)

        hou.hda.installFile(file_path)
        hda_node = obj.createNode(name, node_name)
        self[:] = [hda_node]

        return pipeline.containerise(
            node_name,
            namespace,
            [hda_node],
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, representation):
        import hou

        hda_node = container["node"]
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")
        hou.hda.installFile(file_path)
        defs = hda_node.type().allInstalledDefinitions()
        def_paths = [d.libraryFilePath() for d in defs]
        new = def_paths.index(file_path)
        defs[new].setIsPreferred(True)

    def remove(self, container):
        node = container["node"]
        node.destroy()

View file

@@ -23,8 +23,10 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin):
            return

        # Check bypass state and reverse
        active = True
        node = instance[0]
        active = not node.isBypassed()
        if hasattr(node, "isBypassed"):
            active = not node.isBypassed()

        # Set instance active state
        instance.data.update(

View file

@@ -31,6 +31,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
    def process(self, context):

        nodes = hou.node("/out").children()
        nodes += hou.node("/obj").children()

        # Include instances in the USD stage only when it exists, so it
        # remains backwards compatible with versions before Houdini 18
@@ -49,9 +50,12 @@
            has_family = node.evalParm("family")
            assert has_family, "'%s' is missing 'family'" % node.name()
            self.log.info("processing {}".format(node))

            data = lib.read(node)
            # Check bypass state and reverse
            data.update({"active": not node.isBypassed()})
            if hasattr(node, "isBypassed"):
                data.update({"active": not node.isBypassed()})

            # temporary translation of `active` to `publish` until the issue
            # has been resolved, https://github.com/pyblish/pyblish-base/issues/307

View file

@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
import os
from pprint import pformat

import pyblish.api
import openpype.api


class ExtractHDA(openpype.api.Extractor):

    order = pyblish.api.ExtractorOrder
    label = "Extract HDA"
    hosts = ["houdini"]
    families = ["hda"]

    def process(self, instance):
        self.log.info(pformat(instance.data))
        hda_node = instance[0]
        hda_def = hda_node.type().definition()
        hda_options = hda_def.options()
        hda_options.setSaveInitialParmsAndContents(True)

        next_version = instance.data["anatomyData"]["version"]
        self.log.info("setting version: {}".format(next_version))
        hda_def.setVersion(str(next_version))
        hda_def.setOptions(hda_options)
        hda_def.save(hda_def.libraryFilePath(), hda_node, hda_options)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        file = os.path.basename(hda_def.libraryFilePath())
        staging_dir = os.path.dirname(hda_def.libraryFilePath())
        self.log.info("Using HDA from {}".format(hda_def.libraryFilePath()))

        representation = {
            'name': 'hda',
            'ext': 'hda',
            'files': file,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)

View file

@@ -35,5 +35,5 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
    def get_invalid(cls, instance):

        rop = instance[0]
        if rop.isBypassed():
        if hasattr(rop, "isBypassed") and rop.isBypassed():
            return [rop]

View file

@@ -99,7 +99,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"camerarig",
"redshiftproxy",
"effect",
"xgen"
"xgen",
"hda"
]
exclude_families = ["clip"]
db_representation_context_keys = [

View file

@@ -76,3 +76,28 @@ I've selected `vdb1` and went to **OpenPype -> Create** and selected **VDB Cache**.
geometry ROP in `/out` and sets its paths to output vdb files. During the publishing process,
the whole DOP network is cooked.
## Publishing Houdini Digital Assets (HDA)
You can publish most Houdini nodes as an HDA for easy interchange of data between Houdini instances, or even
other DCCs via Houdini Engine.
### Creating HDA
Simply select the nodes you want to include in the HDA, go to **OpenPype -> Create** and select **Houdini Digital Asset (Hda)**.
You can even use an already existing HDA as the selected node, and it will be published (see below for limitations).
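Under the hood, the creator wraps your selection (or a fresh subnet) and turns it into a digital asset saved next to the
hip file. Here is a minimal sketch of the no-selection code path, using the same `hou` calls as the new `CreateHDA`
plugin; `hdaFoo` is just an example subset name, not a fixed convention:

```python
import hou

subset_name = "hdaFoo"  # example subset name

# create an empty subnet in /obj and turn it into a digital asset
# stored next to the current hip file
obj = hou.node("/obj")
subnet = obj.createNode("subnet", node_name="{}_subnet".format(subset_name))
subnet.moveToGoodPosition()

if subnet.canCreateDigitalAsset():
    hda_node = subnet.createDigitalAsset(
        name=subset_name,
        hda_file_name="$HIP/{}.hda".format(subset_name),
    )
    hda_node.layoutChildren()
```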
:::caution HDA Workflow limitations
As long as the HDA keeps the same node type, you can rebuild it from different nodes under the same (subset) name and
everything works. But once you've published a version of an HDA subset, you cannot change its type. For example, you
create HDA **Foo** from a *Cube* and a *Sphere* - it will create an HDA subset named `hdaFoo` with a matching type, and
you publish it as version 1. Then you create version 2 with an added *Torus*. Then you create version 3 from scratch
out of completely different nodes, but still using the resulting subset name `hdaFoo`. Everything still works as
expected. But then you use an already existing HDA as a base, for example one from a different artist. Its type cannot
be changed from what it was, so even if it is named `hdaFoo` it has a different type. It could be published, but you
could never load it and retain the ability to switch versions between different HDA types.
:::
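If you are not sure whether a node you picked as a base already carries a fixed HDA type, you can inspect it before
creating. A minimal sketch using the same `hou` calls as the creator:

```python
import hou

node = hou.selectedNodes()[0]
definition = node.type().definition()

if definition is not None:
    # the node is already an HDA instance: its type is fixed by this
    # definition and will not change by publishing under another name
    print(node.type().name(), definition.libraryFilePath())
elif not node.canCreateDigitalAsset():
    print("cannot create an HDA from {}".format(node.path()))
```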
### Loading HDA
When you load an HDA, it will install its type in your hip file and add the published version as its definition file. When
you switch versions via the Scene Manager, the new version's definition is added and set as the preferred one.
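In terms of the `hou` API, loading and version switching boil down to roughly this (a minimal sketch; the file path and
node path below are illustrative, the real loader resolves them from the published representation in the database):

```python
import hou

# illustrative values
file_path = "/projects/example/publish/hdaFoo_v002.hda"
hda_node = hou.node("/obj/hdaFoo_001")

# install the published definition into the current hip session
hou.hda.installFile(file_path)

# find the definition coming from the newly installed file and make
# it the preferred one, as the loader's update() does
for definition in hda_node.type().allInstalledDefinitions():
    if definition.libraryFilePath() == file_path:
        definition.setIsPreferred(True)
```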