Merge pull request #3697 from pypeclub/feature/OP-3411_publishing-houdini-workfiles

Houdini: Publishing workfiles
This commit is contained in:
Ondřej Samohel 2022-08-30 13:46:11 +02:00 committed by GitHub
commit c3e688c888
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 176 additions and 21 deletions

View file

@@ -1,27 +1,28 @@
import os
import hou
from openpype.pipeline import legacy_io
import pyblish.api
class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context.

    Stores the current Houdini scene path in ``context.data["currentFile"]``
    and creates a ``workfile`` instance so the scene file itself can be
    published.
    """

    order = pyblish.api.CollectorOrder - 0.01
    label = "Houdini Current File"
    hosts = ["houdini"]

    def process(self, context):
        """Inject the current working file and create a workfile instance."""
        current_file = hou.hipFile.path()
        if not os.path.exists(current_file):
            # By default Houdini will even point a new scene to a path.
            # However if the file is not saved at all and does not exist,
            # we assume the user never set it.
            # NOTE: fixed here — the original kept the pre-rename
            # `filepath = ""` which left `current_file` pointing at a
            # non-existent path and assigned an unused local.
            current_file = ""
        elif os.path.basename(current_file) == "untitled.hip":
            # Due to even a new file being called 'untitled.hip' we are unable
            # to confirm the current scene was ever saved because the file
            # could have existed already. We will allow it if the file exists,
            # but warn that we cannot be sure the scene was
            # saved correctly.
            self.log.warning(
                "Current file is 'untitled.hip' so we are unable to "
                "confirm whether the current scene was ever "
                "saved correctly."
            )

        context.data["currentFile"] = current_file

        folder, file = os.path.split(current_file)
        filename, ext = os.path.splitext(file)
        # Task name drives the subset name, e.g. 'workfileModeling'.
        task = legacy_io.Session["AVALON_TASK"]

        # Create the workfile instance for publishing.
        instance = context.create_instance(name=filename)
        subset = 'workfile' + task.capitalize()
        data = {
            "subset": subset,
            "asset": os.getenv("AVALON_ASSET", None),
            "label": subset,
            "publish": True,
            "family": 'workfile',
            "families": ['workfile'],
            "setMembers": [current_file],
            # Frame/handle ranges are taken from context collected earlier.
            "frameStart": context.data['frameStart'],
            "frameEnd": context.data['frameEnd'],
            "handleStart": context.data['handleStart'],
            "handleEnd": context.data['handleEnd'],
        }
        # Single representation: the scene file itself, staged from its
        # containing folder.
        data['representations'] = [{
            'name': ext.lstrip("."),
            'ext': ext.lstrip("."),
            'files': file,
            "stagingDir": folder,
        }]
        instance.data.update(data)

        self.log.info('Collected instance: {}'.format(file))
        self.log.info('Scene path: {}'.format(current_file))
        self.log.info('staging Dir: {}'.format(folder))
        self.log.info('subset: {}'.format(subset))

View file

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
import openpype.api
import pyblish.api
import hou
class ValidateWorkfilePaths(pyblish.api.InstancePlugin):
    """Validate workfile paths so they are absolute.

    Scans Houdini file references on the configured node types and fails
    when an unexpanded parameter value contains a prohibited variable
    (e.g. ``$HIP``). The repair action replaces those values with their
    expanded, absolute form.
    """

    order = pyblish.api.ValidatorOrder
    families = ["workfile"]
    hosts = ["houdini"]
    label = "Validate Workfile Paths"
    actions = [openpype.api.RepairAction]
    optional = True

    # Node types whose file references are checked (overridable via settings).
    node_types = ["file", "alembic"]
    # Variables that must not appear in unexpanded parameter values.
    prohibited_vars = ["$HIP", "$JOB"]

    def process(self, instance):
        """Raise if any checked parameter uses a prohibited variable."""
        invalid = self.get_invalid()
        self.log.info(
            "node types to check: {}".format(", ".join(self.node_types)))
        self.log.info(
            "prohibited vars: {}".format(", ".join(self.prohibited_vars))
        )
        if invalid:
            for param in invalid:
                self.log.error(
                    "{}: {}".format(param.path(), param.unexpandedString()))
            raise RuntimeError("Invalid paths found")

    @classmethod
    def get_invalid(cls):
        """Return file-reference parms containing prohibited variables."""
        invalid = []
        for param, _ in hou.fileReferences():
            # skip nodes we are not interested in
            if param.node().type().name() not in cls.node_types:
                continue
            # Hoist the unexpanded value once and use a plain substring
            # membership test per prohibited variable (the original
            # filtered-generator form re-evaluated unexpandedString() for
            # every variable and relied on the variables being truthy).
            raw = param.unexpandedString()
            if any(var in raw for var in cls.prohibited_vars):
                invalid.append(param)
        return invalid

    @classmethod
    def repair(cls, instance):
        """Replace prohibited variables with their expanded values."""
        for param in cls.get_invalid():
            cls.log.info("processing: {}".format(param.path()))
            cls.log.info("Replacing {} for {}".format(
                param.unexpandedString(),
                hou.text.expandString(param.unexpandedString())))
            param.set(hou.text.expandString(param.unexpandedString()))

View file

@@ -0,0 +1,10 @@
from openpype.pipeline import install_host
from openpype.hosts.houdini import api
def main():
    """Install the OpenPype host integration into the running Houdini."""
    print("Installing OpenPype ...")
    install_host(api)


# NOTE(review): invoked at import time on purpose, it seems — this looks
# like a Houdini startup script (e.g. a pythonrc-style module) that Houdini
# *imports* rather than runs as __main__, in which case an
# `if __name__ == "__main__":` guard would prevent installation. Confirm
# the file's role before adding a guard.
main()

View file

@@ -47,6 +47,18 @@
}
},
"publish": {
"ValidateWorkfilePaths": {
"enabled": true,
"optional": true,
"node_types": [
"file",
"alembic"
],
"prohibited_vars": [
"$HIP",
"$JOB"
]
},
"ValidateContainers": {
"enabled": true,
"optional": true,

View file

@@ -10,22 +10,8 @@
"name": "schema_houdini_create"
},
{
"type": "schema",
"name": "schema_houdini_publish"
}
]
}

View file

@@ -0,0 +1,50 @@
{
"type": "dict",
"collapsible": true,
"key": "publish",
"label": "Publish plugins",
"children": [
{
"type": "dict",
"collapsible": true,
"checkbox_key": "enabled",
"key": "ValidateWorkfilePaths",
"label": "Validate Workfile Paths",
"is_group": true,
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "boolean",
"key": "optional",
"label": "Optional"
},
{
"key": "node_types",
"label": "Node types",
"type": "list",
"object_type": "text"
},
{
"key": "prohibited_vars",
"label": "Prohibited variables",
"type": "list",
"object_type": "text"
}
]
},
{
"type": "schema_template",
"name": "template_publish_plugin",
"template_data": [
{
"key": "ValidateContainers",
"label": "ValidateContainers"
}
]
}
]
}