Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit bf65db154f: Merge branch 'develop' into enhancement/AY-1064_Houdini-Support-HDA-publishing-from-non-object-level

28 changed files with 207 additions and 611 deletions
@@ -92,7 +92,7 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
return None, None
def _collect_scene_placeholders(self):
"""" Cache placeholder data to shared data.
"""Cache placeholder data to shared data.
Returns:
(list) of dicts
"""

@@ -83,7 +83,7 @@ class ExtractThumbnail(plugin.BlenderExtractor):
instance.data["representations"].append(representation)
def _fix_output_path(self, filepath):
""""Workaround to return correct filepath.
"""Workaround to return correct filepath.
To workaround this we just glob.glob() for any file extensions and
assume the latest modified file is the correct file and return it.

@@ -6,7 +6,7 @@ import logging
import hou # noqa
from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
from ayon_core.tools.utils import host_tools
import pyblish.api
from ayon_core.pipeline import (

@@ -23,6 +23,7 @@ from ayon_houdini.api import lib, shelves, creator_node_shelves
from ayon_core.lib import (
register_event_callback,
emit_event,
env_value_to_bool,
)

@@ -85,10 +86,9 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
# initialization during start up delays Houdini UI by minutes
# making it extremely slow to launch.
hdefereval.executeDeferred(shelves.generate_shelves)
if not IS_HEADLESS:
import hdefereval # noqa, hdefereval is only available in ui mode
hdefereval.executeDeferred(creator_node_shelves.install)
if env_value_to_bool("AYON_WORKFILE_TOOL_ON_START"):
hdefereval.executeDeferred(lambda: host_tools.show_workfiles(parent=hou.qt.mainWindow()))
def workfile_has_unsaved_changes(self):
return hou.hipFile.hasUnsavedChanges()
@@ -1,55 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef
class CreateMantraIFD(plugin.HoudiniCreator):
"""Mantra .ifd Archive"""
identifier = "io.openpype.creators.houdini.mantraifd"
label = "Mantra IFD"
product_type = "mantraifd"
icon = "gears"
def create(self, product_name, instance_data, pre_create_data):
import hou
instance_data.pop("active", None)
instance_data.update({"node_type": "ifd"})
creator_attributes = instance_data.setdefault(
"creator_attributes", dict())
creator_attributes["farm"] = pre_create_data["farm"]
instance = super(CreateMantraIFD, self).create(
product_name,
instance_data,
pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
filepath = "{}{}".format(
hou.text.expandString("$HIP/pyblish/"),
"{}.$F4.ifd".format(product_name))
parms = {
# Render frame range
"trange": 1,
# Arnold ROP settings
"soho_diskfile": filepath,
"soho_outputmode": 1
}
instance_node.setParms(parms)
# Lock any parameters in this list
to_lock = ["soho_outputmode", "productType", "id"]
self.lock_parameters(instance_node, to_lock)
def get_instance_attr_defs(self):
return [
BoolDef("farm",
label="Submitting to Farm",
default=False)
]
def get_pre_create_attr_defs(self):
attrs = super().get_pre_create_attr_defs()
# Use same attributes as for instance attributes
return attrs + self.get_instance_attr_defs()
@@ -12,9 +12,7 @@ class CollectDataforCache(plugin.HoudiniInstancePlugin):
# Run after Collect Frames
order = pyblish.api.CollectorOrder + 0.11
families = ["ass", "pointcache",
"mantraifd", "redshiftproxy",
"vdbcache", "model"]
families = ["ass", "pointcache", "redshiftproxy", "vdbcache", "model"]
targets = ["local", "remote"]
label = "Collect Data for Cache"

@@ -9,9 +9,7 @@ class CollectChunkSize(plugin.HoudiniInstancePlugin,
"""Collect chunk size for cache submission to Deadline."""
order = pyblish.api.CollectorOrder + 0.05
families = ["ass", "pointcache",
"vdbcache", "mantraifd",
"redshiftproxy", "model"]
families = ["ass", "pointcache", "vdbcache", "redshiftproxy", "model"]
targets = ["local", "remote"]
label = "Collect Chunk Size"
chunk_size = 999999

@@ -15,9 +15,8 @@ class CollectFrames(plugin.HoudiniInstancePlugin):
# this plugin runs after CollectRopFrameRange
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Frames"
families = ["vdbcache", "imagesequence", "ass",
"mantraifd", "redshiftproxy", "review",
"pointcache"]
families = ["camera", "vdbcache", "imagesequence", "ass",
"redshiftproxy", "review", "pointcache", "fbx"]
def process(self, instance):

@@ -60,7 +59,10 @@ class CollectFrames(plugin.HoudiniInstancePlugin):
# todo: `frames` currently conflicts with "explicit frames" for a
# for a custom frame list. So this should be refactored.
instance.data.update({"frames": result})
instance.data.update({
"frames": result,
"stagingDir": os.path.dirname(output)
})
@staticmethod
def create_file_list(match, start_frame, end_frame):
@@ -1,51 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractAlembic(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder
label = "Extract Alembic"
families = ["abc", "camera"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
output = ropnode.evalParm("filename")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
if instance.data.get("frames"):
# list of files
files = instance.data["frames"]
else:
# single file
files = os.path.basename(output)
# We run the render
self.log.info("Writing alembic '%s' to '%s'" % (files,
staging_dir))
render_rop(ropnode)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': files,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)
@@ -1,63 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractAss(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder + 0.1
label = "Extract Ass"
families = ["ass"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
output = ropnode.evalParm("ar_ass_file")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
# We run the render
self.log.info("Writing ASS '%s' to '%s'" % (file_name, staging_dir))
render_rop(ropnode)
# Unfortunately user interrupting the extraction does not raise an
# error and thus still continues to the integrator. To capture that
# we make sure all files exist
files = instance.data["frames"]
missing = []
for file_name in files:
full_path = os.path.normpath(os.path.join(staging_dir, file_name))
if not os.path.exists(full_path):
missing.append(full_path)
if missing:
raise RuntimeError("Failed to complete Arnold ass extraction. "
"Missing output files: {}".format(missing))
if "representations" not in instance.data:
instance.data["representations"] = []
# Allow ass.gz extension as well
ext = "ass.gz" if file_name.endswith(".ass.gz") else "ass"
representation = {
'name': 'ass',
'ext': ext,
"files": files,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
instance.data["representations"].append(representation)
@@ -1,51 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import lib, plugin
class ExtractBGEO(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder
label = "Extract BGEO"
families = ["bgeo"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
output = ropnode.evalParm("sopoutput")
staging_dir, file_name = os.path.split(output)
instance.data["stagingDir"] = staging_dir
# We run the render
self.log.info("Writing bgeo files '{}' to '{}'.".format(
file_name, staging_dir))
# write files
lib.render_rop(ropnode)
output = instance.data["frames"]
_, ext = lib.splitext(
output[0], allowed_multidot_extensions=[
".ass.gz", ".bgeo.sc", ".bgeo.gz",
".bgeo.lzma", ".bgeo.bz2"])
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": "bgeo",
"ext": ext.lstrip("."),
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"]
}
instance.data["representations"].append(representation)
@@ -1,58 +0,0 @@
import os
import hou
import pyblish.api
from ayon_core.pipeline import publish
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop, splitext
class ExtractComposite(plugin.HoudiniExtractorPlugin,
publish.ColormanagedPyblishPluginMixin):
order = pyblish.api.ExtractorOrder
label = "Extract Composite (Image Sequence)"
families = ["imagesequence"]
def process(self, instance):
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the copoutput parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
output = ropnode.evalParm("copoutput")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
self.log.info("Writing comp '%s' to '%s'" % (file_name, staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
_, ext = splitext(output[0], [])
ext = ext.lstrip(".")
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": ext,
"ext": ext,
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
if ext.lower() == "exr":
# Inject colorspace with 'scene_linear' as that's the
# default Houdini working colorspace and all extracted
# OpenEXR images should be in that colorspace.
# https://www.sidefx.com/docs/houdini/render/linear.html#image-formats
self.set_representation_colorspace(
representation, instance.context,
colorspace="scene_linear"
)
instance.data["representations"].append(representation)
@@ -1,51 +0,0 @@
# -*- coding: utf-8 -*-
"""Fbx Extractor for houdini. """
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractFBX(plugin.HoudiniExtractorPlugin):
label = "Extract FBX"
families = ["fbx"]
order = pyblish.api.ExtractorOrder + 0.1
def process(self, instance):
# get rop node
ropnode = hou.node(instance.data.get("instance_node"))
output_file = ropnode.evalParm("sopoutput")
# get staging_dir and file_name
staging_dir = os.path.normpath(os.path.dirname(output_file))
file_name = os.path.basename(output_file)
# render rop
self.log.debug("Writing FBX '%s' to '%s'", file_name, staging_dir)
render_rop(ropnode)
# prepare representation
representation = {
"name": "fbx",
"ext": "fbx",
"files": file_name,
"stagingDir": staging_dir
}
# A single frame may also be rendered without start/end frame.
if "frameStartHandle" in instance.data and "frameEndHandle" in instance.data: # noqa
representation["frameStart"] = instance.data["frameStartHandle"]
representation["frameEnd"] = instance.data["frameEndHandle"]
# set value type for 'representations' key to list
if "representations" not in instance.data:
instance.data["representations"] = []
# update instance data
instance.data["stagingDir"] = staging_dir
instance.data["representations"].append(representation)
@@ -1,49 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
class ExtractMantraIFD(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder
label = "Extract Mantra ifd"
families = ["mantraifd"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data.get("instance_node"))
output = ropnode.evalParm("soho_diskfile")
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
files = instance.data["frames"]
missing_frames = [
frame
for frame in instance.data["frames"]
if not os.path.exists(
os.path.normpath(os.path.join(staging_dir, frame)))
]
if missing_frames:
raise RuntimeError("Failed to complete Mantra ifd extraction. "
"Missing output files: {}".format(
missing_frames))
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'ifd',
'ext': 'ifd',
'files': files,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStart"],
"frameEnd": instance.data["frameEnd"],
}
instance.data["representations"].append(representation)
@@ -1,69 +0,0 @@
import os
import hou
import pyblish.api
from ayon_core.pipeline import publish
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractOpenGL(plugin.HoudiniExtractorPlugin,
publish.ColormanagedPyblishPluginMixin):
order = pyblish.api.ExtractorOrder - 0.01
label = "Extract OpenGL"
families = ["review"]
def process(self, instance):
ropnode = hou.node(instance.data.get("instance_node"))
# This plugin is triggered when marking render as reviewable.
# Therefore, this plugin will run on over wrong instances.
# TODO: Don't run this plugin on wrong instances.
# This plugin should run only on review product type
# with instance node of opengl type.
if ropnode.type().name() != "opengl":
self.log.debug("Skipping OpenGl extraction. Rop node {} "
"is not an OpenGl node.".format(ropnode.path()))
return
output = ropnode.evalParm("picture")
staging_dir = os.path.normpath(os.path.dirname(output))
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
self.log.info("Extracting '%s' to '%s'" % (file_name,
staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
tags = ["review"]
if not instance.data.get("keepImages"):
tags.append("delete")
representation = {
"name": instance.data["imageFormat"],
"ext": instance.data["imageFormat"],
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
"tags": tags,
"preview": True,
"camera_name": instance.data.get("review_camera")
}
if ropnode.evalParm("colorcorrect") == 2: # OpenColorIO enabled
colorspace = ropnode.evalParm("ociocolorspace")
# inject colorspace data
self.set_representation_colorspace(
representation, instance.context,
colorspace=colorspace
)
if "representations" not in instance.data:
instance.data["representations"] = []
instance.data["representations"].append(representation)
@@ -1,52 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractRedshiftProxy(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder + 0.1
label = "Extract Redshift Proxy"
families = ["redshiftproxy"]
targets = ["local", "remote"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
output = ropnode.evalParm("RS_archive_file")
staging_dir = os.path.normpath(os.path.dirname(output))
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
self.log.info("Writing Redshift Proxy '%s' to '%s'" % (file_name,
staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": "rs",
"ext": "rs",
"files": output,
"stagingDir": staging_dir,
}
# A single frame may also be rendered without start/end frame.
if "frameStartHandle" in instance.data and "frameEndHandle" in instance.data: # noqa
representation["frameStart"] = instance.data["frameStartHandle"]
representation["frameEnd"] = instance.data["frameEndHandle"]
instance.data["representations"].append(representation)
@@ -0,0 +1,150 @@
import os
import hou
import pyblish.api
from ayon_core.pipeline import publish
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop, splitext
class ExtractROP(plugin.HoudiniExtractorPlugin):
"""Generic Extractor for any ROP node."""
label = "Extract ROP"
order = pyblish.api.ExtractorOrder
families = ["abc", "camera", "bgeo", "pointcache", "fbx",
"vdbcache", "ass", "redshiftproxy", "mantraifd"]
targets = ["local", "remote"]
def process(self, instance: pyblish.api.Instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
rop_node = hou.node(instance.data["instance_node"])
files = instance.data["frames"]
first_file = files[0] if isinstance(files, (list, tuple)) else files
_, ext = splitext(
first_file, allowed_multidot_extensions=[
".ass.gz", ".bgeo.sc", ".bgeo.gz",
".bgeo.lzma", ".bgeo.bz2"]
)
ext = ext.lstrip(".")
self.log.debug(f"Rendering {rop_node.path()} to {first_file}..")
render_rop(rop_node)
self.validate_expected_frames(instance)
# In some cases representation name is not the the extension
# TODO: Preferably we remove this very specific naming
product_type = instance.data["productType"]
name = {
"bgeo": "bgeo",
"rs": "rs",
"ass": "ass"
}.get(product_type, ext)
representation = {
"name": name,
"ext": ext,
"files": instance.data["frames"],
"stagingDir": instance.data["stagingDir"],
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
self.update_representation_data(instance, representation)
instance.data.setdefault("representations", []).append(representation)
def validate_expected_frames(self, instance: pyblish.api.Instance):
"""
Validate all expected files in `instance.data["frames"]` exist in
the staging directory.
"""
filenames = instance.data["frames"]
staging_dir = instance.data["stagingDir"]
if isinstance(filenames, str):
# Single frame
filenames = [filenames]
missing_filenames = [
filename for filename in filenames
if not os.path.isfile(os.path.join(staging_dir, filename))
]
if missing_filenames:
raise RuntimeError(f"Missing frames: {missing_filenames}")
def update_representation_data(self,
instance: pyblish.api.Instance,
representation: dict):
"""Allow subclass to override the representation data in-place"""
pass
class ExtractOpenGL(ExtractROP,
publish.ColormanagedPyblishPluginMixin):
order = pyblish.api.ExtractorOrder - 0.01
label = "Extract OpenGL"
families = ["review"]
def process(self, instance):
# This plugin is triggered when marking render as reviewable.
# Therefore, this plugin will run over wrong instances.
# TODO: Don't run this plugin on wrong instances.
# This plugin should run only on review product type
# with instance node of opengl type.
instance_node = instance.data.get("instance_node")
if not instance_node:
self.log.debug("Skipping instance without instance node.")
return
rop_node = hou.node(instance_node)
if rop_node.type().name() != "opengl":
self.log.debug("Skipping OpenGl extraction. Rop node {} "
"is not an OpenGl node.".format(rop_node.path()))
return
super(ExtractOpenGL, self).process(instance)
def update_representation_data(self,
instance: pyblish.api.Instance,
representation: dict):
tags = ["review"]
if not instance.data.get("keepImages"):
tags.append("delete")
representation.update({
# TODO: Avoid this override?
"name": instance.data["imageFormat"],
"ext": instance.data["imageFormat"],
"tags": tags,
"preview": True,
"camera_name": instance.data.get("review_camera")
})
class ExtractComposite(ExtractROP,
publish.ColormanagedPyblishPluginMixin):
label = "Extract Composite (Image Sequence)"
families = ["imagesequence"]
def update_representation_data(self,
instance: pyblish.api.Instance,
representation: dict):
if representation["ext"].lower() != "exr":
return
# Inject colorspace with 'scene_linear' as that's the
# default Houdini working colorspace and all extracted
# OpenEXR images should be in that colorspace.
# https://www.sidefx.com/docs/houdini/render/linear.html#image-formats
self.set_representation_colorspace(
representation, instance.context,
colorspace="scene_linear"
)
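The generic ExtractROP above takes over from the per-family Houdini extractors deleted elsewhere in this commit. A minimal sketch of how a further format-specific extractor could hook in, assuming it lives in the same module as ExtractROP; the class name and the "usd" family are illustrative only and are not part of this commit:

# Hypothetical subclass, for illustration only; not part of this commit.
class ExtractUSDExample(ExtractROP):
    label = "Extract USD (example)"
    order = pyblish.api.ExtractorOrder
    families = ["usd"]  # illustrative family name

    def update_representation_data(self, instance, representation):
        # The base class has already filled files, stagingDir and the
        # frame range; only override what differs for this format.
        representation["name"] = "usd"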
@@ -1,46 +0,0 @@
import os
import hou
import pyblish.api
from ayon_houdini.api import plugin
from ayon_houdini.api.lib import render_rop
class ExtractVDBCache(plugin.HoudiniExtractorPlugin):
order = pyblish.api.ExtractorOrder + 0.1
label = "Extract VDB Cache"
families = ["vdbcache"]
def process(self, instance):
if instance.data.get("farm"):
self.log.debug("Should be processed on farm, skipping.")
return
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved
sop_output = ropnode.evalParm("sopoutput")
staging_dir = os.path.normpath(os.path.dirname(sop_output))
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(sop_output)
self.log.info("Writing VDB '%s' to '%s'" % (file_name, staging_dir))
render_rop(ropnode)
output = instance.data["frames"]
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": "vdb",
"ext": "vdb",
"files": output,
"stagingDir": staging_dir,
"frameStart": instance.data["frameStartHandle"],
"frameEnd": instance.data["frameEndHandle"],
}
instance.data["representations"].append(representation)
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'houdini' version."""
__version__ = "0.3.2"
__version__ = "0.3.4"

@@ -1,6 +1,6 @@
name = "houdini"
title = "Houdini"
version = "0.3.2"
version = "0.3.4"
client_dir = "ayon_houdini"

@@ -51,9 +51,6 @@ class CreatePluginsModel(BaseSettingsModel):
CreateKarmaROP: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Karma ROP")
CreateMantraIFD: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Mantra IFD")
CreateMantraROP: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Mantra ROP")

@@ -119,10 +116,6 @@ DEFAULT_HOUDINI_CREATE_SETTINGS = {
"enabled": True,
"default_variants": ["Main"]
},
"CreateMantraIFD": {
"enabled": True,
"default_variants": ["Main"]
},
"CreateMantraROP": {
"enabled": True,
"default_variants": ["Main"]

@@ -12,7 +12,6 @@ from .pipeline import (
MayaHost,
)
from .plugin import (
Creator,
Loader
)

@@ -45,7 +44,6 @@ __all__ = [
"containerise",
"MayaHost",
"Creator",
"Loader",
# Workfiles API
@@ -1733,7 +1733,7 @@ def is_valid_reference_node(reference_node):
"""
# maya 2022 is missing `isValidReference` so the check needs to be
# done in different way.
if cmds.about(version=True) < 2023:
if int(cmds.about(version=True)) < 2023:
try:
cmds.referenceQuery(reference_node, filename=True)
return True
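The one-line change above casts the Maya version to int before comparing. A small sketch of why that matters, using a plain string in place of the real cmds.about(version=True) call so it runs outside Maya:

# cmds.about(version=True) returns a string such as "2022" (stand-in below).
version = "2022"
# The old check compared str to int, which raises TypeError on Python 3:
#     version < 2023
# The fixed check casts first:
print(int(version) < 2023)  # True for Maya 2022, False for 2023 and newer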
@@ -15,10 +15,9 @@ from ayon_core.pipeline import (
Anatomy,
AutoCreator,
CreatedInstance,
Creator as NewCreator,
Creator,
CreatorError,
HiddenCreator,
LegacyCreator,
LoaderPlugin,
get_current_project_name,
get_representation_path,

@@ -70,22 +69,6 @@ def get_reference_node_parents(*args, **kwargs):
return lib.get_reference_node_parents(*args, **kwargs)
class Creator(LegacyCreator):
defaults = ['Main']
def process(self):
nodes = list()
with lib.undo_chunk():
if (self.options or {}).get("useSelection"):
nodes = cmds.ls(selection=True)
instance = cmds.sets(nodes, name=self.name)
lib.imprint(instance, self.data)
return instance
@six.add_metaclass(ABCMeta)
class MayaCreatorBase(object):

@@ -274,7 +257,7 @@ class MayaCreatorBase(object):
@six.add_metaclass(ABCMeta)
class MayaCreator(NewCreator, MayaCreatorBase):
class MayaCreator(Creator, MayaCreatorBase):
settings_category = "maya"

@@ -381,7 +364,7 @@ def ensure_namespace(namespace):
return cmds.namespace(add=namespace)
class RenderlayerCreator(NewCreator, MayaCreatorBase):
class RenderlayerCreator(Creator, MayaCreatorBase):
"""Creator which creates an instance per renderlayer in the workfile.
Create and manages renderlayer product per renderLayer in workfile.

@@ -9,11 +9,16 @@ class CreateSetDress(plugin.MayaCreator):
label = "Set Dress"
product_type = "setdress"
icon = "cubes"
exactSetMembersOnly = True
shader = True
default_variants = ["Main", "Anim"]
def get_instance_attr_defs(self):
return [
BoolDef("exactSetMembersOnly",
label="Exact Set Members Only",
default=True)
default=self.exactSetMembersOnly),
BoolDef("shader",
label="Include shader",
default=self.shader)
]

@@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
"""Extract data as Maya scene (raw)."""
import os
import contextlib
from ayon_core.lib import BoolDef
from ayon_core.pipeline import AVALON_CONTAINER_ID, AYON_CONTAINER_ID
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_maya.api.lib import maintained_selection
from ayon_maya.api.lib import maintained_selection, shader
from ayon_maya.api import plugin
from maya import cmds
@@ -88,17 +88,21 @@ class ExtractMayaSceneRaw(plugin.MayaExtractorPlugin, AYONPyblishPluginMixin):
)
with maintained_selection():
cmds.select(selection, noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=attribute_values[
"preserve_references"
],
constructionHistory=True,
shader=True,
constraints=True,
expressions=True)
with contextlib.ExitStack() as stack:
if not instance.data.get("shader", True):
# Fix bug where export without shader may import the geometry 'green'
# due to the lack of any shader on import.
stack.enter_context(shader(selection, shadingEngine="initialShadingGroup"))
cmds.file(path,
force=True,
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",
exportSelected=True,
preserveReferences=attribute_values["preserve_references"],
constructionHistory=True,
shader=instance.data.get("shader", True),
constraints=True,
expressions=True)
if "representations" not in instance.data:
instance.data["representations"] = []
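The rewritten export above uses contextlib.ExitStack so the temporary shader assignment is entered only when the instance opts out of exporting shaders. A minimal, self-contained sketch of that conditional-context pattern (plain Python; the names are illustrative and unrelated to the Maya API):

import contextlib

@contextlib.contextmanager
def temporary_state():
    print("enter")   # e.g. assign a default shading group
    try:
        yield
    finally:
        print("exit")  # restore the original assignment

def export(use_temporary_state):
    with contextlib.ExitStack() as stack:
        if use_temporary_state:
            # Only entered when needed; cleaned up automatically either way.
            stack.enter_context(temporary_state())
        print("export")  # stands in for the actual cmds.file(...) call

export(True)   # prints: enter / export / exit
export(False)  # prints: export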
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'maya' version."""
__version__ = "0.2.4"
__version__ = "0.2.7"

@@ -1,6 +1,6 @@
name = "maya"
title = "Maya"
version = "0.2.4"
version = "0.2.7"
client_dir = "ayon_maya"
ayon_required_addons = {

@@ -124,6 +124,14 @@ class CreateVrayProxyModel(BaseSettingsModel):
default_factory=list, title="Default Products")
class CreateSetDressModel(BaseSettingsModel):
enabled: bool = SettingsField(True)
exactSetMembersOnly: bool = SettingsField(title="Exact Set Members Only")
shader: bool = SettingsField(title="Include shader")
default_variants: list[str] = SettingsField(
default_factory=list, title="Default Products")
class CreateMultishotLayout(BasicCreatorModel):
shotParent: str = SettingsField(title="Shot Parent Folder")
groupLoadedAssets: bool = SettingsField(title="Group Loaded Assets")

@@ -217,8 +225,8 @@ class CreatorsModel(BaseSettingsModel):
default_factory=BasicCreatorModel,
title="Create Rig"
)
CreateSetDress: BasicCreatorModel = SettingsField(
default_factory=BasicCreatorModel,
CreateSetDress: CreateSetDressModel = SettingsField(
default_factory=CreateSetDressModel,
title="Create Set Dress"
)
CreateVrayProxy: CreateVrayProxyModel = SettingsField(

@@ -396,6 +404,8 @@ DEFAULT_CREATORS_SETTINGS = {
},
"CreateSetDress": {
"enabled": True,
"exactSetMembersOnly": True,
"shader": True,
"default_variants": [
"Main",
"Anim"