Merge branch 'develop' into feature/remove-resolve-addon

Commit 46c8b17209
58 changed files with 4522 additions and 1383 deletions
client/ayon_core/pipeline/entity_uri.py (new file, 91 lines)
@ -0,0 +1,91 @@
from typing import Optional, Union
from urllib.parse import urlparse, parse_qs


def parse_ayon_entity_uri(uri: str) -> Optional[dict]:
    """Parse AYON entity URI into individual components.

    URI specification:
        ayon+entity://{project}/{folder}?product={product}
            &version={version}
            &representation={representation}
    URI example:
        ayon+entity://test/hero?product=modelMain&version=2&representation=usd

    However - if the netloc is `ayon://` it will by default also resolve as
    `ayon+entity://` on AYON server, thus we need to support both. The shorter
    `ayon://` is preferred for user readability.

    Example:
    >>> parse_ayon_entity_uri(
    >>>     "ayon://test/char/villain?product=modelMain&version=2&representation=usd"  # noqa: E501
    >>> )
    {'project': 'test', 'folderPath': '/char/villain',
     'product': 'modelMain', 'version': 2,
     'representation': 'usd'}
    >>> parse_ayon_entity_uri(
    >>>     "ayon+entity://project/folder?product=renderMain&version=3&representation=exr"  # noqa: E501
    >>> )
    {'project': 'project', 'folderPath': '/folder',
     'product': 'renderMain', 'version': 3,
     'representation': 'exr'}

    Returns:
        dict[str, Union[str, int]]: The individual keys with their values as
            found in the AYON entity URI.

    """

    if not (uri.startswith("ayon+entity://") or uri.startswith("ayon://")):
        return {}

    parsed = urlparse(uri)
    if parsed.scheme not in {"ayon+entity", "ayon"}:
        return {}

    result = {
        "project": parsed.netloc,
        "folderPath": "/" + parsed.path.strip("/")
    }
    query = parse_qs(parsed.query)
    for key in ["product", "version", "representation"]:
        if key in query:
            result[key] = query[key][0]

    # Convert version to integer if it is a digit
    version = result.get("version")
    if version is not None and version.isdigit():
        result["version"] = int(version)

    return result


def construct_ayon_entity_uri(
    project_name: str,
    folder_path: str,
    product: str,
    version: Union[int, str],
    representation_name: str
) -> str:
    """Construct AYON entity URI from its components.

    Returns:
        str: AYON Entity URI to query entity path.
    """
    if isinstance(version, int) and version < 0:
        version = "hero"
    if not (isinstance(version, int) or version in {"latest", "hero"}):
        raise ValueError(
            "Version must either be integer, 'latest' or 'hero'. "
            "Got: {}".format(version)
        )
    return (
        "ayon://{project}/{folder_path}?product={product}&version={version}"
        "&representation={representation}".format(
            project=project_name,
            folder_path=folder_path,
            product=product,
            version=version,
            representation=representation_name
        )
    )
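
Editor's note: a minimal round-trip sketch of the two helpers above (not part of the diff; the project and folder names are made up):

uri = construct_ayon_entity_uri(
    project_name="test",
    folder_path="/char/villain",
    product="modelMain",
    version=2,
    representation_name="usd"
)
# -> "ayon://test/char/villain?product=modelMain&version=2&representation=usd"
assert parse_ayon_entity_uri(uri) == {
    "project": "test",
    "folderPath": "/char/villain",
    "product": "modelMain",
    "version": 2,
    "representation": "usd",
}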

@ -5,7 +5,7 @@ import logging
import inspect
import collections
import numbers
from typing import Any
from typing import Optional, Union, Any

import ayon_api

@ -739,6 +739,91 @@ def get_representation_path(representation, root=None):
    )


def get_representation_path_by_names(
    project_name: str,
    folder_path: str,
    product_name: str,
    version_name: Union[int, str],
    representation_name: str,
    anatomy: Optional[Anatomy] = None) -> Optional[str]:
    """Get (latest) filepath for representation for folder and product.

    See `get_representation_by_names` for more details.

    Returns:
        str: The representation path if the representation exists.

    """
    representation = get_representation_by_names(
        project_name,
        folder_path,
        product_name,
        version_name,
        representation_name
    )
    if not representation:
        return

    if not anatomy:
        anatomy = Anatomy(project_name)

    if representation:
        path = get_representation_path_with_anatomy(representation, anatomy)
        return str(path).replace("\\", "/")


def get_representation_by_names(
    project_name: str,
    folder_path: str,
    product_name: str,
    version_name: Union[int, str],
    representation_name: str,
) -> Optional[dict]:
    """Get representation entity for asset and subset.

    If version_name is "hero" then return the hero version.
    If version_name is "latest" then return the latest version.
    Otherwise use version_name as the exact integer version name.

    """

    if isinstance(folder_path, dict) and "name" in folder_path:
        # Allow explicitly passing asset document
        folder_entity = folder_path
    else:
        folder_entity = ayon_api.get_folder_by_path(
            project_name, folder_path, fields=["id"])
        if not folder_entity:
            return

    if isinstance(product_name, dict) and "name" in product_name:
        # Allow explicitly passing subset document
        product_entity = product_name
    else:
        product_entity = ayon_api.get_product_by_name(
            project_name,
            product_name,
            folder_id=folder_entity["id"],
            fields=["id"])
        if not product_entity:
            return

    if version_name == "hero":
        version_entity = ayon_api.get_hero_version_by_product_id(
            project_name, product_id=product_entity["id"])
    elif version_name == "latest":
        version_entity = ayon_api.get_last_version_by_product_id(
            project_name, product_id=product_entity["id"])
    else:
        version_entity = ayon_api.get_version_by_name(
            project_name, version_name, product_id=product_entity["id"])
    if not version_entity:
        return

    return ayon_api.get_representation_by_name(
        project_name, representation_name, version_id=version_entity["id"])
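
Editor's note: a minimal usage sketch of these two helpers (not part of the diff; the project and folder names are made up — the import path matches the one used later in this commit):

from ayon_core.pipeline.load.utils import (
    get_representation_by_names,
    get_representation_path_by_names,
)

# Resolve the representation entity of the latest version
representation = get_representation_by_names(
    "myProject", "/char/villain", "modelMain", "latest", "usd"
)

# Or resolve straight to a forward-slashed filesystem path (None if missing)
path = get_representation_path_by_names(
    project_name="myProject",
    folder_path="/char/villain",
    product_name="modelMain",
    version_name="latest",
    representation_name="usd",
)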


def is_compatible_loader(Loader, context):
    """Return whether a loader is compatible with a context.

@ -4,6 +4,7 @@ import inspect
import copy
import tempfile
import xml.etree.ElementTree
from typing import Optional, Union

import pyblish.util
import pyblish.plugin

@ -20,7 +21,6 @@ from ayon_core.pipeline import (
    Anatomy
)
from ayon_core.pipeline.plugin_discover import DiscoverResult

from .constants import (
    DEFAULT_PUBLISH_TEMPLATE,
    DEFAULT_HERO_PUBLISH_TEMPLATE,

@ -933,3 +933,48 @@ def get_publish_instance_families(instance):
    families.discard(family)
    output.extend(families)
    return output

def get_instance_expected_output_path(
    instance: pyblish.api.Instance,
    representation_name: str,
    ext: Union[str, None],
    version: Optional[int] = None
):
    """Return expected publish filepath for representation in instance.

    This does not validate whether the instance has any representation by the
    given name, extension and/or version.

    Arguments:
        instance (pyblish.api.Instance): Publish instance.
        representation_name (str): Representation name.
        ext (Union[str, None]): Extension for the file.
            When None, the `ext` will be set to the representation name.
        version (Optional[int]): If provided, force it to format to this
            particular version.

    Returns:
        str: Resolved path.

    """

    if ext is None:
        ext = representation_name
    if version is None:
        version = instance.data["version"]

    context = instance.context
    anatomy = context.data["anatomy"]

    template_data = copy.deepcopy(instance.data["anatomyData"])
    template_data.update({
        "ext": ext,
        "representation": representation_name,
        "variant": instance.data.get("variant"),
        "version": version
    })

    path_template_obj = anatomy.get_template_item("publish", "default")["path"]
    template_filled = path_template_obj.format_strict(template_data)
    return os.path.normpath(template_filled)
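
Editor's note: a sketch of calling this helper from inside a publish plugin (not part of the diff; the plugin class and log key are hypothetical — the import path matches the one used later in this commit):

import pyblish.api
from ayon_core.pipeline.publish.lib import get_instance_expected_output_path


class CollectExpectedUsdPath(pyblish.api.InstancePlugin):
    order = pyblish.api.CollectorOrder + 0.4

    def process(self, instance):
        # Where would this instance's "usd" representation land on disk?
        path = get_instance_expected_output_path(
            instance, representation_name="usd", ext=None
        )
        self.log.debug("Expected output path: %s" % path)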
@ -1,92 +1,241 @@
import dataclasses
import os
import re
import logging

import ayon_api
try:
    from pxr import Usd, UsdGeom, Sdf, Kind
    from pxr import UsdGeom, Sdf, Kind
except ImportError:
    # Allow to fall back on Multiverse 6.3.0+ pxr usd library
    from mvpxr import Usd, UsdGeom, Sdf, Kind

from ayon_core.pipeline import Anatomy, get_current_project_name
from ayon_core.pipeline.template_data import get_template_data
    from mvpxr import UsdGeom, Sdf, Kind

log = logging.getLogger(__name__)


# The predefined steps order used for bootstrapping USD Shots and Assets.
# These are ordered from strongest to weakest opinions, like in USD.
PIPELINE = {
    "shot": [
        "usdLighting",
        "usdFx",
        "usdSimulation",
        "usdAnimation",
        "usdLayout",
    ],
    "asset": ["usdShade", "usdModel"],
}
@dataclasses.dataclass
class Layer:
    layer: Sdf.Layer
    path: str
    # Allow to anchor a layer to another so that when the layer would be
    # exported it'd write itself out relative to its anchor
    anchor: 'Layer' = None

    @property
    def identifier(self):
        return self.layer.identifier

    def get_full_path(self):
        """Return full path relative to the anchor layer"""
        if not os.path.isabs(self.path) and self.anchor:
            anchor_path = self.anchor.get_full_path()
            root = os.path.dirname(anchor_path)
            return os.path.normpath(os.path.join(root, self.path))
        return self.path

    def export(self, path=None, args=None):
        """Save the layer"""
        if path is None:
            path = self.get_full_path()

        if args is None:
            args = self.layer.GetFileFormatArguments()

        self.layer.Export(path, args=args)

    @classmethod
    def create_anonymous(cls, path, tag="LOP", anchor=None):
        """Create an anonymous layer instance.

        Arguments:
            path (str): The layer's filepath.
            tag (Optional[str]): The tag to give to the anonymous layer.
                This defaults to 'LOP' because Houdini requires that tag for
                its in-memory layers that it will be able to manage. In other
                integrations no similar requirements have been found so it was
                deemed a 'safe' default for now.
            anchor (Optional[Layer]): Another layer to relatively anchor to.
        """
        sdf_layer = Sdf.Layer.CreateAnonymous(tag)
        return cls(layer=sdf_layer, path=path, anchor=anchor)
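
Editor's note: how the anchoring behaves in practice (not part of the diff; the paths are made up, and the resolved result is shown POSIX-style):

main = Layer.create_anonymous("/projects/hero/hero.usd")
payload = Layer.create_anonymous("./payload.usd", anchor=main)
payload.get_full_path()  # -> "/projects/hero/payload.usd"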


def create_asset(
    filepath, asset_name, reference_layers, kind=Kind.Tokens.component
def setup_asset_layer(
    layer,
    asset_name,
    reference_layers=None,
    kind=Kind.Tokens.component,
    define_class=True,
    force_add_payload=False,
    set_payload_path=False
):
    """
    Creates an asset file that consists of a top level layer and sublayers for
    shading and geometry.
    Adds an asset prim to the layer with the `reference_layers` added as
    references for e.g. geometry and shading.

    The referenced layers will be moved into a separate `./payload.usd` file
    that the asset file uses to allow deferred loading of the heavier
    geometrical data. An example would be:

    asset.usd   <-- out filepath
      payload.usd   <-- always automatically added in-between
        look.usd    <-- reference layer 0 from `reference_layers` argument
        model.usd   <-- reference layer 1 from `reference_layers` argument

    If `define_class` is enabled then a `/__class__/{asset_name}` class
    definition will be created that the root asset inherits from.

    Examples:
    >>> create_asset("/path/to/asset.usd",
    >>>              asset_name="test",
    >>>              reference_layers=["./model.usd", "./look.usd"])

    Returns:
        List[Tuple[Sdf.Layer, str]]: List of created layers with their
            preferred output save paths.

    Args:
        filepath (str): Filepath where the asset.usd file will be saved.
        layer (Sdf.Layer): Layer to set up the asset structure for.
        asset_name (str): The name for the Asset identifier and default prim.
        reference_layers (list): USD Files to reference in the asset.
            Note that the bottom layer (first file, like a model) would
            be last in the list. The strongest layer will be the first
            index.
        asset_name (str): The name for the Asset identifier and default prim.
        kind (pxr.Kind): A USD Kind for the root asset.
        define_class (bool): Define a `/__class__/{asset_name}` class which
            the root asset prim will inherit from.
        force_add_payload (bool): Generate payload layer even if no
            reference paths are set - thus generating an empty layer.
        set_payload_path (bool): Whether to directly set the payload asset
            path to `./payload.usd` or not. Defaults to False.

    """
    # Define root prim for the asset and make it the default for the stage.
    prim_name = asset_name

    if define_class:
        class_prim = Sdf.PrimSpec(
            layer.pseudoRoot,
            "__class__",
            Sdf.SpecifierClass,
        )
        Sdf.PrimSpec(
            class_prim,
            prim_name,
            Sdf.SpecifierClass,
        )

    asset_prim = Sdf.PrimSpec(
        layer.pseudoRoot,
        prim_name,
        Sdf.SpecifierDef,
        "Xform"
    )

    if define_class:
        asset_prim.inheritPathList.prependedItems[:] = [
            "/__class__/{}".format(prim_name)
        ]

    # Define Kind
    # Usually we will "loft up" the kind authored into the exported geometry
    # layer rather than re-stamping here; we'll leave that for a later
    # tutorial, and just be explicit here.
    asset_prim.kind = kind

    # Set asset info
    asset_prim.assetInfo["name"] = asset_name
    asset_prim.assetInfo["identifier"] = "%s/%s.usd" % (asset_name, asset_name)

    # asset.assetInfo["version"] = asset_version
    set_layer_defaults(layer, default_prim=asset_name)

    created_layers = []

    # Add references to the asset prim
    if force_add_payload or reference_layers:
        # Create a relative payload file to filepath through which we sublayer
        # the heavier payloads
        # Prefix with `LOP` just so that if Houdini ROP were to save
        # the nodes it's capable of exporting with explicit save path
        payload_layer = Sdf.Layer.CreateAnonymous("LOP",
                                                  args={"format": "usda"})
        set_layer_defaults(payload_layer, default_prim=asset_name)
        created_layers.append(Layer(layer=payload_layer,
                                    path="./payload.usd"))

        # Add payload
        if set_payload_path:
            payload_identifier = "./payload.usd"
        else:
            payload_identifier = payload_layer.identifier

        asset_prim.payloadList.prependedItems[:] = [
            Sdf.Payload(assetPath=payload_identifier)
        ]

        # Add sublayers to the payload layer
        # Note: Sublayering is tricky because it requires that the sublayers
        #   actually define the path at defaultPrim otherwise the payload
        #   reference will not find the defaultPrim and turn up empty.
        if reference_layers:
            for ref_layer in reference_layers:
                payload_layer.subLayerPaths.append(ref_layer)

    return created_layers
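
Editor's note: a sketch of driving setup_asset_layer directly (not part of the diff; the output paths are made up):

layer = Sdf.Layer.CreateAnonymous()
extra_layers = setup_asset_layer(
    layer,
    asset_name="hero",
    reference_layers=["./look.usd", "./model.usd"],
    set_payload_path=True,
)
layer.Export("/projects/hero/hero.usd", args={"format": "usda"})
for extra in extra_layers:  # e.g. the generated payload layer
    extra.export("/projects/hero/payload.usd")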


def create_asset(
    filepath,
    asset_name,
    reference_layers=None,
    kind=Kind.Tokens.component,
    define_class=True
):
    """Creates and saves a prepared asset stage layer.

    Creates an asset file that consists of a top level asset prim, asset info
    and references in the provided `reference_layers`.

    Returns:
        list: Created layers

    """
    # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example

    log.info("Creating asset at %s", filepath)
    sdf_layer = Sdf.Layer.CreateAnonymous()
    layer = Layer(layer=sdf_layer, path=filepath)

    created_layers = setup_asset_layer(
        layer=sdf_layer,
        asset_name=asset_name,
        reference_layers=reference_layers,
        kind=kind,
        define_class=define_class,
        set_payload_path=True
    )
    for created_layer in created_layers:
        created_layer.anchor = layer
        created_layer.export()

    # Make the layer ascii - good for readability, plus the file is small
    root_layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"})
    stage = Usd.Stage.Open(root_layer)
    log.debug("Creating asset at %s", filepath)
    layer.export(args={"format": "usda"})

    # Define a prim for the asset and make it the default for the stage.
    asset_prim = UsdGeom.Xform.Define(stage, "/%s" % asset_name).GetPrim()
    stage.SetDefaultPrim(asset_prim)

    # Let viewing applications know how to orient a free camera properly
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)

    # Usually we will "loft up" the kind authored into the exported geometry
    # layer rather than re-stamping here; we'll leave that for a later
    # tutorial, and just be explicit here.
    model = Usd.ModelAPI(asset_prim)
    if kind:
        model.SetKind(kind)

    model.SetAssetName(asset_name)
    model.SetAssetIdentifier("%s/%s.usd" % (asset_name, asset_name))

    # Add references to the asset prim
    references = asset_prim.GetReferences()
    for reference_filepath in reference_layers:
        references.AddReference(reference_filepath)

    stage.GetRootLayer().Save()
    return [layer] + created_layers


def create_shot(filepath, layers, create_layers=False):
    """Create a shot with separate layers for departments.

    Examples:
    >>> create_shot("/path/to/shot.usd",
    >>>             layers=["lighting.usd", "fx.usd", "animation.usd"])
    "/path/to/shot.usd"

    Args:
        filepath (str): Filepath where the asset.usd file will be saved.
        layers (str): When provided this will be added verbatim in the
        layers (list): When provided this will be added verbatim in the
            subLayerPaths layers. When the provided layer paths do not exist
            they are generated using Sdf.Layer.CreateNew
        create_layers (bool): Whether to create the stub layers on disk if
            they do not exist yet.


@ -95,10 +244,9 @@ def create_shot(filepath, layers, create_layers=False):

    """
    # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example
    root_layer = Sdf.Layer.CreateAnonymous()

    stage = Usd.Stage.CreateNew(filepath)
    log.info("Creating shot at %s" % filepath)

    created_layers = [root_layer]
    for layer_path in layers:
        if create_layers and not os.path.exists(layer_path):
            # We use the Sdf API here to quickly create layers. Also, we're

@ -108,143 +256,114 @@ def create_shot(filepath, layers, create_layers=False):
            if not os.path.exists(layer_folder):
                os.makedirs(layer_folder)

            Sdf.Layer.CreateNew(layer_path)
            new_layer = Sdf.Layer.CreateNew(layer_path)
            created_layers.append(new_layer)

        stage.GetRootLayer().subLayerPaths.append(layer_path)
        root_layer.subLayerPaths.append(layer_path)

    # Lets viewing applications know how to orient a free camera properly
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)
    stage.GetRootLayer().Save()
    set_layer_defaults(root_layer)
    log.debug("Creating shot at %s" % filepath)
    root_layer.Export(filepath, args={"format": "usda"})

    return filepath
    return created_layers
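
Editor's note: a usage sketch of the reworked create_shot (not part of the diff; the filepaths are made up — note that relative stub layer paths resolve against the current working directory):

created = create_shot(
    "/projects/show/shots/sh010/sh010.usd",
    layers=["./lighting.usd", "./fx.usd", "./animation.usd"],
    create_layers=True,
)
# `created` holds the root layer plus any stub layers created on disk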


def create_model(filename, folder_path, variant_product_names):
    """Create a USD Model file.
def add_ordered_sublayer(layer, contribution_path, layer_id, order=None,
                         add_sdf_arguments_metadata=True):
    """Add sublayer paths in the Sdf.Layer at given "orders"

    For each of the variation paths it will payload the path and set its
    relevant variation name.
    USD does not provide a way to set metadata per sublayer entry, but we can
    'sneak it in' by adding it as part of the file url after :SDF_FORMAT_ARGS:
    There they will then just be unused args that we can parse later again
    to access our data.

    A higher order will appear earlier in the subLayerPaths as a stronger
    opinion. An unordered layer (`order=None`) will be stronger than any
    ordered opinion and thus will be inserted at the start of the list.

    Args:
        layer (Sdf.Layer): Layer to add sublayers in.
        contribution_path (str): Path/URI to add.
        layer_id (str): Token that if found for an existing layer it will
            replace that layer.
        order (Optional[int]): Order to place the contribution in
            the sublayers. When `None` no ordering is considered, nor will
            ordering metadata be written if `add_sdf_arguments_metadata` is
            False.
        add_sdf_arguments_metadata (bool): Add metadata into the filepath
            to store the `layer_id` and `order` so ordering can be maintained
            in the future as intended.

    Returns:
        str: The resulting contribution path (which may include the
            sdf format args metadata if enabled)

    """

    project_name = get_current_project_name()
    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    assert folder_entity, "Folder not found: %s" % folder_path
    # Add the order with the contribution path so that for future
    # contributions we can again use it to magically fit into the
    # ordering. We put this in the path because sublayer paths do
    # not allow customData to be stored.
    def _format_path(path, layer_id, order):
        # TODO: Avoid this hack to store 'order' and 'layer' metadata
        #   for sublayers; in USD sublayers can't hold customdata
        if not add_sdf_arguments_metadata:
            return path
        data = {"layer_id": str(layer_id)}
        if order is not None:
            data["order"] = str(order)
        return Sdf.Layer.CreateIdentifier(path, data)

    variants = []
    for product_name in variant_product_names:
        prefix = "usdModel"
        if product_name.startswith(prefix):
            # Strip off `usdModel_`
            variant = product_name[len(prefix):]
        else:
            raise ValueError(
                "Model products must start with usdModel: %s" % product_name
    # If the layer was already in the layers, then replace it
    for index, existing_path in enumerate(layer.subLayerPaths):
        args = get_sdf_format_args(existing_path)
        existing_layer = args.get("layer_id")
        if existing_layer == layer_id:
            # Put it in the same position where it was before when swapping
            # it with the original, also take over its order metadata
            order = args.get("order")
            if order is not None:
                order = int(order)
            else:
                order = None
            contribution_path = _format_path(contribution_path,
                                             order=order,
                                             layer_id=layer_id)
            log.debug(
                f"Replacing existing layer: {layer.subLayerPaths[index]} "
                f"-> {contribution_path}"
            )
            layer.subLayerPaths[index] = contribution_path
            return contribution_path

        path = get_usd_master_path(
            folder_entity=folder_entity,
            product_name=product_name,
            representation="usd"
        )
        variants.append((variant, path))
    contribution_path = _format_path(contribution_path,
                                     order=order,
                                     layer_id=layer_id)

    stage = _create_variants_file(
        filename,
        variants=variants,
        variantset="model",
        variant_prim="/root",
        reference_prim="/root/geo",
        as_payload=True,
    )
    # If an order is defined and other layers are ordered then place it before
    # the first order where existing order is lower
    if order is not None:
        for index, existing_path in enumerate(layer.subLayerPaths):
            args = get_sdf_format_args(existing_path)
            existing_order = args.get("order")
            if existing_order is not None and int(existing_order) < order:
                log.debug(
                    f"Inserting new layer at {index}: {contribution_path}"
                )
                layer.subLayerPaths.insert(index, contribution_path)
                return contribution_path
        # Weakest ordered opinion
        layer.subLayerPaths.append(contribution_path)
        return contribution_path

    UsdGeom.SetStageMetersPerUnit(stage, 1)
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)

    # modelAPI = Usd.ModelAPI(root_prim)
    # modelAPI.SetKind(Kind.Tokens.component)

    # See http://openusd.org/docs/api/class_usd_model_a_p_i.html#details
    # for more on assetInfo
    # modelAPI.SetAssetName(asset)
    # modelAPI.SetAssetIdentifier(asset)

    stage.GetRootLayer().Save()
    # If no paths found with an order to put it next to
    # then put the sublayer at the start as the strongest opinion
    log.debug(f"Appending new layer: {contribution_path}")
    layer.subLayerPaths.insert(0, contribution_path)
    return contribution_path
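
Editor's note: how the ordering plays out (not part of the diff; the filenames are made up):

shot_layer = Sdf.Layer.CreateAnonymous()
add_ordered_sublayer(shot_layer, "anim.usd", layer_id="animation", order=300)
add_ordered_sublayer(shot_layer, "fx.usd", layer_id="fx", order=500)
# fx (order 500) now sits before animation (order 300): higher order is
# stronger. Re-contributing an existing layer_id swaps the path in place:
add_ordered_sublayer(shot_layer, "anim_v2.usd", layer_id="animation",
                     order=300)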


def create_shade(filename, folder_path, variant_product_names):
    """Create a master USD shade file for an asset.

    For each available model variation this should generate a reference
    to a `usdShade_{modelVariant}` product.

    """

    project_name = get_current_project_name()
    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    assert folder_entity, "Folder not found: %s" % folder_path

    variants = []

    for product_name in variant_product_names:
        prefix = "usdModel"
        if product_name.startswith(prefix):
            # Strip off `usdModel_`
            variant = product_name[len(prefix):]
        else:
            raise ValueError(
                "Model products must start with usdModel: %s" % product_name
            )

        shade_product_name = re.sub(
            "^usdModel", "usdShade", product_name
        )
        path = get_usd_master_path(
            folder_entity=folder_entity,
            product_name=shade_product_name,
            representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename, variants=variants, variantset="model", variant_prim="/root"
    )

    stage.GetRootLayer().Save()


def create_shade_variation(filename, folder_path, model_variant, shade_variants):
    """Create the master Shade file for a specific model variant.

    This should reference all shade variants for the specific model variant.

    """

    project_name = get_current_project_name()
    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    assert folder_entity, "Folder not found: %s" % folder_path

    variants = []
    for variant in shade_variants:
        product_name = "usdShade_{model}_{shade}".format(
            model=model_variant, shade=variant
        )
        path = get_usd_master_path(
            folder_entity=folder_entity,
            product_name=product_name,
            representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename, variants=variants, variantset="shade", variant_prim="/root"
    )

    stage.GetRootLayer().Save()


def _create_variants_file(
    filename,
def add_variant_references_to_layer(
    variants,
    variantset,
    default_variant=None,
@ -252,112 +371,316 @@ def _create_variants_file(
    reference_prim=None,
    set_default_variant=True,
    as_payload=False,
    skip_variant_on_single_file=True,
    skip_variant_on_single_file=False,
    layer=None
):
    """Add or set a prim's variants to reference specified paths in the layer.

    root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"})
    stage = Usd.Stage.Open(root_layer)
    Note:
        This does not clear any of the other opinions than replacing
        `prim.referenceList.prependedItems` with the new reference.
        If `as_payload=True` then this only does it for payloads and leaves
        references as they were intact.

    root_prim = stage.DefinePrim(variant_prim)
    stage.SetDefaultPrim(root_prim)
    Note:
        If `skip_variant_on_single_file=True` it does *not* check if any
        other variants do exist; it only checks whether you are currently
        adding more than one, since it'd be hard to find out whether a
        previous publish was also skipped and whether a newly added file
        should now suddenly become its original 'variant'. As such it's
        recommended to keep this disabled unless you know you're not
        updating the file later into the same variant set.

    def _reference(path):
        """Reference/Payload path depending on function arguments"""
    Examples:
    >>> layer = add_variant_references_to_layer("model.usd",
    >>>     variants=[
    >>>         ("main", "main.usd"),
    >>>         ("damaged", "damaged.usd"),
    >>>         ("twisted", "twisted.usd")
    >>>     ],
    >>>     variantset="model")
    >>> layer.Export("model.usd", args={"format": "usda"})

        if reference_prim:
            prim = stage.DefinePrim(reference_prim)
        else:
            prim = root_prim
    Arguments:
        variants (List[List[str, str]]): List of two-tuples of variant name
            to the filepath that should be referenced in for that variant.
        variantset (str): Name of the variant set.
        default_variant (str): Default variant to set. If not provided
            the first variant will be used.
        variant_prim (str): Prim path to add the variant set on.
        reference_prim (str): Path to the reference prim where to add the
            references and variant sets.
        set_default_variant (bool): Whether to set the default variant.
            When False no default variant will be set, even if a value
            was provided to `default_variant`.
        as_payload (bool): When enabled, instead of referencing use payloads.
        skip_variant_on_single_file (bool): If this is enabled and only
            a single variant is provided then do not create the variant set
            but just reference that single file.
        layer (Optional[Sdf.Layer]): When provided operate on this layer,
            otherwise create an anonymous layer in memory.

        if as_payload:
            # Payload
            prim.GetPayloads().AddPayload(Sdf.Payload(path))
        else:
            # Reference
            prim.GetReferences().AddReference(Sdf.Reference(path))
    Returns:
        Sdf.Layer: The layer with the added references inside the variants.

    """
    if layer is None:
        layer = Sdf.Layer.CreateAnonymous()
        set_layer_defaults(layer, default_prim=variant_prim.strip("/"))

    prim_path_to_get_variants = Sdf.Path(variant_prim)
    root_prim = get_or_define_prim_spec(layer, variant_prim, "Xform")

    # TODO: Define why there's a need for separate variant_prim and
    #   reference_prim attribute. When should they differ? Does it even work?
    if not reference_prim:
        reference_prim = root_prim
    else:
        reference_prim = get_or_define_prim_spec(layer, reference_prim,
                                                 "Xform")

    assert variants, "Must have variants, got: %s" % variants

    log.info(filename)

    if skip_variant_on_single_file and len(variants) == 1:
        # Reference directly, no variants
        variant_path = variants[0][1]
        _reference(variant_path)
        if as_payload:
            # Payload
            reference_prim.payloadList.prependedItems.append(
                Sdf.Payload(variant_path)
            )
        else:
            # Reference
            reference_prim.referenceList.prependedItems.append(
                Sdf.Reference(variant_path)
            )

        log.info("Non-variants..")
        log.info("Path: %s" % variant_path)
        log.debug("Creating without variants due to single file only.")
        log.debug("Path: %s", variant_path)

    else:
        # Variants
        append = Usd.ListPositionBackOfAppendList
        variant_set = root_prim.GetVariantSets().AddVariantSet(
            variantset, append
        )

        for variant, variant_path in variants:

        for variant, variant_filepath in variants:
            if default_variant is None:
                default_variant = variant

            variant_set.AddVariant(variant, append)
            variant_set.SetVariantSelection(variant)
            with variant_set.GetVariantEditContext():
                _reference(variant_path)
            set_variant_reference(layer,
                                  prim_path=prim_path_to_get_variants,
                                  variant_selections=[[variantset, variant]],
                                  path=variant_filepath,
                                  as_payload=as_payload)

            log.info("Variants..")
            log.info("Variant: %s" % variant)
            log.info("Path: %s" % variant_path)
        if set_default_variant and default_variant is not None:
            # Set default variant selection
            root_prim.variantSelections[variantset] = default_variant

    if set_default_variant:
        variant_set.SetVariantSelection(default_variant)

    return stage
    return layer


def get_usd_master_path(folder_entity, product_name, representation):
    """Get the filepath for a .usd file of a product.
def set_layer_defaults(layer,
                       up_axis=UsdGeom.Tokens.y,
                       meters_per_unit=1.0,
                       default_prim=None):
    """Set some default metadata for the SdfLayer.

    This will return the path to an unversioned master file generated by
    `usd_master_file.py`.
    Arguments:
        layer (Sdf.Layer): The layer to set defaults for via the Sdf API.
        up_axis (UsdGeom.Token): Which axis is the up-axis.
        meters_per_unit (float): Meters per unit.
        default_prim (Optional[str]): Default prim name.

    """
    # Set default prim
    if default_prim is not None:
        layer.defaultPrim = default_prim

    # Let viewing applications know how to orient a free camera properly
    # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)
    layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, up_axis)

    # Set meters per unit
    layer.pseudoRoot.SetInfo(UsdGeom.Tokens.metersPerUnit,
                             float(meters_per_unit))


def get_or_define_prim_spec(layer, prim_path, type_name):
    """Get or create a PrimSpec in the layer.

    Note:
        This creates a Sdf.PrimSpec with Sdf.SpecifierDef but if the PrimSpec
        already exists this will not force it to be a Sdf.SpecifierDef and
        it may remain what it was, e.g. Sdf.SpecifierOver

    Args:
        folder_entity (Union[str, dict]): Folder entity.
        product_name (str): Product name.
        representation (str): Representation name.
        layer (Sdf.Layer): The layer to create it in.
        prim_path (Union[str, Sdf.Path]): Prim path to create.
        type_name (str): Type name for the PrimSpec.
            This will only be set if the prim does not exist in the layer
            yet. It does not update type for an existing prim.

    Returns:
        Sdf.PrimSpec: The PrimSpec in the layer for the given prim path.

    """
    prim_spec = layer.GetPrimAtPath(prim_path)
    if prim_spec:
        return prim_spec

    project_name = get_current_project_name()
    project_entity = ayon_api.get_project(project_name)
    anatomy = Anatomy(project_name, project_entity=project_entity)
    prim_spec = Sdf.CreatePrimInLayer(layer, prim_path)
    prim_spec.specifier = Sdf.SpecifierDef
    prim_spec.typeName = type_name
    return prim_spec

    template_data = get_template_data(project_entity, folder_entity)
    template_data.update({
        "product": {
            "name": product_name
        },
        "subset": product_name,
        "representation": representation,
        "version": 0,  # stub version zero
    })

    template_obj = anatomy.get_template_item(
        "publish", "default", "path"
def variant_nested_prim_path(prim_path, variant_selections):
    """Return the Sdf.Path for a nested variant selection at prim path.

    Examples:
    >>> prim_path = Sdf.Path("/asset")
    >>> variant_spec = variant_nested_prim_path(
    >>>     prim_path,
    >>>     variant_selections=[["model", "main"], ["look", "main"]]
    >>> )
    >>> variant_spec.path

    Args:
        prim_path (Sdf.Path): The prim path to create the spec in.
        variant_selections (List[List[str, str]]): A list of variant set names
            and variant names to get the prim spec in.

    Returns:
        Sdf.Path: The variant prim path

    """
    variant_prim_path = Sdf.Path(prim_path)
    for variant_set_name, variant_name in variant_selections:
        variant_prim_path = variant_prim_path.AppendVariantSelection(
            variant_set_name, variant_name)
    return variant_prim_path
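
Editor's note: the result uses USD's variant-selection path syntax (illustration only; the prim path follows the docstring example above):

>>> variant_nested_prim_path(
...     Sdf.Path("/asset"),
...     variant_selections=[["model", "main"], ["look", "main"]]
... )
Sdf.Path('/asset{model=main}{look=main}')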


def add_ordered_reference(
    layer,
    prim_path,
    reference,
    order
):
    """Add reference alongside other ordered references.

    Args:
        layer (Sdf.Layer): Layer to operate in.
        prim_path (Sdf.Path): Prim path to reference into.
            This may include variant selections to reference into a prim
            inside the variant selection.
        reference (Sdf.Reference): Reference to add.
        order (int): Order.

    Returns:
        Sdf.PrimSpec: The prim spec for the prim path.

    """
    assert isinstance(order, int), "order must be integer"

    # Sdf.Reference is immutable, see: `pxr/usd/sdf/wrapReference.cpp`
    # A Sdf.Reference can't be edited in Python so we create a new entry
    # matching the original with the extra data entry added.
    custom_data = reference.customData
    custom_data["ayon_order"] = order
    reference = Sdf.Reference(
        assetPath=reference.assetPath,
        primPath=reference.primPath,
        layerOffset=reference.layerOffset,
        customData=custom_data
    )
    path = template_obj.format_strict(template_data)

    # Remove the version folder
    product_folder = os.path.dirname(os.path.dirname(path))
    master_folder = os.path.join(product_folder, "master")
    fname = "{0}.{1}".format(product_name, representation)
    # TODO: inherit type from outside of variants if it has it
    prim_spec = get_or_define_prim_spec(layer, prim_path, "Xform")

    return os.path.join(master_folder, fname).replace("\\", "/")
    # Insert new entry at correct order
    entries = list(prim_spec.referenceList.prependedItems)

    if not entries:
        prim_spec.referenceList.prependedItems.append(reference)
        return prim_spec

    for index, existing_ref in enumerate(entries):
        existing_order = existing_ref.customData.get("ayon_order")
        if existing_order is not None and existing_order < order:
            log.debug(
                f"Inserting new reference at {index}: {reference}"
            )
            entries.insert(index, reference)
            break
    else:
        prim_spec.referenceList.prependedItems.append(reference)
        return prim_spec

    prim_spec.referenceList.prependedItems[:] = entries
    return prim_spec


def parse_avalon_uri(uri):
    # URI Pattern: avalon://{folder}/{product}.{ext}
    pattern = r"avalon://(?P<folder>[^/.]*)/(?P<product>[^/]*)\.(?P<ext>.*)"
    if uri.startswith("avalon://"):
        match = re.match(pattern, uri)
        if match:
            return match.groupdict()
def set_variant_reference(
    sdf_layer,
    prim_path,
    variant_selections,
    path,
    as_payload=False,
    append=True
):
    """Get or define variant selection at prim path and add a reference.

    If the Variant Prim already exists the prepended references are replaced
    with a reference to `path`; it is overridden.

    Args:
        sdf_layer (Sdf.Layer): Layer to operate in.
        prim_path (Union[str, Sdf.Path]): Prim path to add variant to.
        variant_selections (List[List[str, str]]): A list of variant set names
            and variant names to get the prim spec in.
        path (str): Path to reference or payload.
        as_payload (bool): When enabled it will generate a payload instead of
            a reference. Defaults to False.
        append (bool): When enabled it will append the reference or payload
            to prepended items, otherwise it will replace it.

    Returns:
        Sdf.PrimSpec: The prim spec for the prim path at the given
            variant selection.

    """
    prim_path = Sdf.Path(prim_path)
    # TODO: inherit type from outside of variants if it has it
    get_or_define_prim_spec(sdf_layer, prim_path, "Xform")
    variant_prim_path = variant_nested_prim_path(prim_path, variant_selections)
    variant_prim = get_or_define_prim_spec(sdf_layer,
                                           variant_prim_path,
                                           "Xform")
    # Replace the prepended references or payloads
    if as_payload:
        # Payload
        if append:
            variant_prim.payloadList.prependedItems.append(
                Sdf.Payload(assetPath=path)
            )
        else:
            variant_prim.payloadList.prependedItems[:] = [
                Sdf.Payload(assetPath=path)
            ]
    else:
        # Reference
        if append:
            variant_prim.referenceList.prependedItems.append(
                Sdf.Reference(assetPath=path)
            )
        else:
            variant_prim.referenceList.prependedItems[:] = [
                Sdf.Reference(assetPath=path)
            ]

    return variant_prim


def get_sdf_format_args(path):
    """Return SDF_FORMAT_ARGS parsed to `dict`"""
    _raw_path, data = Sdf.Layer.SplitIdentifier(path)
    return data
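
Editor's note: an identifier round-trip illustration (not part of the diff; the key names follow add_ordered_sublayer above, and the exact identifier string is my understanding of the SDF format-args syntax):

identifier = Sdf.Layer.CreateIdentifier(
    "look.usd", {"layer_id": "look", "order": "200"}
)
# identifier == "look.usd:SDF_FORMAT_ARGS:layer_id=look&order=200"
get_sdf_format_args(identifier)  # -> {"layer_id": "look", "order": "200"}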

@ -64,7 +64,8 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
        "skeletalMesh",
        "xgen",
        "yeticacheUE",
        "tycache"
        "tycache",
        "usd"
    ]

    def process(self, instance):

@ -0,0 +1,879 @@
from operator import attrgetter
import dataclasses
import os
from typing import Dict

import pyblish.api
from pxr import Sdf

from ayon_core.lib import (
    TextDef,
    BoolDef,
    UISeparatorDef,
    UILabelDef,
    EnumDef
)
from ayon_core.pipeline.usdlib import (
    get_or_define_prim_spec,
    add_ordered_reference,
    variant_nested_prim_path,
    setup_asset_layer,
    add_ordered_sublayer,
    set_layer_defaults
)
from ayon_core.pipeline.entity_uri import (
    construct_ayon_entity_uri,
    parse_ayon_entity_uri
)
from ayon_core.pipeline.load.utils import get_representation_path_by_names
from ayon_core.pipeline.publish.lib import get_instance_expected_output_path
from ayon_core.pipeline import publish


# This global toggle is here mostly for debugging purposes and should usually
# be True so that new publishes merge and extend on previous contributions.
# With this enabled a new variant model layer publish would e.g. merge with
# the model layer's other variants nicely, so you can build up an asset by
# individual publishes instead of having to republish every contribution
# at the same time.
BUILD_INTO_LAST_VERSIONS = True


@dataclasses.dataclass
class _BaseContribution:
    # What are we contributing?
    instance: pyblish.api.Instance  # instance that contributes it

    # Where are we contributing to?
    layer_id: str  # usually the department or task name
    target_product: str  # target product the layer should merge to

    order: int


class SublayerContribution(_BaseContribution):
    """Sublayer contribution"""


@dataclasses.dataclass
class VariantContribution(_BaseContribution):
    """Reference contribution within a Variant Set"""

    # Variant
    variant_set_name: str
    variant_name: str
    variant_is_default: bool  # Whether to author variant selection opinion


def get_representation_path_in_publish_context(
    context: pyblish.api.Context,
    project_name,
    folder_path,
    product_name,
    version_name,
    representation_name,
):
    """Return resolved path for product if present in publishing context.

    Allow resolving 'latest' paths from a publishing context's instances
    as if they will exist after publishing without them being integrated yet.

    Use first instance that has same folder path and product name,
    and contains representation with passed name.

    Args:
        context (pyblish.api.Context): Publishing context.
        project_name (str): Project name.
        folder_path (str): Folder path.
        product_name (str): Product name.
        version_name (str): Version name.
        representation_name (str): Representation name.

    Returns:
        Union[str, None]: Returns the path if it could be resolved

    """
    # The AYON publishing logic is set up in such a way that you can not
    # publish to another project. As such, we know if the project name we're
    # looking for doesn't match the publishing context it'll not be in there.
    if context.data["projectName"] != project_name:
        return

    if version_name == "hero":
        raise NotImplementedError(
            "Hero version resolving not implemented from context"
        )

    # Search first in publish context to allow resolving latest versions
    # from e.g. the current publish session if the context is provided
    specific_version = isinstance(version_name, int)
    for instance in context:
        if instance.data.get("folderPath") != folder_path:
            continue

        if instance.data.get("productName") != product_name:
            continue

        # Only consider if the instance has a representation by
        # that name
        representations = instance.data.get("representations", [])
        if not any(representation.get("name") == representation_name
                   for representation in representations):
            continue

        return get_instance_expected_output_path(
            instance,
            representation_name=representation_name,
            ext=None,
            version=version_name if specific_version else None
        )


def get_instance_uri_path(
    instance,
    resolve=True
):
    """Return path for instance's usd representation"""
    context = instance.context
    folder_path = instance.data["folderPath"]
    product_name = instance.data["productName"]
    project_name = context.data["projectName"]

    # Get the layer's published path
    path = construct_ayon_entity_uri(
        project_name=project_name,
        folder_path=folder_path,
        product=product_name,
        version="latest",
        representation_name="usd"
    )

    # Resolve contribution path
    # TODO: Remove this when Asset Resolver is used
    if resolve:
        query = parse_ayon_entity_uri(path)
        names = {
            "project_name": query["project"],
            "folder_path": query["folderPath"],
            "product_name": query["product"],
            "version_name": query["version"],
            "representation_name": query["representation"],
        }

        # We want to resolve the paths live from the publishing context
        path = get_representation_path_in_publish_context(context, **names)
        if path:
            return path

        # If for whatever reason we were unable to retrieve from the context
        # then get the path from an existing database entry
        path = get_representation_path_by_names(**names)

        # Ensure `None` for now is also a string
        path = str(path)

    return path


def get_last_publish(instance, representation="usd"):
    """Wrapper to quickly get last representation publish path"""
    return get_representation_path_by_names(
        project_name=instance.context.data["projectName"],
        folder_path=instance.data["folderPath"],
        product_name=instance.data["productName"],
        version_name="latest",
        representation_name=representation
    )


def add_representation(instance, name,
                       files, staging_dir, ext=None,
                       output_name=None):
    """Add a representation to publish and integrate.

    A representation must exist of either a single file or a
    single file sequence. It can *not* contain multiple files.

    For the integration to succeed the instance must provide the context
    for asset, frame range, etc. even though the representation can
    override some parts of it.

    Arguments:
        instance (pyblish.api.Instance): Publish instance.
        name (str): The representation name.
        files (str | List[str]): List of files or single file of the
            representation. This should be the filename only.
        staging_dir (str): The directory containing the files.
        ext (Optional[str]): Explicit extension for the output.
        output_name (Optional[str]): Output name suffix for the
            destination file to ensure the file is unique if
            multiple representations share the same extension.

    Returns:
        dict: Representation data for integration.

    """
    if ext is None:
        # TODO: Use filename
        ext = name

    representation = {
        "name": name,
        "ext": ext,
        "stagingDir": staging_dir,
        "files": files
    }
    if output_name:
        representation["outputName"] = output_name

    instance.data.setdefault("representations", []).append(representation)
    return representation
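
Editor's note: a typical call from an extractor (illustration only; `staging_dir` is assumed to come from the plugin's own staging logic):

representation = add_representation(
    instance,
    name="usd",
    files="contribution.usd",
    staging_dir=staging_dir,
)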
|
||||
|
||||
|
||||
class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||
publish.OpenPypePyblishPluginMixin):
|
||||
"""Collect the USD Layer Contributions and create dependent instances.
|
||||
|
||||
Our contributions go to the layer
|
||||
|
||||
Instance representation -> Department Layer -> Asset
|
||||
|
||||
So that for example:
|
||||
modelMain --> variant 'main' in model.usd -> asset.usd
|
||||
modelDamaged --> variant 'damaged' in model.usd -> asset.usd
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.35
|
||||
label = "Collect USD Layer Contributions (Asset/Shot)"
|
||||
families = ["usd"]
|
||||
enabled = True
|
||||
|
||||
# A contribution defines a contribution into a (department) layer which
|
||||
# will get layered into the target product, usually the asset or shot.
|
||||
# We need to at least know what it targets (e.g. where does it go into) and
|
||||
# in what order (which contribution is stronger?)
|
||||
# Preferably the bootstrapped data (e.g. the Shot) preserves metadata about
|
||||
# the contributions so that we can design a system where custom
|
||||
# contributions outside the predefined orders are possible to be
|
||||
# managed. So that if a particular asset requires an extra contribution
|
||||
# level, you can add itdirectly from the publisher at that particular
|
||||
# order. Future publishes will then see the existing contribution and will
|
||||
# persist adding it to future bootstraps at that order
|
||||
contribution_layers: Dict[str, int] = {
|
||||
# asset layers
|
||||
"model": 100,
|
||||
"assembly": 150,
|
||||
"groom": 175,
|
||||
"look": 200,
|
||||
"rig": 300,
|
||||
# shot layers
|
||||
"layout": 200,
|
||||
"animation": 300,
|
||||
"simulation": 400,
|
||||
"fx": 500,
|
||||
"lighting": 600,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def apply_settings(cls, project_settings):
|
||||
# Override contribution_layers logic to turn data into Dict[str, int]
|
||||
plugin_settings = project_settings["core"]["publish"].get(
|
||||
"CollectUSDLayerContributions", {}
|
||||
)
|
||||
|
||||
cls.enabled = plugin_settings.get("enabled", cls.enabled)
|
||||
|
||||
# Define contribution layers via settings
|
||||
contribution_layers = {}
|
||||
for entry in plugin_settings.get("contribution_layers", []):
|
||||
contribution_layers[entry["name"]] = int(entry["order"])
|
||||
if contribution_layers:
|
||||
cls.contribution_layers = contribution_layers

    def process(self, instance):

        attr_values = self.get_attr_values_from_data(instance.data)
        if not attr_values.get("contribution_enabled"):
            return

        instance.data["productGroup"] = (
            instance.data.get("productGroup") or "USD Layer"
        )

        # Allow formatting in variant set name and variant name
        data = instance.data.copy()
        data["layer"] = attr_values["contribution_layer"]
        for key in [
            "contribution_variant_set_name",
            "contribution_variant"
        ]:
            attr_values[key] = attr_values[key].format(**data)

        # Define contribution
        order = self.contribution_layers.get(
            attr_values["contribution_layer"], 0
        )

        if attr_values["contribution_apply_as_variant"]:
            contribution = VariantContribution(
                instance=instance,
                layer_id=attr_values["contribution_layer"],
                target_product=attr_values["contribution_target_product"],
                variant_set_name=attr_values["contribution_variant_set_name"],
                variant_name=attr_values["contribution_variant"],
                variant_is_default=attr_values["contribution_variant_is_default"],  # noqa: E501
                order=order
            )
        else:
            contribution = SublayerContribution(
                instance=instance,
                layer_id=attr_values["contribution_layer"],
                target_product=attr_values["contribution_target_product"],
                order=order
            )

        asset_product = contribution.target_product
        layer_product = "{}_{}".format(asset_product, contribution.layer_id)

        # Layer contribution instance
        layer_instance = self.get_or_create_instance(
            product_name=layer_product,
            variant=contribution.layer_id,
            source_instance=instance,
            families=["usd", "usdLayer"],
        )
        layer_instance.data.setdefault("usd_contributions", []).append(
            contribution
        )
        layer_instance.data["usd_layer_id"] = contribution.layer_id
        layer_instance.data["usd_layer_order"] = contribution.order

        layer_instance.data["productGroup"] = (
            instance.data.get("productGroup") or "USD Layer"
        )

        # Asset/Shot contribution instance
        target_instance = self.get_or_create_instance(
            product_name=asset_product,
            variant=asset_product,
            source_instance=layer_instance,
            families=["usd", "usdAsset"],
        )
        target_instance.data["contribution_target_product_init"] = attr_values[
            "contribution_target_product_init"
        ]

        self.log.info(
            f"Contributing {instance.data['productName']} to "
            f"{layer_product} -> {asset_product}"
        )
    def find_instance(self, context, data, ignore_instance):
        """Return the instance in the context with matching `instance.data`.

        If no matching instance is found, `None` is returned.
        """
        for instance in context:
            if instance is ignore_instance:
                continue

            if all(instance.data.get(key) == value
                   for key, value in data.items()):
                return instance
    def get_or_create_instance(self,
                               product_name,
                               variant,
                               source_instance,
                               families):
        """Get or create the instance matching the product/variant.

        The source instance will be used to do additional matching, like
        ensuring it's a product for the same folder and task. If the instance
        already exists in the `context` then the existing one is returned.

        Each time this is called for a source instance, that source is
        appended to the `instance.data["source_instances"]` list on the
        returned instance.

        Arguments:
            product_name (str): Product name.
            variant (str): Variant name.
            source_instance (pyblish.api.Instance): Source instance to
                be related to for folder and task.
            families (list): The families required to be set on the instance.

        Returns:
            pyblish.api.Instance: The resulting instance.

        """

        # Potentially the instance already exists due to multiple instances
        # contributing to the same layer or asset - so we first check for
        # existence
        context = source_instance.context

        # Required matching vars
        data = {
            "folderPath": source_instance.data["folderPath"],
            "task": source_instance.data.get("task"),
            "productName": product_name,
            "variant": variant,
            "families": families
        }
        existing_instance = self.find_instance(context, data,
                                               ignore_instance=source_instance)
        if existing_instance:
            existing_instance.append(source_instance.id)
            existing_instance.data["source_instances"].append(source_instance)
            return existing_instance

        # Otherwise create the instance
        new_instance = context.create_instance(name=product_name)
        new_instance.data.update(data)

        new_instance.data["label"] = (
            "{0} ({1})".format(product_name, new_instance.data["folderPath"])
        )
        new_instance.data["family"] = "usd"
        new_instance.data["productType"] = "usd"
        new_instance.data["icon"] = "link"
        new_instance.data["comment"] = "Automated bootstrap USD file."
        new_instance.append(source_instance.id)
        new_instance.data["source_instances"] = [source_instance]

        # The contribution target publishes should never match the versioning
        # of the workfile but should always just increment from their last
        # version, so that there are never conflicts between contributions
        # from different departments and scenes.
        new_instance.data["followWorkfileVersion"] = False

        return new_instance
    @classmethod
    def get_attribute_defs(cls):

        return [
            UISeparatorDef("usd_container_settings1"),
            UILabelDef(label="<b>USD Contribution</b>"),
            BoolDef("contribution_enabled",
                    label="Enable",
                    tooltip=(
                        "When enabled, this publish instance is added to a "
                        "department layer in a target product, usually an "
                        "asset or shot.\n"
                        "When disabled, this publish instance is not added "
                        "into another USD file and remains as is.\n"
                        "In both cases the USD data itself is free to have "
                        "references and sublayers of its own."
                    ),
                    default=True),
            TextDef("contribution_target_product",
                    label="Target product",
                    tooltip=(
                        "The target product the contribution should be added "
                        "to. Usually this is the asset or shot product.\nThe "
                        "department layer will be added to this product, and "
                        "the contribution itself will be added to the "
                        "department layer."
                    ),
                    default="usdAsset"),
            EnumDef("contribution_target_product_init",
                    label="Initialize as",
                    tooltip=(
                        "The target product's USD file will be initialized "
                        "based on this type if there's no existing USD of "
                        "that product yet.\nIf there's already an existing "
                        "product with the name of the 'target product' this "
                        "setting will do nothing."
                    ),
                    items=["asset", "shot"],
                    default="asset"),

            # Asset layer, e.g. model.usd, look.usd, rig.usd
            EnumDef("contribution_layer",
                    label="Add to department layer",
                    tooltip=(
                        "The layer the contribution should be made to in the "
                        "target product.\nThe layers have their own "
                        "predefined ordering.\nA higher order (further down "
                        "the list) will contribute as a stronger opinion."
                    ),
                    items=list(cls.contribution_layers.keys()),
                    default="model"),
            BoolDef("contribution_apply_as_variant",
                    label="Add as variant",
                    tooltip=(
                        "When enabled, the contribution to the department "
                        "layer is added as a variant, where the variant on "
                        "the default root prim is added as a reference.\n"
                        "When disabled, the contribution is appended as a "
                        "sublayer to the department layer instead."
                    ),
                    default=True),
            TextDef("contribution_variant_set_name",
                    label="Variant Set Name",
                    default="{layer}"),
            TextDef("contribution_variant",
                    label="Variant Name",
                    default="{variant}"),
            BoolDef("contribution_variant_is_default",
                    label="Set as default variant selection",
                    tooltip=(
                        "Whether to set this instance's variant name as the "
                        "default selected variant name for the variant set.\n"
                        "It is always expected to be enabled for only one "
                        "variant name in the variant set.\n"
                        "The behavior is unpredictable if multiple instances "
                        "for the same variant set have this enabled."
                    ),
                    default=False),
            UISeparatorDef("usd_container_settings3"),
        ]
class CollectUSDLayerContributionsHoudiniLook(CollectUSDLayerContributions):
    """
    This is solely here to expose the attribute definitions for the
    Houdini "look" family.
    """
    # TODO: Improve how this is built for the look family
    hosts = ["houdini"]
    families = ["look"]
    label = CollectUSDLayerContributions.label + " (Look)"

    @classmethod
    def get_attribute_defs(cls):
        defs = super(CollectUSDLayerContributionsHoudiniLook,
                     cls).get_attribute_defs()

        # Update default for department layer to look
        layer_def = next(d for d in defs if d.key == "contribution_layer")
        layer_def.default = "look"

        return defs

class ExtractUSDLayerContribution(publish.Extractor):

    families = ["usdLayer"]
    label = "Extract USD Layer Contributions (Asset/Shot)"
    order = pyblish.api.ExtractorOrder + 0.45

    def process(self, instance):

        folder_path = instance.data["folderPath"]
        product_name = instance.data["productName"]
        self.log.debug(f"Building layer: {folder_path} > {product_name}")

        path = get_last_publish(instance)
        if path and BUILD_INTO_LAST_VERSIONS:
            sdf_layer = Sdf.Layer.OpenAsAnonymous(path)
            default_prim = sdf_layer.defaultPrim
        else:
            default_prim = folder_path.rsplit("/", 1)[-1]  # use folder name
            sdf_layer = Sdf.Layer.CreateAnonymous()
            set_layer_defaults(sdf_layer, default_prim=default_prim)

        contributions = instance.data.get("usd_contributions", [])
        for contribution in sorted(contributions, key=attrgetter("order")):
            path = get_instance_uri_path(contribution.instance)
            if isinstance(contribution, VariantContribution):
                # Add contribution as a reference inside a variant
                self.log.debug(f"Adding variant: {contribution}")

                # Make sure at least the prim exists outside the variant
                # selection, so it can house the variant selection and the
                # variants themselves
                prim_path = Sdf.Path(f"/{default_prim}")
                prim_spec = get_or_define_prim_spec(sdf_layer,
                                                    prim_path,
                                                    "Xform")

                variant_prim_path = variant_nested_prim_path(
                    prim_path=prim_path,
                    variant_selections=[
                        (contribution.variant_set_name,
                         contribution.variant_name)
                    ]
                )

                # Remove any existing matching entry of same product
                variant_prim_spec = sdf_layer.GetPrimAtPath(variant_prim_path)
                if variant_prim_spec:
                    self.remove_previous_reference_contribution(
                        prim_spec=variant_prim_spec,
                        instance=contribution.instance
                    )

                # Add the contribution at the indicated order
                self.add_reference_contribution(sdf_layer,
                                                variant_prim_path,
                                                path,
                                                contribution)

                # Set default variant selection
                variant_set_name = contribution.variant_set_name
                variant_name = contribution.variant_name
                if contribution.variant_is_default or \
                        variant_set_name not in prim_spec.variantSelections:
                    prim_spec.variantSelections[variant_set_name] = variant_name  # noqa: E501

            elif isinstance(contribution, SublayerContribution):
                # Sublayer source file
                self.log.debug(f"Adding sublayer: {contribution}")

                # This replaces existing versions of itself so that
                # republishing does not continuously add more versions of the
                # same product
                product_name = contribution.instance.data["productName"]
                add_ordered_sublayer(
                    layer=sdf_layer,
                    contribution_path=path,
                    layer_id=product_name,
                    order=None,  # unordered
                    add_sdf_arguments_metadata=True
                )
            else:
                raise TypeError(f"Unsupported contribution: {contribution}")

        # Save the file
        staging_dir = self.staging_dir(instance)
        filename = f"{instance.name}.usd"
        filepath = os.path.join(staging_dir, filename)
        sdf_layer.Export(filepath, args={"format": "usda"})

        add_representation(
            instance,
            name="usd",
            files=filename,
            staging_dir=staging_dir
        )
    def remove_previous_reference_contribution(self,
                                               prim_spec: Sdf.PrimSpec,
                                               instance: pyblish.api.Instance):
        # Remove existing contributions of the same product - ignoring
        # the picked version and representation. We assume there's only ever
        # one version of a product you want to have referenced into a Prim.
        remove_indices = set()
        for index, ref in enumerate(prim_spec.referenceList.prependedItems):
            ref: Sdf.Reference  # type hint

            uri = ref.customData.get("ayon_uri")
            if uri and self.instance_match_ayon_uri(instance, uri):
                self.log.debug("Removing existing reference: %s", ref)
                remove_indices.add(index)

        if remove_indices:
            prim_spec.referenceList.prependedItems[:] = [
                ref for index, ref
                in enumerate(prim_spec.referenceList.prependedItems)
                if index not in remove_indices
            ]
    def add_reference_contribution(self,
                                   layer: Sdf.Layer,
                                   prim_path: Sdf.Path,
                                   filepath: str,
                                   contribution: VariantContribution):
        instance = contribution.instance
        uri = construct_ayon_entity_uri(
            project_name=instance.data["projectEntity"]["name"],
            folder_path=instance.data["folderPath"],
            product=instance.data["productName"],
            version=instance.data["version"],
            representation_name="usd"
        )
        reference = Sdf.Reference(assetPath=filepath,
                                  customData={"ayon_uri": uri})
        add_ordered_reference(
            layer=layer,
            prim_path=prim_path,
            reference=reference,
            order=contribution.order
        )
    def instance_match_ayon_uri(self, instance, ayon_uri):

        uri_data = parse_ayon_entity_uri(ayon_uri)
        if not uri_data:
            return False

        # Check if project, folder and product match
        if instance.data["projectEntity"]["name"] != uri_data.get("project"):
            return False

        if instance.data["folderPath"] != uri_data.get("folderPath"):
            return False

        if instance.data["productName"] != uri_data.get("product"):
            return False

        return True
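
    # Editor's note: an illustrative sketch (hypothetical values) of what
    # `instance_match_ayon_uri` compares - version and representation in the
    # URI are deliberately ignored:
    #
    #   ayon://test/char/villain?product=modelMain&version=2&representation=usd
    #
    # matches an instance with:
    #   projectEntity["name"] == "test"
    #   folderPath == "/char/villain"
    #   productName == "modelMain"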


class ExtractUSDAssetContribution(publish.Extractor):

    families = ["usdAsset"]
    label = "Extract USD Asset/Shot Contributions"
    order = ExtractUSDLayerContribution.order + 0.01

    def process(self, instance):

        folder_path = instance.data["folderPath"]
        product_name = instance.data["productName"]
        self.log.debug(f"Building asset: {folder_path} > {product_name}")
        folder_name = folder_path.rsplit("/", 1)[-1]

        # Contribute layers to asset
        # Use existing asset and add to it, or initialize a new asset layer
        path = get_last_publish(instance)
        payload_layer = None
        if path and BUILD_INTO_LAST_VERSIONS:
            # If there's a payload file, put it in the payload instead
            folder = os.path.dirname(path)
            payload_path = os.path.join(folder, "payload.usd")
            if os.path.exists(payload_path):
                payload_layer = Sdf.Layer.OpenAsAnonymous(payload_path)

            asset_layer = Sdf.Layer.OpenAsAnonymous(path)
        else:
            # If no existing publish of this product exists then we initialize
            # the layer as either a default asset or shot structure.
            init_type = instance.data["contribution_target_product_init"]
            asset_layer, payload_layer = self.init_layer(
                asset_name=folder_name, init_type=init_type
            )

        # Author timeCodesPerSecond and framesPerSecond if the asset layer
        # currently lacks them but our current context does specify an FPS
        fps = instance.data.get("fps", instance.context.data.get("fps"))
        if fps is not None:
            if (
                not asset_layer.HasTimeCodesPerSecond()
                and not asset_layer.HasFramesPerSecond()
            ):
                # Author FPS on the asset layer since there is no opinion yet
                self.log.info("Authoring FPS on Asset Layer: %s FPS", fps)
                asset_layer.timeCodesPerSecond = fps
                asset_layer.framesPerSecond = fps

            if fps != asset_layer.timeCodesPerSecond:
                self.log.warning(
                    "Current instance FPS '%s' does not match asset layer "
                    "timecodes per second '%s'",
                    fps, asset_layer.timeCodesPerSecond
                )
            if fps != asset_layer.framesPerSecond:
                self.log.warning(
                    "Current instance FPS '%s' does not match asset layer "
                    "frames per second '%s'",
                    fps, asset_layer.framesPerSecond
                )

        target_layer = payload_layer if payload_layer else asset_layer

        # Get unique layer instances (remove duplicate entries)
        processed_ids = set()
        layer_instances = []
        for layer_inst in instance.data["source_instances"]:
            if layer_inst.id in processed_ids:
                continue
            layer_instances.append(layer_inst)
            processed_ids.add(layer_inst.id)

        # Insert the layers in contribution order
        def sort_by_order(instance):
            return instance.data["usd_layer_order"]

        for layer_instance in sorted(layer_instances,
                                     key=sort_by_order,
                                     reverse=True):

            layer_id = layer_instance.data["usd_layer_id"]
            order = layer_instance.data["usd_layer_order"]

            path = get_instance_uri_path(instance=layer_instance)
            add_ordered_sublayer(target_layer,
                                 contribution_path=path,
                                 layer_id=layer_id,
                                 order=order,
                                 # Add the sdf argument metadata which allows
                                 # us to later detect whether another path
                                 # has the same layer id, so we can replace
                                 # it.
                                 add_sdf_arguments_metadata=True)

        # Save the file
        staging_dir = self.staging_dir(instance)
        filename = f"{instance.name}.usd"
        filepath = os.path.join(staging_dir, filename)
        asset_layer.Export(filepath, args={"format": "usda"})

        add_representation(
            instance,
            name="usd",
            files=filename,
            staging_dir=staging_dir
        )

        if payload_layer:
            payload_path = os.path.join(staging_dir, "payload.usd")
            payload_layer.Export(payload_path, args={"format": "usda"})
            self.add_relative_file(instance, payload_path)
    def init_layer(self, asset_name, init_type):
        """Initialize the layer if no previous version exists."""

        if init_type == "asset":
            asset_layer = Sdf.Layer.CreateAnonymous()
            created_layers = setup_asset_layer(asset_layer, asset_name,
                                               force_add_payload=True,
                                               set_payload_path=True)
            payload_layer = created_layers[0].layer
            return asset_layer, payload_layer

        elif init_type == "shot":
            shot_layer = Sdf.Layer.CreateAnonymous()
            set_layer_defaults(shot_layer, default_prim=None)
            return shot_layer, None

        else:
            raise ValueError(
                "USD Target Product contribution can only initialize "
                "as 'asset' or 'shot', got: '{}'".format(init_type)
            )
    def add_relative_file(self, instance, source, staging_dir=None):
        """Add a transfer for a relative path from staging to publish dir.

        Unlike files in representations, the file will not be renamed and
        will be ingested one-to-one into the publish directory.

        Note: This file does not get registered as a representation, because
            representation files always get renamed by the publish template
            system. These files get included in the `representation["files"]`
            info with all the representations of the version - and thus would
            appear multiple times per version.

        """
        # TODO: It can be nice to force a particular representation no matter
        #   what to adhere to a certain filename on integration, because e.g.
        #   a particular file format relies on that file being named like
        #   that or alike, and still allow regular registering with the
        #   database as a file of the version. As such we might want to tweak
        #   integrator logic?
        if staging_dir is None:
            staging_dir = self.staging_dir(instance)

        assert isinstance(staging_dir, str), "Staging dir must be string"
        publish_dir: str = instance.data["publishDir"]

        relative_path = os.path.relpath(source, staging_dir)
        destination = os.path.join(publish_dir, relative_path)
        destination = os.path.normpath(destination)

        transfers = instance.data.setdefault("transfers", [])
        self.log.debug(f"Adding relative file {source} -> {relative_path}")
        transfers.append((source, destination))

@@ -1,6 +1,13 @@
import inspect
import os
from collections import defaultdict

import pyblish.api
-from ayon_core.pipeline.publish import ValidateContentsOrder
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError
+)

class ValidateResources(pyblish.api.InstancePlugin):

@@ -10,19 +17,95 @@ class ValidateResources(pyblish.api.InstancePlugin):
    these could be textures, image planes, cache files or other linked
    media.

    A single resource entry MUST contain `source` and `files`:
        {
            "source": "/path/to/file.<UDIM>.exr",
            "files": ['/path/to/file.1001.exr', '/path/to/file.1002.exr']
        }

    It may contain additional metadata like `attribute` or `node` so other
    publishing plug-ins can detect where the resource was used. The
    `color_space` data is also frequently used (e.g. in Maya and Houdini).

    This validates:
    - The resources are existing files.
    - The resources have correctly collected the data.
    - The resources must be unique to the source filepath so that multiple
      source filepaths do not write to the same publish filepath.

    """
    order = ValidateContentsOrder
-    label = "Validate Resources"
+    label = "Resources"

    def process(self, instance):

-        for resource in instance.data.get('resources', []):
+        resources = instance.data.get("resources", [])
+        if not resources:
+            self.log.debug("No resources to validate..")
+            return
+
+        # Validate the `resources` data structure is valid
+        invalid_data = False
+        for resource in resources:
            # Required data
-            assert "source" in resource, "No source found"
-            assert "files" in resource, "No files from source"
-            assert all(os.path.exists(f) for f in resource['files'])
+            if "source" not in resource:
+                invalid_data = True
+                self.log.error("Missing 'source' in resource: %s", resource)
+            if "files" not in resource or not resource["files"]:
+                invalid_data = True
+                self.log.error("Missing 'files' in resource: %s", resource)
+            if not all(os.path.exists(f) for f in resource.get("files", [])):
+                invalid_data = True
+                self.log.error(
+                    "Resource contains files that do not exist "
+                    "on disk: %s", resource
+                )

        # Ensure unique resource names
        basenames = defaultdict(set)
        for resource in resources:
            files = resource.get("files", [])
            for filename in files:

                # Use normalized paths in comparison and ignore case
                # sensitivity
                filename = os.path.normpath(filename).lower()

                basename = os.path.splitext(os.path.basename(filename))[0]
                basenames[basename].add(filename)

        invalid_resources = list()
        for basename, sources in basenames.items():
            if len(sources) > 1:
                invalid_resources.extend(sources)
                self.log.error(
                    "Non-unique resource filename: {0}\n- {1}".format(
                        basename,
                        "\n- ".join(sources)
                    )
                )

        if invalid_data or invalid_resources:
            raise PublishValidationError(
                "Invalid resources in instance.",
                description=self.get_description()
            )
    def get_description(self):
        return inspect.cleandoc(
            """### Invalid resources

            Used resources, like textures, must exist on disk and must have
            unique filenames.

            #### Filenames must be unique

            In most cases this invalidates because the same filename is used
            in different folders, so each file to be transferred is unique
            but shares its filename with another resource. Either rename the
            source files, or point to the same source file if they are
            intended to be the same file.

            """
        )


@@ -57,6 +57,31 @@ class CollectFramesFixDefModel(BaseSettingsModel):
        True,
        title="Show 'Rewrite latest version' toggle"
    )


class ContributionLayersModel(BaseSettingsModel):
    _layout = "compact"
    name: str = SettingsField(title="Name")
    order: str = SettingsField(
        title="Order",
        description="Higher order means a higher strength and stacks the "
                    "layer on top.")


class CollectUSDLayerContributionsModel(BaseSettingsModel):
    enabled: bool = SettingsField(True, title="Enabled")
    contribution_layers: list[ContributionLayersModel] = SettingsField(
        title="Department Layer Orders",
        description=(
            "Define available department layers and their strength "
            "ordering inside the USD contribution workflow."
        )
    )

    @validator("contribution_layers")
    def validate_unique_outputs(cls, value):
        ensure_unique_names(value)
        return value


class PluginStateByHostModelProfile(BaseSettingsModel):

@@ -792,6 +817,10 @@ class PublishPuginsModel(BaseSettingsModel):
        default_factory=CollectFramesFixDefModel,
        title="Collect Frames to Fix",
    )
    CollectUSDLayerContributions: CollectUSDLayerContributionsModel = SettingsField(
        default_factory=CollectUSDLayerContributionsModel,
        title="Collect USD Layer Contributions",
    )
    ValidateEditorialAssetName: ValidateBaseModel = SettingsField(
        default_factory=ValidateBaseModel,
        title="Validate Editorial Asset Name"

@@ -884,6 +913,23 @@ DEFAULT_PUBLISH_VALUES = {
        "enabled": True,
        "rewrite_version_enable": True
    },
    "CollectUSDLayerContributions": {
        "enabled": True,
        "contribution_layers": [
            # Asset layers
            {"name": "model", "order": 100},
            {"name": "assembly", "order": 150},
            {"name": "groom", "order": 175},
            {"name": "look", "order": 200},
            {"name": "rig", "order": 300},
            # Shot layers
            {"name": "layout", "order": 200},
            {"name": "animation", "order": 300},
            {"name": "simulation", "order": 400},
            {"name": "fx", "order": 500},
            {"name": "lighting", "order": 600},
        ],
    },
    "ValidateEditorialAssetName": {
        "enabled": True,
        "optional": False,

@@ -50,6 +50,30 @@ class RedshiftRenderPluginInfo():
    Version = attr.ib(default="1")


@attr.s
class HuskStandalonePluginInfo():
    """Requires the Deadline Husk Standalone Plugin.

    See the Deadline plug-in:
        https://github.com/BigRoy/HuskStandaloneSubmitter
    Also see the husk options here:
        https://www.sidefx.com/docs/houdini/ref/utils/husk.html
    """
    SceneFile = attr.ib()
    # TODO: The parameters below are only supported by a custom version of
    #   the plugin
    Renderer = attr.ib(default=None)
    RenderSettings = attr.ib(default="/Render/rendersettings")
    Purpose = attr.ib(default="geometry,render")
    Complexity = attr.ib(default="veryhigh")
    Snapshot = attr.ib(default=-1)
    LogLevel = attr.ib(default="2")
    PreRender = attr.ib(default="")
    PreFrame = attr.ib(default="")
    PostFrame = attr.ib(default="")
    PostRender = attr.ib(default="")
    RestartDelegate = attr.ib(default="")
    Version = attr.ib(default="")


class HoudiniSubmitDeadline(
    abstract_submit_deadline.AbstractSubmitDeadline,
    AYONPyblishPluginMixin

@@ -69,8 +93,7 @@ class HoudiniSubmitDeadline(
    label = "Submit Render to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["houdini"]
-    families = ["usdrender",
-                "redshift_rop",
+    families = ["redshift_rop",
                "arnold_rop",
                "mantra_rop",
                "karma_rop",

@@ -149,11 +172,14 @@ class HoudiniSubmitDeadline(

        job_type = "[RENDER]"
        if split_render_job and not is_export_job:
-            # Convert from family to Deadline plugin name
-            # i.e., arnold_rop -> Arnold
-            plugin = (
-                instance.data["productType"].replace("_rop", "").capitalize()
-            )
+            product_type = instance.data["productType"]
+            plugin = {
+                "usdrender": "HuskStandalone",
+            }.get(product_type)
+            if not plugin:
+                # Convert from product type to Deadline plugin name
+                # i.e., arnold_rop -> Arnold
+                plugin = product_type.replace("_rop", "").capitalize()
        else:
            plugin = "Houdini"
            if split_render_job:

@@ -185,7 +211,8 @@ class HoudiniSubmitDeadline(
        # Make sure we make the job frame dependent so render tasks pick up
        # as soon as export tasks are done
        if split_render_job and not is_export_job:
-            job_info.IsFrameDependent = True
+            job_info.IsFrameDependent = bool(instance.data.get(
+                "splitRenderFrameDependent", True))

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")

@@ -207,6 +234,13 @@ class HoudiniSubmitDeadline(
        )
        job_info.Group = self.group

        # Apply render globals, like e.g. data from collect machine list
        render_globals = instance.data.get("renderGlobals", {})
        if render_globals:
            self.log.debug("Applying 'renderGlobals' to job info: %s",
                           render_globals)
            job_info.update(render_globals)

        job_info.Comment = context.data.get("comment")

        keys = [

@@ -292,6 +326,10 @@ class HoudiniSubmitDeadline(
                " - using version configured in Deadline"
            ))

        elif product_type == "usdrender":
            plugin_info = self._get_husk_standalone_plugin_info(
                instance, hou_major_minor)

        else:
            self.log.error(
                "Product type '%s' not supported yet to split render job",

@@ -321,3 +359,45 @@ class HoudiniSubmitDeadline(
        # Store output dir for unified publisher (filesequence)
        output_dir = os.path.dirname(instance.data["files"][0])
        instance.data["outputDir"] = output_dir

    def _get_husk_standalone_plugin_info(self, instance, hou_major_minor):
        # Not all hosts can import this module.
        import hou

        # Supply additional parameters from the USD Render ROP
        # to the Husk Standalone Render Plug-in
        rop_node = hou.node(instance.data["instance_node"])
        snapshot_interval = -1
        if rop_node.evalParm("dosnapshot"):
            snapshot_interval = rop_node.evalParm("snapshotinterval")

        restart_delegate = 0
        if rop_node.evalParm("husk_restartdelegate"):
            restart_delegate = rop_node.evalParm("husk_restartdelegateframes")

        rendersettings = (
            rop_node.evalParm("rendersettings")
            or "/Render/rendersettings"
        )
        return HuskStandalonePluginInfo(
            SceneFile=instance.data["ifdFile"],
            Renderer=rop_node.evalParm("renderer"),
            RenderSettings=rendersettings,
            Purpose=rop_node.evalParm("husk_purpose"),
            Complexity=rop_node.evalParm("husk_complexity"),
            Snapshot=snapshot_interval,
            PreRender=rop_node.evalParm("husk_prerender"),
            PreFrame=rop_node.evalParm("husk_preframe"),
            PostFrame=rop_node.evalParm("husk_postframe"),
            PostRender=rop_node.evalParm("husk_postrender"),
            RestartDelegate=restart_delegate,
            Version=hou_major_minor
        )


class HoudiniSubmitDeadlineUsdRender(HoudiniSubmitDeadline):
    # Do not use published workfile paths for the USD Render ROP, because the
    # export job doesn't seem to run with the published path either, so the
    # output paths would then not match the actual rendered paths
    use_published = False
    families = ["usdrender"]

@@ -94,7 +94,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
                "vrayscene", "maxrender",
                "arnold_rop", "mantra_rop",
                "karma_rop", "vray_rop",
-                "redshift_rop"]
+                "redshift_rop", "usdrender"]
    settings_category = "deadline"

    aov_filter = [

@@ -367,6 +367,28 @@ def maintained_selection():
            node.setSelected(on=True)


@contextmanager
def parm_values(overrides):
    """Override parameter values during the context.

    Arguments:
        overrides (List[Tuple[hou.Parm, Any]]): The overrides per parm
            that should be applied during the context.
    """
    originals = []
    try:
        for parm, value in overrides:
            originals.append((parm, parm.eval()))
            parm.set(value)
        yield
    finally:
        for parm, value in originals:
            # The parameter might not exist anymore, so first
            # check whether it's still valid
            if hou.parm(parm.path()):
                parm.set(value)
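

# Editor's note: a minimal usage sketch for `parm_values`, assuming a
# hypothetical ROP node with an `f1` (start frame) parm - the override only
# lives for the duration of the `with` block and is restored afterwards:
#
#   rop = hou.node("/out/my_rop")  # hypothetical node path
#   with parm_values([(rop.parm("f1"), 1001)]):
#       rop.render()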


def reset_framerange(fps=True, frame_range=True):
    """Set frame range and FPS to current folder."""

@@ -134,6 +134,7 @@ class HoudiniCreator(Creator, HoudiniCreatorBase):

        instance_data["instance_node"] = instance_node.path()
        instance_data["instance_id"] = instance_node.path()
        instance_data["families"] = self.get_publish_families()
        instance = CreatedInstance(
            self.product_type,
            product_name,

@@ -182,6 +183,7 @@ class HoudiniCreator(Creator, HoudiniCreatorBase):
        node_path = instance.path()
        node_data["instance_id"] = node_path
        node_data["instance_node"] = node_path
        node_data["families"] = self.get_publish_families()
        if "AYON_productName" in node_data:
            node_data["productName"] = node_data.pop("AYON_productName")

@@ -211,6 +213,7 @@ class HoudiniCreator(Creator, HoudiniCreatorBase):
        values["AYON_productName"] = values.pop("productName")
        values.pop("instance_node", None)
        values.pop("instance_id", None)
        values.pop("families", None)
        imprint(node, values, update=update)

    def remove_instances(self, instances):

@@ -252,6 +255,21 @@ class HoudiniCreator(Creator, HoudiniCreatorBase):
        node.setUserData('nodeshape', shape)
        node.setColor(color)

    def get_publish_families(self):
        """Return families for the instances of this creator.

        Allows a Creator to define multiple families, so that a creator can
        e.g. specify `usd` and `usdrop`.

        There is no need to override this method if you only have the
        primary family defined by the `product_type` property, as that will
        always be set.

        Returns:
            List[str]: Families for instances of this creator.
        """
        return []

    def get_network_categories(self):
        """Return in which network view type this creator should show.


@@ -2,9 +2,12 @@

import contextlib
import logging
import json
import itertools
from typing import List

import hou
-from pxr import Sdf
+from pxr import Usd, Sdf, Tf, Vt, UsdRender

log = logging.getLogger(__name__)

@@ -119,11 +122,13 @@ def get_usd_rop_loppath(node):
    return node.parm("loppath").evalAsNode()


-def get_layer_save_path(layer):
+def get_layer_save_path(layer, expand_string=True):
    """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer.

    Args:
        layer (pxr.Sdf.Layer): The layer to retrieve the save path data from.
        expand_string (bool): Whether to expand any Houdini vars in the save
            path before computing the absolute path.

    Returns:
        str or None: Path to save to when data exists.

@@ -136,6 +141,8 @@ def get_layer_save_path(layer):
    save_path = hou_layer_info.customData.get("HoudiniSavePath", None)
    if save_path:
        # Unfortunately this doesn't actually resolve the full absolute path
        if expand_string:
            save_path = hou.text.expandString(save_path)
        return layer.ComputeAbsolutePath(save_path)

@@ -181,7 +188,18 @@ def iter_layer_recursive(layer):
    yield layer


-def get_configured_save_layers(usd_rop):
+def get_configured_save_layers(usd_rop, strip_above_layer_break=True):
    """Retrieve the layer save paths from a USD ROP.

    Arguments:
        usd_rop (hou.RopNode): USD ROP node.
        strip_above_layer_break (Optional[bool]): Whether to exclude any
            layers that are above layer breaks. This defaults to True.

    Returns:
        List[Sdf.Layer]: The layers with configured save paths.

    """

    lop_node = get_usd_rop_loppath(usd_rop)
    stage = lop_node.stage(apply_viewport_overrides=False)

@@ -192,10 +210,170 @@ def get_configured_save_layers(usd_rop):

    root_layer = stage.GetRootLayer()

    if strip_above_layer_break:
        layers_above_layer_break = set(lop_node.layersAboveLayerBreak())
    else:
        layers_above_layer_break = set()

    save_layers = []
    for layer in iter_layer_recursive(root_layer):
        if (
            strip_above_layer_break and
            layer.identifier in layers_above_layer_break
        ):
            continue

        save_path = get_layer_save_path(layer)
        if save_path is not None:
            save_layers.append(layer)

    return save_layers
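

# Editor's note: a brief usage sketch, assuming a hypothetical USD ROP path:
#
#   usd_rop = hou.node("/out/usd_rop")
#   for layer in get_configured_save_layers(usd_rop):
#       print(get_layer_save_path(layer))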


def setup_lop_python_layer(layer, node, savepath=None,
                           apply_file_format_args=True):
    """Set up an Sdf.Layer with a HoudiniLayerInfo prim for metadata.

    This is the same as `loputils.createPythonLayer` but can be run on top
    of `pxr.Sdf.Layer` instances that are already created in a Python LOP
    node. That's useful if your layer creation itself is built to be DCC
    agnostic; we then just need to run this afterwards per layer to make it
    explicitly stored for Houdini.

    By default, Houdini doesn't apply the FileFormatArguments supplied to
    the created layer; however, it does support USD's file save suffix
    of `:SDF_FORMAT_ARGS:` to supply them. With `apply_file_format_args` any
    file format args set on the layer's creation will be added to the
    save path through that.

    Note: The `node.addHeldLayer` call will only work from a LOP python node
        whenever `node.editableStage()` or `node.editableLayer()` was called.

    Arguments:
        layer (Sdf.Layer): An existing layer (most likely just created
            in the current runtime).
        node (hou.LopNode): The Python LOP node to attach the layer to so
            it does not get garbage collected/mangled downstream.
        savepath (Optional[str]): When provided, the HoudiniSaveControl
            will be set to Explicit with HoudiniSavePath set to this path.
        apply_file_format_args (Optional[bool]): When enabled, any
            FileFormatArgs defined for the layer on creation will be set
            in the HoudiniSavePath so the Houdini USD ROP will use them too.

    Returns:
        Sdf.PrimSpec: The created HoudiniLayerInfo prim spec.

    """
    # Add a Houdini Layer Info prim where we can put the save path.
    p = Sdf.CreatePrimInLayer(layer, '/HoudiniLayerInfo')
    p.specifier = Sdf.SpecifierDef
    p.typeName = 'HoudiniLayerInfo'
    if savepath:
        if apply_file_format_args:
            args = layer.GetFileFormatArguments()
            savepath = Sdf.Layer.CreateIdentifier(savepath, args)

        p.customData['HoudiniSavePath'] = savepath
        p.customData['HoudiniSaveControl'] = 'Explicit'
    # Let everyone know what node created this layer.
    p.customData['HoudiniCreatorNode'] = node.sessionId()
    p.customData['HoudiniEditorNodes'] = Vt.IntArray([node.sessionId()])
    node.addHeldLayer(layer.identifier)

    return p
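

# Editor's note: a minimal sketch of how `setup_lop_python_layer` might be
# used inside a Python LOP node (hypothetical save path; `hou.pwd()` is the
# Python LOP node while cooking):
#
#   node = hou.pwd()
#   layer = node.editableLayer()
#   setup_lop_python_layer(layer, node, savepath="$HIP/usd/mylayer.usd")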


@contextlib.contextmanager
def remap_paths(rop_node, mapping):
    """Enable the AyonRemapPaths output processor with provided `mapping`."""
    from ayon_houdini.api.lib import parm_values

    if not mapping:
        # Do nothing
        yield
        return

    # Houdini string parms need to escape backslashes due to the support
    # of expressions - as such we do so on the json data
    value = json.dumps(mapping).replace("\\", "\\\\")
    with outputprocessors(
        rop_node,
        processors=["ayon_remap_paths"],
        disable_all_others=True,
    ):
        with parm_values([
            (rop_node.parm("ayon_remap_paths_remap_json"), value)
        ]):
            yield
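

# Editor's note: a short usage sketch for `remap_paths`, with a hypothetical
# ROP node and mapping - paths get remapped only while rendering inside the
# context:
#
#   rop = hou.node("/stage/usd_rop")  # hypothetical node path
#   with remap_paths(rop, {"/local/cache": "/farm/cache"}):
#       rop.render()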


def get_usd_render_rop_rendersettings(rop_node, stage=None, logger=None):
    """Return the chosen UsdRender.Settings from the stage (if any).

    Args:
        rop_node (hou.Node): The Houdini USD Render ROP node.
        stage (pxr.Usd.Stage): The USD stage to find the render settings
            in. This is usually the stage from the LOP path the USD Render
            ROP node refers to.
        logger (logging.Logger): Logger to log warnings to if no render
            settings were found in the stage.

    Returns:
        Optional[UsdRender.Settings]: Render Settings.

    """
    if logger is None:
        logger = log

    if stage is None:
        lop_node = get_usd_rop_loppath(rop_node)
        stage = lop_node.stage()

    path = rop_node.evalParm("rendersettings")
    if not path:
        # Default behavior
        path = "/Render/rendersettings"

    prim = stage.GetPrimAtPath(path)
    if not prim:
        logger.warning("No render settings primitive found at: %s", path)
        return

    render_settings = UsdRender.Settings(prim)
    if not render_settings:
        logger.warning("Prim at %s is not a valid RenderSettings prim.", path)
        return

    return render_settings
def get_schema_type_names(type_name: str) -> List[str]:
    """Return the schema type name for a type name and its derived types.

    This can be useful for checking whether a `Sdf.PrimSpec`'s type name is
    of a given type or any of its derived types.

    Args:
        type_name (str): The type name, e.g. 'UsdGeomMesh'.

    Returns:
        List[str]: List of schema type names and their derived types.

    """
    schema_registry = Usd.SchemaRegistry
    type_ = Tf.Type.FindByName(type_name)

    if type_ == Tf.Type.Unknown:
        type_ = schema_registry.GetTypeFromSchemaTypeName(type_name)
        if type_ == Tf.Type.Unknown:
            # Type not found
            return []

    results = []
    derived = type_.GetAllDerivedTypes()
    for derived_type in itertools.chain([type_], derived):
        schema_type_name = schema_registry.GetSchemaTypeName(derived_type)
        if schema_type_name:
            results.append(schema_type_name)

    return results
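

# Editor's note: an illustrative call - exact results depend on the USD
# build, but querying a base schema returns it plus its derived schema
# types, for example:
#
#   get_schema_type_names("UsdGeomBoundable")
#   # -> ["UsdGeomBoundable", "UsdGeomGprim", "UsdGeomMesh", ...]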

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating publishable Houdini Digital Assets."""
import hou
-from assettools import setToolSubmenu

import ayon_api
from ayon_core.pipeline import (

@@ -16,6 +15,132 @@ from ayon_core.lib import (
from ayon_houdini.api import plugin


# region assettools
# Logic based on Houdini 19.5.752 `assettools.py`, because
# this logic was removed in Houdini 20+
def get_tool_submenus(hda_def):
    """Return the tab submenu entries of this node.

    Note: A node could be placed in multiple entries at once.

    Arguments:
        hda_def: The HDA definition from hou.node.type().definition()

    Returns:
        Optional[list[str]]: A list of submenus
    """

    import xml.etree.ElementTree as ET
    if hda_def.hasSection('Tools.shelf'):
        sections = hda_def.sections()
        ts_section = sections['Tools.shelf'].contents()
        try:
            root = ET.fromstring(ts_section)
        except ET.ParseError:
            return None
        tool = root[0]
        submenus = tool.findall('toolSubmenu')
        if submenus:
            tool_submenus = []
            for submenu in submenus:
                if submenu is not None:
                    text = submenu.text
                    if text:
                        tool_submenus.append(submenu.text)
            if tool_submenus:
                return tool_submenus
            else:
                return None
        else:
            return None
    else:
        return None


def set_tool_submenu(hda_def,
                     new_submenu='Digital Assets'):
    """Set the tab menu entry for a node.

    Arguments:
        hda_def: The HDA definition from hou.node.type().definition()
        new_submenu (Optional[str]): This will be the new submenu, replacing
            the old_submenu entry
    """

    context_dict = {
        'Shop': 'SHOP',
        'Cop2': 'COP2',
        'Object': 'OBJ',
        'Chop': 'CHOP',
        'Sop': 'SOP',
        'Vop': 'VOP',
        'VopNet': 'VOPNET',
        'Driver': 'ROP',
        'TOP': 'TOP',
        'Top': 'TOP',
        'Lop': 'LOP',
        'Dop': 'DOP'}

    utils_dict = {
        'Shop': 'shoptoolutils',
        'Cop2': 'cop2toolutils',
        'Object': 'objecttoolutils',
        'Chop': 'choptoolutils',
        'Sop': 'soptoolutils',
        'Vop': 'voptoolutils',
        'VopNet': 'vopnettoolutils',
        'Driver': 'drivertoolutils',
        'TOP': 'toptoolutils',
        'Top': 'toptoolutils',
        'Lop': 'loptoolutils',
        'Dop': 'doptoolutils'}

    if hda_def.hasSection('Tools.shelf'):
        old_submenu = get_tool_submenus(hda_def)[0]
    else:
        # Add default tools shelf section
        content = """<?xml version="1.0" encoding="UTF-8"?>
<shelfDocument>
  <!-- This file contains definitions of shelves, toolbars, and tools.
 It should not be hand-edited when it is being used by the application.
 Note, that two definitions of the same element are not allowed in
 a single file. -->
  <tool name="$HDA_DEFAULT_TOOL" label="$HDA_LABEL" icon="$HDA_ICON">
    <toolMenuContext name="viewer">
      <contextNetType>SOP</contextNetType>
    </toolMenuContext>
    <toolMenuContext name="network">
      <contextOpType>$HDA_TABLE_AND_NAME</contextOpType>
    </toolMenuContext>
    <toolSubmenu>Digital Assets</toolSubmenu>
    <script scriptType="python"><![CDATA[import soptoolutils
soptoolutils.genericTool(kwargs, \'$HDA_NAME\')]]></script>
  </tool>
</shelfDocument>
"""

        nodetype_category_name = hda_def.nodeType().category().name()
        context = context_dict[nodetype_category_name]
        util = utils_dict[nodetype_category_name]
        content = content.replace(
            "<contextNetType>SOP</contextNetType>",
            f"<contextNetType>{context}</contextNetType>")
        content = content.replace('soptoolutils', util)
        hda_def.addSection('Tools.shelf', content)
        old_submenu = 'Digital Assets'

    # Replace submenu
    tools = hda_def.sections()["Tools.shelf"]
    content = tools.contents()
    content = content.replace(
        f"<toolSubmenu>{old_submenu}</toolSubmenu>",
        f"<toolSubmenu>{new_submenu}</toolSubmenu>"
    )

    hda_def.addSection('Tools.shelf', content)
# endregion

class CreateHDA(plugin.HoudiniCreator):
    """Publish Houdini Digital Asset file."""

@@ -121,7 +246,7 @@ class CreateHDA(plugin.HoudiniCreator):
        hda_def.setUserInfo(get_ayon_username())

        if pre_create_data.get("use_project"):
-            setToolSubmenu(hda_def, "AYON/{}".format(self.project_name))
+            set_tool_submenu(hda_def, "AYON/{}".format(self.project_name))

        return hda_node

@@ -8,10 +8,11 @@ import hou
class CreateUSD(plugin.HoudiniCreator):
    """Universal Scene Description"""
    identifier = "io.openpype.creators.houdini.usd"
-    label = "USD (experimental)"
+    label = "USD"
    product_type = "usd"
-    icon = "gears"
+    icon = "cubes"
    enabled = False
    description = "Create USD"

    def create(self, product_name, instance_data, pre_create_data):

@@ -49,3 +50,6 @@ class CreateUSD(plugin.HoudiniCreator):
            hou.ropNodeTypeCategory(),
            hou.lopNodeTypeCategory()
        ]

    def get_publish_families(self):
        return ["usd", "usdrop"]

@@ -0,0 +1,73 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USD looks with textures."""
import inspect

from ayon_houdini.api import plugin

import hou


class CreateUSDLook(plugin.HoudiniCreator):
    """Universal Scene Description Look"""

    identifier = "io.openpype.creators.houdini.usd.look"
    label = "Look"
    product_type = "look"
    icon = "paint-brush"
    enabled = True
    description = "Create USD Look"

    def create(self, product_name, instance_data, pre_create_data):

        instance_data.pop("active", None)
        instance_data.update({"node_type": "usd"})

        instance = super(CreateUSDLook, self).create(
            product_name,
            instance_data,
            pre_create_data)

        instance_node = hou.node(instance.get("instance_node"))

        parms = {
            "lopoutput": "$HIP/pyblish/{}.usd".format(product_name),
            "enableoutputprocessor_simplerelativepaths": False,

            # Set the 'default prim' by default to the folder name being
            # published to
            "defaultprim": '/`strsplit(chs("folderPath"), "/", -1)`',
        }

        if self.selected_nodes:
            parms["loppath"] = self.selected_nodes[0].path()

        instance_node.setParms(parms)

        # Lock any parameters in this list
        to_lock = [
            "fileperframe",
            # Lock some Avalon attributes
            "family",
            "id",
        ]
        self.lock_parameters(instance_node, to_lock)

    def get_detail_description(self):
        return inspect.cleandoc("""Publish looks in USD data.

        From the Houdini Solaris context (LOPs) this will publish the look
        for an asset as a USD file with the used textures.

        Any assets used by the look will be relatively remapped to the USD
        file and integrated into the publish as `resources`.

        """)

    def get_network_categories(self):
        return [
            hou.ropNodeTypeCategory(),
            hou.lopNodeTypeCategory()
        ]

    def get_publish_families(self):
        return ["usd", "look", "usdrop"]

@@ -1,24 +1,66 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating USD renders."""
from ayon_houdini.api import plugin
from ayon_core.lib import BoolDef, EnumDef

import hou


def get_usd_rop_renderers():
    """Return all available renderers supported by the USD Render ROP.

    Note that the USD Render ROP does not include all Hydra renderers,
    because it excludes the GL ones like Houdini GL and Storm. The USD
    Render ROP only lists the renderers that have `aovsupport` enabled.
    Also see:
    https://www.sidefx.com/docs/houdini/nodes/out/usdrender.html#list

    Returns:
        dict[str, str]: Plug-in name to display name mapping.
    """
    return {
        info["name"]: info["displayname"] for info
        in hou.lop.availableRendererInfo() if info.get('aovsupport')
    }
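

# Editor's note: an illustrative sketch of the mapping this returns - the
# exact entries depend on the installed Hydra delegates, e.g.:
#
#   get_usd_rop_renderers()
#   # -> {"BRAY_HdKarma": "Karma CPU", ...}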


class CreateUSDRender(plugin.HoudiniCreator):
    """USD Render ROP in /stage"""
    identifier = "io.openpype.creators.houdini.usdrender"
-    label = "USD Render (experimental)"
+    label = "USD Render"
    product_type = "usdrender"
    icon = "magic"
    description = "Create USD Render"

    default_renderer = "Karma CPU"
    # Default render target
    render_target = "farm_split"

    def create(self, product_name, instance_data, pre_create_data):
        import hou  # noqa

        instance_data["parent"] = hou.node("/stage")
        # Transfer settings from pre create to instance
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())

        for key in ["render_target", "review"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]

        # TODO: Support creation in /stage if wanted by user
        # pre_create_data["parent"] = "/stage"

        # Remove the active, we are checking the bypass flag of the nodes
        instance_data.pop("active", None)
        instance_data.update({"node_type": "usdrender"})

        # Override the default value for the Export Chunk Size, because if
        # a single USD file is written (as opposed to per frame) we want to
        # ensure only one machine picks up that sequence
        # TODO: Probably better to change the default somehow for just this
        #   Creator on the HoudiniSubmitDeadline plug-in, if possible?
        (
            instance_data
            .setdefault("publish_attributes", {})
            .setdefault("HoudiniSubmitDeadlineUsdRender", {})["export_chunk"]
        ) = 1000

        instance = super(CreateUSDRender, self).create(
            product_name,
            instance_data,
@ -26,15 +68,98 @@ class CreateUSDRender(plugin.HoudiniCreator):
|
|||
|
||||
instance_node = hou.node(instance.get("instance_node"))
|
||||
|
||||
|
||||
parms = {
|
||||
# Render frame range
|
||||
"trange": 1
|
||||
}
|
||||
if self.selected_nodes:
|
||||
parms["loppath"] = self.selected_nodes[0].path()
|
||||
|
||||
if pre_create_data.get("render_target") == "farm_split":
|
||||
# Do not trigger the husk render, only trigger the USD export
|
||||
parms["runcommand"] = False
|
||||
# By default, the render ROP writes out the render file to a
|
||||
# temporary directory. But if we want to render the USD file on
|
||||
# the farm we instead want it in the project available
|
||||
# to all machines. So we ensure all USD files are written to a
|
||||
# folder to our choice. The
|
||||
# `__render__.usd` (default name, defined by `lopoutput` parm)
|
||||
# in that folder will then be the file to render.
|
||||
parms["savetodirectory_directory"] = "$HIP/render/usd/$HIPNAME/$OS"
|
||||
parms["lopoutput"] = "__render__.usd"
|
||||
parms["allframesatonce"] = True
|
||||
|
||||
# By default strip any Houdini custom data from the output file
|
||||
# since the renderer doesn't care about it
|
||||
parms["clearhoudinicustomdata"] = True
|
||||
|
||||
# Use the first selected LOP node if "Use Selection" is enabled
|
||||
# and the user had any nodes selected
|
||||
if self.selected_nodes:
|
||||
for node in self.selected_nodes:
|
||||
if node.type().category() == hou.lopNodeTypeCategory():
|
||||
parms["loppath"] = node.path()
|
||||
break
|
||||
|
||||
# Set default renderer if defined in settings
|
||||
if pre_create_data.get("renderer"):
|
||||
parms["renderer"] = pre_create_data.get("renderer")
|
||||
|
||||
instance_node.setParms(parms)
|
||||
|
||||
# Lock some Avalon attributes
|
||||
# Lock some AYON attributes
|
||||
to_lock = ["productType", "id"]
|
||||
self.lock_parameters(instance_node, to_lock)
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
"""get instance attribute definitions.
|
||||
Attributes defined in this method are exposed in
|
||||
publish tab in the publisher UI.
|
||||
"""
|
||||
|
||||
render_target_items = {
|
||||
"local": "Local machine rendering",
|
||||
"local_no_render": "Use existing frames (local)",
|
||||
"farm": "Farm Rendering",
|
||||
"farm_split": "Farm Rendering - Split export & render jobs",
|
||||
}
|
||||
|
||||
return [
|
||||
BoolDef("review",
|
||||
label="Review",
|
||||
tooltip="Mark as reviewable",
|
||||
default=True),
|
||||
EnumDef("render_target",
|
||||
items=render_target_items,
|
||||
label="Render target",
|
||||
default=self.render_target)
|
||||
]
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
|
||||
# Retrieve available renderers and convert default renderer to
|
||||
# plug-in name if settings provided the display name
|
||||
renderer_plugin_to_display_name = get_usd_rop_renderers()
|
||||
default_renderer = self.default_renderer or None
|
||||
if (
|
||||
default_renderer
|
||||
and default_renderer not in renderer_plugin_to_display_name
|
||||
):
|
||||
# Map default renderer display name to plugin name
|
||||
for name, display_name in renderer_plugin_to_display_name.items():
|
||||
if default_renderer == display_name:
|
||||
default_renderer = name
|
||||
break
|
||||
else:
|
||||
# Default renderer not found in available renderers
|
||||
default_renderer = None
|
||||
|
||||
attrs = super(CreateUSDRender, self).get_pre_create_attr_defs()
|
||||
attrs += [
|
||||
EnumDef("renderer",
|
||||
label="Renderer",
|
||||
default=default_renderer,
|
||||
items=renderer_plugin_to_display_name),
|
||||
]
|
||||
|
||||
return attrs + self.get_instance_attr_defs()

@@ -10,7 +10,8 @@ class CollectFarmInstances(plugin.HoudiniInstancePlugin):
                 "karma_rop",
                 "redshift_rop",
                 "arnold_rop",
-                "vray_rop"]
+                "vray_rop",
+                "usdrender"]
 
     targets = ["local", "remote"]
     label = "Collect farm instances"

@@ -4,9 +4,10 @@ This will add additional families to different instance based on
 the creator_identifier parameter.
 """
 import pyblish.api
+from ayon_houdini.api import plugin
 
 
-class CollectPointcacheType(pyblish.api.InstancePlugin):
+class CollectPointcacheType(plugin.HoudiniInstancePlugin):
     """Collect data type for different instances."""
 
     order = pyblish.api.CollectorOrder

@@ -1,152 +0,0 @@
import hou
import pyblish.api
from ayon_core.pipeline import usdlib
from ayon_houdini.api import lib, plugin
import ayon_houdini.api.usd as hou_usdlib


class CollectInstancesUsdLayered(plugin.HoudiniContextPlugin):
    """Collect Instances from a ROP Network and its configured layer paths.

    The output nodes of the ROP node will only be published when *any* of the
    layers remain set to 'publish' by the user.

    This works differently from most of our Avalon instances in the pipeline.
    As opposed to storing `ayon.create.instance` as id on the node we store
    `pyblish.avalon.usdlayered`.

    Additionally this instance has no need for storing folder, product type,
    product name or name on the nodes. Instead all information is retrieved
    solely from the output filepath, which is an Avalon URI:
        avalon://{folder}/{product}.{representation}

    Each final ROP node is considered a dependency for any of the Configured
    Save Path layers it sets along the way. As such, the instances shown in
    the Pyblish UI are solely the configured layers. The encapsulating usd
    files are generated whenever *any* of the dependencies is published.

    These dependency instances are stored in:
        instance.data["publishDependencies"]

    """

    order = pyblish.api.CollectorOrder - 0.01
    label = "Collect Instances (USD Configured Layers)"

    def process(self, context):

        stage = hou.node("/stage")
        if not stage:
            # Likely Houdini version <18
            return

        nodes = stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)
        for node in nodes:

            if not node.parm("id"):
                continue

            if node.evalParm("id") != "pyblish.avalon.usdlayered":
                continue

            has_product_type = node.evalParm("productType")
            assert has_product_type, (
                "'%s' is missing 'productType'" % node.name()
            )

            self.process_node(node, context)

        def sort_by_family(instance):
            """Sort by family"""
            return instance.data.get(
                "families",
                instance.data.get("productType")
            )

        # Sort/grouped by family (preserving local index)
        context[:] = sorted(context, key=sort_by_family)

        return context

    def process_node(self, node, context):

        # Allow a single ROP node or a full ROP network of USD ROP nodes
        # to be processed as a single entry that should "live together" on
        # a publish.
        if node.type().name() == "ropnet":
            # All rop nodes inside ROP Network
            ropnodes = node.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)
        else:
            # A single node
            ropnodes = [node]

        data = lib.read(node)

        # Don't use the explicit "colorbleed.usd.layered" family for
        # publishing; instead use the "colorbleed.usd" family to integrate.
        data["publishFamilies"] = ["colorbleed.usd"]

        # For now group ALL of them into USD Layer product group
        # Allow this product to be grouped into a USD Layer on creation
        data["productGroup"] = "USD Layer"

        instances = list()
        dependencies = []
        for ropnode in ropnodes:

            # Create a dependency instance per ROP Node.
            lopoutput = ropnode.evalParm("lopoutput")
            dependency_save_data = self.get_save_data(lopoutput)
            dependency = context.create_instance(dependency_save_data["name"])
            dependency.append(ropnode)
            dependency.data.update(data)
            dependency.data.update(dependency_save_data)
            dependency.data["productType"] = "colorbleed.usd.dependency"
            dependency.data["optional"] = False
            dependencies.append(dependency)

            # Hide the dependency instance from the context
            context.pop()

            # Get all configured layers for this USD ROP node
            # and create a Pyblish instance for each one
            layers = hou_usdlib.get_configured_save_layers(ropnode)
            for layer in layers:
                save_path = hou_usdlib.get_layer_save_path(layer)
                save_data = self.get_save_data(save_path)
                if not save_data:
                    continue
                self.log.info(save_path)

                instance = context.create_instance(save_data["name"])
                instance[:] = [node]

                # Set the instance data
                instance.data.update(data)
                instance.data.update(save_data)
                instance.data["usdLayer"] = layer

                instances.append(instance)

        # Store the collected ROP node dependencies
        self.log.debug("Collected dependencies: %s" % (dependencies,))
        for instance in instances:
            instance.data["publishDependencies"] = dependencies

    def get_save_data(self, save_path):

        # Resolve Avalon URI
        uri_data = usdlib.parse_avalon_uri(save_path)
        if not uri_data:
            self.log.warning("Non Avalon URI Layer Path: %s" % save_path)
            return {}

        # Collect folder + product from URI
        name = "{product[name]} ({folder[path]})".format(**uri_data)
        fname = "{folder[path]}_{product[name]}.{ext}".format(**uri_data)

        data = dict(uri_data)
        data["usdSavePath"] = save_path
        data["usdFilename"] = fname
        data["name"] = name
        return data
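
To make the deleted get_save_data() concrete: assuming usdlib.parse_avalon_uri returned a mapping like the hypothetical one below, the two format strings would build the instance name and filename as follows:

uri_data = {
    "folder": {"path": "/char/villain"},  # hypothetical parse result
    "product": {"name": "usdShade"},
    "ext": "usd",
}
name = "{product[name]} ({folder[path]})".format(**uri_data)
fname = "{folder[path]}_{product[name]}.{ext}".format(**uri_data)
print(name)   # usdShade (/char/villain)
print(fname)  # /char/villain_usdShade.usd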

@@ -21,7 +21,8 @@ class CollectLocalRenderInstances(plugin.HoudiniInstancePlugin):
                 "karma_rop",
                 "redshift_rop",
                 "arnold_rop",
-                "vray_rop"]
+                "vray_rop",
+                "usdrender"]
 
     label = "Collect local render instances"

@@ -5,99 +5,70 @@ import hou
 import pxr.UsdRender
 
 import pyblish.api
 
 from ayon_houdini.api import plugin
-
-
-def get_var_changed(variable=None):
-    """Return changed variables and operators that use it.
-
-    Note: `varchange` hscript states that it forces a recook of the nodes
-        that use Variables. That was tested in Houdini 18.0.391.
-
-    Args:
-        variable (str, Optional): A specific variable to query the operators
-            for. When None is provided it will return all variables that have
-            had recent changes and require a recook. Defaults to None.
-
-    Returns:
-        dict: Variable that changed with the operators that use it.
-
-    """
-    cmd = "varchange -V"
-    if variable:
-        cmd += " {0}".format(variable)
-    output, _ = hou.hscript(cmd)
-
-    changed = {}
-    for line in output.split("Variable: "):
-        if not line.strip():
-            continue
-
-        split = line.split()
-        var = split[0]
-        operators = split[1:]
-        changed[var] = operators
-
-    return changed
+from ayon_houdini.api.usd import (
+    get_usd_render_rop_rendersettings
+)
 
 
 class CollectRenderProducts(plugin.HoudiniInstancePlugin):
-    """Collect USD Render Products."""
+    """Collect USD Render Products.
+
+    The render products are collected from the USD Render ROP node by
+    detecting what the selected Render Settings prim path is, then finding
+    those Render Settings in the USD Stage and collecting the targeted
+    Render Products and their expected filenames.
+
+    Note: Product refers to a USD Render Product, not to an AYON product.
+
+    """
 
     label = "Collect Render Products"
-    order = pyblish.api.CollectorOrder + 0.4
+    # This plugin should run after CollectUsdRender
+    # and before CollectLocalRenderInstances
+    order = pyblish.api.CollectorOrder + 0.04
     families = ["usdrender"]
 
     def process(self, instance):
 
+        rop_node = hou.node(instance.data["instance_node"])
         node = instance.data.get("output_node")
         if not node:
-            rop_path = instance.data["instance_node"].path()
-            raise RuntimeError(
-                "No output node found. Make sure to connect an "
+            rop_path = rop_node.path()
+            self.log.error(
+                "No output node found. Make sure to connect a valid "
                 "input to the USD ROP: %s" % rop_path
             )
+            return
 
-        # Workaround Houdini 18.0.391 bug where $HIPNAME doesn't automatically
-        # update after scene save.
-        if hou.applicationVersion() == (18, 0, 391):
-            self.log.debug(
-                "Checking for recook to workaround $HIPNAME refresh bug..."
-            )
-            changed = get_var_changed("HIPNAME").get("HIPNAME")
-            if changed:
-                self.log.debug("Recooking for $HIPNAME refresh bug...")
-                for operator in changed:
-                    hou.node(operator).cook(force=True)
-
-                # Make sure to recook any 'cache' nodes in the history chain
-                chain = [node]
-                chain.extend(node.inputAncestors())
-                for input_node in chain:
-                    if input_node.type().name() == "cache":
-                        input_node.cook(force=True)
-
-        stage = node.stage()
+        override_output_image = rop_node.evalParm("outputimage")
 
         filenames = []
-        for prim in stage.Traverse():
-
-            if not prim.IsA(pxr.UsdRender.Product):
+        files_by_product = {}
+        stage = node.stage()
+        for prim_path in self.get_render_products(rop_node, stage):
+            prim = stage.GetPrimAtPath(prim_path)
+            if not prim or not prim.IsA(pxr.UsdRender.Product):
+                self.log.warning("Found invalid render product path "
+                                 "configured in render settings that is not "
+                                 "a Render Product prim: %s", prim_path)
                 continue
 
+            render_product = pxr.UsdRender.Product(prim)
             # Get Render Product Name
-            product = pxr.UsdRender.Product(prim)
+            if override_output_image:
+                name = override_output_image
+            else:
+                # We force taking it from any random time sample as opposed to
+                # "default" that the USD Api falls back to since that won't
+                # return time sampled values if they were set per time sample.
+                name = render_product.GetProductNameAttr().Get(time=0)
 
-            # We force taking it from any random time sample as opposed to
-            # "default" that the USD Api falls back to since that won't return
-            # time sampled values if they were set per time sample.
-            name = product.GetProductNameAttr().Get(time=0)
             dirname = os.path.dirname(name)
             basename = os.path.basename(name)
 
             dollarf_regex = r"(\$F([0-9]?))"
-            frame_regex = r"^(.+\.)([0-9]+)(\.[a-zA-Z]+)$"
             if re.match(dollarf_regex, basename):
+                # TODO: Confirm this actually is allowed in USD stages and
+                #  HUSK
                 # Substitute $F

@@ -109,11 +80,28 @@ class CollectRenderProducts(plugin.HoudiniInstancePlugin):
                 filename_base = re.sub(dollarf_regex, replace, basename)
                 filename = os.path.join(dirname, filename_base)
             else:
+                # Last group of digits in the filename before the extension.
+                # The frame number must always be prefixed by underscore or
+                # dot. Allow product names like:
+                # - filename.1001.exr
+                # - filename.1001.aov.exr
+                # - filename.aov.1001.exr
+                # - filename_1001.exr
+                frame_regex = r"(.*[._])(\d+)(?!.*\d)(.*\.[A-Za-z0-9]+$)"
+
+                # It may be the case that the current USD stage has stored
+                # product name samples (e.g. when loading a USD file with
+                # time samples) where it does not refer to e.g. $F4. And thus
+                # it refers to the actual path like /path/to/frame.1001.exr
+                # TODO: It would be better to maybe sample the product name
+                #  attribute's `ValueMightBeTimeVarying` and, if so, get it
+                #  per frame using `attr.Get(time=frame)` to ensure we get
+                #  the actual product name set at that point in time?
                 # Substitute basename.0001.ext
                 def replace(match):
-                    prefix, frame, ext = match.groups()
+                    head, frame, tail = match.groups()
                     padding = "#" * len(frame)
-                    return prefix + padding + ext
+                    return head + padding + tail
 
                 filename_base = re.sub(frame_regex, replace, basename)
                 filename = os.path.join(dirname, filename_base)
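
A quick sanity check of the new frame-token substitution; the regex and replace() below mirror the hunk above, the sample basenames are made up:

import re

frame_regex = r"(.*[._])(\d+)(?!.*\d)(.*\.[A-Za-z0-9]+$)"

def replace(match):
    head, frame, tail = match.groups()
    padding = "#" * len(frame)
    return head + padding + tail

for basename in ("filename.1001.exr",
                 "filename.aov.1001.exr",
                 "filename_1001.exr"):
    print(re.sub(frame_regex, replace, basename))
# filename.####.exr
# filename.aov.####.exr
# filename_####.exr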

@@ -126,8 +114,135 @@ class CollectRenderProducts(plugin.HoudiniInstancePlugin):
 
             filenames.append(filename)
 
             prim_path = str(prim.GetPath())
-            self.log.info("Collected %s name: %s" % (prim_path, filename))
+            # TODO: Improve AOV name detection logic
+            aov_identifier = self.get_aov_identifier(render_product)
+            if aov_identifier in files_by_product:
+                self.log.error(
+                    "Multiple render products are identified as the same AOV "
+                    "which means one of the two will not be ingested during "
+                    "publishing. AOV: '%s'", aov_identifier
+                )
+                self.log.warning("Skipping Render Product: %s",
+                                 render_product)
+
+            files_by_product[aov_identifier] = self.generate_expected_files(
+                instance,
+                filename
+            )
+
+            aov_label = f"'{aov_identifier}' aov in " if aov_identifier else ""
+            self.log.debug("Render Product %s%s", aov_label, prim_path)
+            self.log.debug("Product name: %s", filename)
 
         # Filenames for Deadline
         instance.data["files"] = filenames
+        instance.data.setdefault("expectedFiles", []).append(files_by_product)
+
+        # Farm publishing 'add review' logic expects this key to exist and
+        # be True if the render is a multipart EXR.
+        # Otherwise it will most probably fail the AOV filter as multipart
+        # EXR files mostly don't include the AOV name in the file path.
+        # Assume multipartExr is 'True' as long as we have at most one AOV.
+        instance.data["multipartExr"] = len(files_by_product) <= 1
+
+    def get_aov_identifier(self, render_product):
+        """Return the AOV identifier for a Render Product.
+
+        A Render Product does not really define what 'AOV' it is; it
+        defines the product name (output path) and the render vars to
+        include.
+
+        So we need to define what in particular of a `UsdRenderProduct`
+        we use to separate the AOVs (and thus apply sub-grouping with).
+
+        For now, for any Render Product that refers to only a single render
+        var, the render var's prim name is used as the AOV name; otherwise
+        the render product is assumed to be a combined multilayer 'main'
+        layer.
+
+        Args:
+            render_product (pxr.UsdRender.Product): The Render Product
+
+        Returns:
+            str: The AOV identifier
+
+        """
+        targets = render_product.GetOrderedVarsRel().GetTargets()
+        if len(targets) > 1:
+            # Cryptomattes usually are combined render vars, for example:
+            # - crypto_asset, crypto_asset01, crypto_asset02, crypto_asset03
+            # - crypto_object, crypto_object01, etc.
+            # These still refer to the same AOV so we take the common prefix,
+            # e.g. `crypto_asset` or `crypto` (if multiple are combined)
+            if all(target.name.startswith("crypto") for target in targets):
+                start = os.path.commonprefix(
+                    [target.name for target in targets])
+                return start.rstrip("_")  # remove any trailing _
+
+            # Main layer
+            return ""
+        elif len(targets) == 1:
+            # AOV for a single var
+            return targets[0].name
+        else:
+            self.log.warning(
+                f"Render product has no rendervars set: {render_product}")
+            return ""
+
+    def get_render_products(self, usdrender_rop, stage):
+        """Return the render products defined in the render settings.
+
+        Args:
+            usdrender_rop (hou.Node): The Houdini USD Render ROP node.
+            stage (pxr.Usd.Stage): The USD stage to find the render settings
+                in. This is usually the stage from the LOP path the USD
+                Render ROP node refers to.
+
+        Returns:
+            List[Sdf.Path]: Render Product paths enabled in the render
+                settings.
+
+        """
+        render_settings = get_usd_render_rop_rendersettings(usdrender_rop,
+                                                            stage,
+                                                            logger=self.log)
+        if not render_settings:
+            return []
+
+        return render_settings.GetProductsRel().GetTargets()
+
+    def generate_expected_files(self, instance, path):
+        """Generate the full sequence of expected files from a filepath.
+
+        The filepath should have a '#' token as placeholder for frame numbers
+        or should have %04d or %d placeholders. The `#` characters indicate
+        frame number and padding, e.g. #### becomes 0001 for frame 1.
+
+        Args:
+            instance (pyblish.api.Instance): The publish instance.
+            path (str): The filepath to generate the list of output files
+                for.
+
+        Returns:
+            list: Filepath per frame.
+
+        """
+        folder = os.path.dirname(path)
+        filename = os.path.basename(path)
+
+        if "#" in filename:
+            def replace(match):
+                return "%0{}d".format(len(match.group()))
+
+            filename = re.sub("#+", replace, filename)
+
+        if "%" not in filename:
+            # Not a sequence, single file
+            return path
+
+        expected_files = []
+        start = instance.data["frameStartHandle"]
+        end = instance.data["frameEndHandle"]
+
+        for frame in range(int(start), (int(end) + 1)):
+            expected_files.append(
+                os.path.join(folder, (filename % frame)).replace("\\", "/"))
+
+        return expected_files
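
For illustration, generate_expected_files() first turns '#' padding into a printf-style token and then formats one path per frame; a standalone sketch with hypothetical values:

import re

path = "/tmp/renders/beauty.####.exr"
start, end = 1001, 1003

filename = re.sub(
    "#+", lambda match: "%0{}d".format(len(match.group())), path)
print([filename % frame for frame in range(start, end + 1)])
# ['/tmp/renders/beauty.1001.exr', '/tmp/renders/beauty.1002.exr',
#  '/tmp/renders/beauty.1003.exr']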

@@ -15,7 +15,8 @@ class CollectReviewableInstances(plugin.HoudiniInstancePlugin):
                 "karma_rop",
                 "redshift_rop",
                 "arnold_rop",
-                "vray_rop"]
+                "vray_rop",
+                "usdrender"]
 
     def process(self, instance):
         creator_attribute = instance.data["creator_attributes"]

@@ -1,122 +0,0 @@
import pyblish.api
import ayon_api

from ayon_core.pipeline import usdlib, KnownPublishError

from ayon_houdini.api import plugin

class CollectUsdBootstrap(plugin.HoudiniInstancePlugin):
    """Collect special Asset/Shot bootstrap instances if those are needed.

    Some specific products are intended to be part of the default structure
    of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset
    we layer a Model and Shade USD file over each other and expose that in
    an Asset USD file, ready to use.

    On the first publish of any of the components of an Asset or Shot the
    missing pieces are bootstrapped and generated in the pipeline too. This
    means that on the very first publish of your model the Asset USD file
    will exist too.

    """

    order = pyblish.api.CollectorOrder + 0.35
    label = "Collect USD Bootstrap"
    families = ["usd", "usd.layered"]

    def process(self, instance):

        # Detect whether the current product is a product in a pipeline
        def get_bootstrap(instance):
            instance_product_name = instance.data["productName"]
            for name, layers in usdlib.PIPELINE.items():
                if instance_product_name in set(layers):
                    return name  # e.g. "asset"
            else:
                return

        bootstrap = get_bootstrap(instance)
        if bootstrap:
            self.add_bootstrap(instance, bootstrap)

        # Check if any of the dependencies requires a bootstrap
        for dependency in instance.data.get("publishDependencies", list()):
            bootstrap = get_bootstrap(dependency)
            if bootstrap:
                self.add_bootstrap(dependency, bootstrap)

    def add_bootstrap(self, instance, bootstrap):

        self.log.debug("Add bootstrap for: %s" % bootstrap)

        project_name = instance.context.data["projectName"]
        folder_path = instance.data["folderPath"]
        folder_name = folder_path.rsplit("/", 1)[-1]
        folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
        if not folder_entity:
            raise KnownPublishError(
                "Folder '{}' does not exist".format(folder_path)
            )

        # Check which are not about to be created and don't exist yet
        required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap)

        require_all_layers = instance.data.get("requireAllLayers", False)
        if require_all_layers:
            # USD files load fine in usdview and Houdini even when layered or
            # referenced files do not exist. So by default we don't require
            # the layers to exist.
            layers = usdlib.PIPELINE.get(bootstrap)
            if layers:
                required += list(layers)

        self.log.debug("Checking required bootstrap: %s" % required)
        for product_name in required:
            if self._product_exists(
                project_name, instance, product_name, folder_entity
            ):
                continue

            self.log.debug(
                "Creating {0} USD bootstrap: {1} {2}".format(
                    bootstrap, folder_path, product_name
                )
            )

            product_type = "usd.bootstrap"
            new = instance.context.create_instance(product_name)
            new.data["productName"] = product_name
            new.data["label"] = "{0} ({1})".format(product_name, folder_name)
            new.data["productType"] = product_type
            new.data["family"] = product_type
            new.data["comment"] = "Automated bootstrap USD file."
            new.data["publishFamilies"] = ["usd"]

            # Do not allow the user to toggle this instance
            new.data["optional"] = False

            # Copy some data from the instance for which we bootstrap
            for key in ["folderPath"]:
                new.data[key] = instance.data[key]

    def _product_exists(
        self, project_name, instance, product_name, folder_entity
    ):
        """Return whether product exists in current context or in database."""
        # Allow it to be created during this publish session
        context = instance.context

        folder_path = folder_entity["path"]
        for inst in context:
            if (
                inst.data["productName"] == product_name
                and inst.data["folderPath"] == folder_path
            ):
                return True

        # Or, if they already exist in the database we can
        # skip them too.
        if ayon_api.get_product_by_name(
            project_name, product_name, folder_entity["id"], fields={"id"}
        ):
            return True
        return False

@@ -1,18 +1,66 @@
+import copy
 import os
+import hou
 import re
 
 import pyblish.api
 
+from ayon_core.pipeline.create import get_product_name
 from ayon_houdini.api import plugin
 import ayon_houdini.api.usd as usdlib
 
-import hou
-
 
+def copy_instance_data(instance_src, instance_dest, attr):
+    """Copy instance data from `src` instance to `dest` instance.
+
+    Examples:
+        >>> copy_instance_data(instance_src, instance_dest,
+        >>>     attr="publish_attributes.CollectRopFrameRange")
+
+    Arguments:
+        instance_src (pyblish.api.Instance): Source instance to copy from
+        instance_dest (pyblish.api.Instance): Target instance to copy to
+        attr (str): Attribute on the source instance to copy. This can be
+            a nested key joined by `.` to only copy sub entries of
+            dictionaries in the source instance's data.
+
+    Raises:
+        KeyError: If the key does not exist on the source instance.
+        AssertionError: If a parent key already exists on the destination
+            instance but is not of the correct type (= is not a dict)
+
+    """
+    src_data = instance_src.data
+    dest_data = instance_dest.data
+    keys = attr.split(".")
+    for i, key in enumerate(keys):
+        if key not in src_data:
+            break
+
+        src_value = src_data[key]
+        if i != len(keys) - 1:
+            dest_data = dest_data.setdefault(key, {})
+            assert isinstance(dest_data, dict), "Destination must be a dict"
+            src_data = src_value
+        else:
+            # Last iteration - assign the value
+            dest_data[key] = copy.deepcopy(src_value)
+
+
 class CollectUsdLayers(plugin.HoudiniInstancePlugin):
     """Collect the USD Layers that have configured save paths."""
 
-    order = pyblish.api.CollectorOrder + 0.35
+    order = pyblish.api.CollectorOrder + 0.25
     label = "Collect USD Layers"
-    families = ["usd"]
+    families = ["usdrop"]
 
     def process(self, instance):
+        # TODO: Replace this with a Hidden Creator so we collect these BEFORE
+        #  starting the publish so the user sees them before publishing
+        #  - however the user should not be able to individually
+        #  enable/disable this from the main ROP it's created from?
+
         output = instance.data.get("output_node")
         if not output:

@@ -29,13 +77,16 @@ class CollectUsdLayers(plugin.HoudiniInstancePlugin):
             creator = info.customData.get("HoudiniCreatorNode")
 
             self.log.debug("Found configured save path: "
-                           "%s -> %s" % (layer, save_path))
+                           "%s -> %s", layer, save_path)
 
             # Log node that configured this save path
-            if creator:
-                self.log.debug("Created by: %s" % creator)
+            creator_node = hou.nodeBySessionId(creator) if creator else None
+            if creator_node:
+                self.log.debug(
+                    "Created by: %s", creator_node.path()
+                )
 
-            save_layers.append((layer, save_path))
+            save_layers.append((layer, save_path, creator_node))
 
         # Store on the instance
         instance.data["usdConfiguredSavePaths"] = save_layers

@@ -43,23 +94,65 @@ class CollectUsdLayers(plugin.HoudiniInstancePlugin):
         # Create configured layer instances so User can disable updating
         # specific configured layers for publishing.
         context = instance.context
-        product_type = "usdlayer"
-        for layer, save_path in save_layers:
+        for layer, save_path, creator_node in save_layers:
             name = os.path.basename(save_path)
-            label = "{0} -> {1}".format(instance.data["name"], name)
             layer_inst = context.create_instance(name)
 
-            layer_inst.data["productType"] = product_type
-            layer_inst.data["family"] = product_type
-            layer_inst.data["families"] = [product_type]
-            layer_inst.data["productName"] = "__stub__"
-            layer_inst.data["label"] = label
-            layer_inst.data["folderPath"] = instance.data["folderPath"]
-            layer_inst.data["instance_node"] = instance.data["instance_node"]
-            # include same USD ROP
-            layer_inst.append(rop_node)
-            # include layer data
-            layer_inst.append((layer, save_path))
-
-            # Allow this product to be grouped into a USD Layer on creation
-            layer_inst.data["productGroup"] = "USD Layer"
+            staging_dir, fname = os.path.split(save_path)
+            fname_no_ext, ext = os.path.splitext(fname)
+
+            variant = fname_no_ext
+
+            # Strip off any trailing version number in the form of _v[0-9]+
+            variant = re.sub("_v[0-9]+$", "", variant)
+
+            layer_inst.data["usd_layer"] = layer
+            layer_inst.data["usd_layer_save_path"] = save_path
+
+            project_name = context.data["projectName"]
+            variant_base = instance.data["variant"]
+            subset = get_product_name(
+                project_name=project_name,
+                # TODO: This should use task from `instance`
+                task_name=context.data["anatomyData"]["task"]["name"],
+                task_type=context.data["anatomyData"]["task"]["type"],
+                host_name=context.data["hostName"],
+                product_type="usd",
+                variant=variant_base + "_" + variant,
+                project_settings=context.data["project_settings"]
+            )
+
+            label = "{0} -> {1}".format(instance.data["name"], subset)
+            family = "usd"
+            layer_inst.data["family"] = family
+            layer_inst.data["families"] = [family]
+            layer_inst.data["subset"] = subset
+            layer_inst.data["label"] = label
+            layer_inst.data["asset"] = instance.data["asset"]
+            layer_inst.data["task"] = instance.data.get("task")
+            layer_inst.data["instance_node"] = instance.data["instance_node"]
+            layer_inst.data["render"] = False
+            layer_inst.data["output_node"] = creator_node
+
+            # Inherit "use handles" from the source instance
+            # TODO: Do we want to maybe copy full `publish_attributes`
+            #  instead?
+            copy_instance_data(
+                instance, layer_inst,
+                attr="publish_attributes.CollectRopFrameRange.use_handles"
+            )
+
+            # Allow this subset to be grouped into a USD Layer on creation
+            layer_inst.data["subsetGroup"] = "USD Layer"
+
+            # For now just assume the representation will get published
+            representation = {
+                "name": "usd",
+                "ext": ext.lstrip("."),
+                "stagingDir": staging_dir,
+                "files": fname
+            }
+            layer_inst.data.setdefault("representations", []).append(
+                representation)
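
A minimal sketch of what the new copy_instance_data() helper does for a nested key, using types.SimpleNamespace as a stand-in for pyblish instances (hypothetical data; assumes the fixed copy_instance_data above is in scope):

import types

instance_src = types.SimpleNamespace(data={
    "publish_attributes": {"CollectRopFrameRange": {"use_handles": True}}
})
instance_dest = types.SimpleNamespace(data={})

copy_instance_data(
    instance_src, instance_dest,
    attr="publish_attributes.CollectRopFrameRange.use_handles")
print(instance_dest.data)
# {'publish_attributes': {'CollectRopFrameRange': {'use_handles': True}}}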

@@ -0,0 +1,243 @@
import re

import os
import glob
from typing import List, Optional
import dataclasses

import pyblish.api
import hou
from pxr import Sdf

from ayon_houdini.api import plugin


# Colorspace attributes differ per renderer implementation in the USD data.
# Some have dedicated input names like Arnold and Redshift, whereas others,
# like MaterialX, store `colorSpace` metadata on the asset property itself.
# See the `get_colorspace` method on the plug-in for more details
COLORSPACE_ATTRS = [
    "inputs:color_space",  # Image Vop (arnold::image)
    "inputs:tex0_colorSpace",  # RS Texture Vop (redshift::TextureSampler)
    # TODO: USD UV Texture VOP doesn't seem to use colorspaces from the actual
    #  OCIO configuration so we skip these for now. Especially since the
    #  texture is usually used for 'preview' purposes anyway.
    # "inputs:sourceColorSpace",  # USD UV Texture Vop (usduvtexture::2.0)
]


@dataclasses.dataclass
class Resource:
    attribute: str  # property path
    source: str  # unresolved source path
    files: List[str]  # resolved list of files, e.g. multiple for <UDIM>
    color_space: str = None  # colorspace of the resource


def get_layer_property_paths(layer: Sdf.Layer) -> List[Sdf.Path]:
    """Return all property paths from a layer"""
    paths = []

    def collect_paths(path):
        if not path.IsPropertyPath():
            return
        paths.append(path)

    layer.Traverse("/", collect_paths)

    return paths


class CollectUsdLookAssets(plugin.HoudiniInstancePlugin):
    """Collect all assets introduced by the look.

    We are looking to collect e.g. all texture resources so we can transfer
    them with the publish and write them to the publish location.

    If possible, we'll also try to identify the colorspace of the asset.

    """
    # TODO: Implement $F frame support (per frame values)
    # TODO: If an input image is already a published texture or resource then
    #  preferably we'd keep the link intact and NOT update it. We can just
    #  start ignoring AYON URIs

    label = "Collect USD Look Assets"
    order = pyblish.api.CollectorOrder
    hosts = ["houdini"]
    families = ["look"]

    exclude_suffixes = [".usd", ".usda", ".usdc", ".usdz", ".abc", ".vdb"]

    def process(self, instance):

        rop: hou.RopNode = hou.node(instance.data.get("instance_node"))
        if not rop:
            return

        lop_node: hou.LopNode = instance.data.get("output_node")
        if not lop_node:
            return

        above_break_layers = set(lop_node.layersAboveLayerBreak())

        stage = lop_node.stage()
        layers = [
            layer for layer
            in stage.GetLayerStack(includeSessionLayers=False)
            if layer.identifier not in above_break_layers
        ]

        instance_resources = self.get_layer_assets(layers)

        # Define a relative asset remapping for the USD Extractor so that
        # any textures are remapped to their 'relative' publish path.
        # All textures will be in a relative `./resources/` folder
        remap = {}
        for resource in instance_resources:
            source = resource.source
            name = os.path.basename(source)
            remap[os.path.normpath(source)] = f"./resources/{name}"
        instance.data["assetRemap"] = remap

        # Store resources on instance
        resources = instance.data.setdefault("resources", [])
        for resource in instance_resources:
            resources.append(dataclasses.asdict(resource))

        # Log all collected textures
        # Note: It is fine for a single texture to be included more than
        # once, even when one of them does not have a color space set but
        # the other does. For example, there may be a USD UV Texture just
        # for a GL preview material which does not specify an OCIO color
        # space.
        all_files = []
        for resource in instance_resources:
            all_files.append(f"{resource.attribute}:")

            for filepath in resource.files:
                if resource.color_space:
                    file_label = f"- {filepath} ({resource.color_space})"
                else:
                    file_label = f"- {filepath}"
                all_files.append(file_label)

        self.log.info(
            "Collected assets:\n{}".format(
                "\n".join(all_files)
            )
        )

    def get_layer_assets(self, layers: List[Sdf.Layer]) -> List[Resource]:
        # TODO: Correctly resolve paths using Asset Resolver.
        #  Preferably this would use one cached resolver context to
        #  optimize the path resolving.
        # TODO: Fix for timesamples - if timesamples, then `.default` might
        #  not be authored on the spec

        resources: List[Resource] = list()
        for layer in layers:
            for path in get_layer_property_paths(layer):

                spec = layer.GetAttributeAtPath(path)
                if not spec:
                    continue

                if spec.typeName != "asset":
                    continue

                asset: Sdf.AssetPath = spec.default
                base, ext = os.path.splitext(asset.path)
                if ext in self.exclude_suffixes:
                    continue

                filepath = asset.path.replace("\\", "/")

                # Expand <UDIM> to all matching files on disk
                # TODO: Add support for `<TILE>`
                # TODO: Add support for `<ATTR:name INDEX:name DEFAULT:value>`
                if "<UDIM>" in filepath.upper():
                    pattern = re.sub(
                        r"<UDIM>",
                        # UDIM is always four digits
                        "[0-9]" * 4,
                        filepath,
                        flags=re.IGNORECASE
                    )
                    files = glob.glob(pattern)
                else:
                    # Single file
                    files = [filepath]

                # Detect the colorspace of the input asset property
                colorspace = self.get_colorspace(spec)

                resource = Resource(
                    attribute=path.pathString,
                    source=asset.path,
                    files=files,
                    color_space=colorspace
                )
                resources.append(resource)

        # Sort by filepath
        resources.sort(key=lambda r: r.source)

        return resources

    def get_colorspace(self, spec: Sdf.AttributeSpec) -> Optional[str]:
        """Return colorspace for an asset attribute spec.

        There is currently no USD standard on how colorspaces should be
        represented for shaders or asset properties - each renderer's
        material implementations seem to currently use their own way of
        specifying the colorspace on the shader. As such, this comes with
        some guesswork.

        Args:
            spec (Sdf.AttributeSpec): The asset type attribute to retrieve
                the colorspace for.

        Returns:
            Optional[str]: The colorspace for the given attribute, if any.

        """
        # TODO: Support Karma, V-Ray, Renderman texture colorspaces
        # MaterialX image defines colorspace as custom info on the attribute
        if spec.HasInfo("colorSpace"):
            return spec.GetInfo("colorSpace")

        # Arnold materials define the colorspace as a separate primvar
        # TODO: Fix for timesamples - if timesamples, then `.default` might
        #  not be authored on the spec
        prim_path = spec.path.GetPrimPath()
        layer = spec.layer
        for name in COLORSPACE_ATTRS:
            colorspace_property_path = prim_path.AppendProperty(name)
            colorspace_spec = layer.GetAttributeAtPath(
                colorspace_property_path
            )
            if colorspace_spec and colorspace_spec.default:
                return colorspace_spec.default


class CollectUsdLookResourceTransfers(plugin.HoudiniInstancePlugin):
    """Define the publish direct file transfers for any found resources.

    This ensures that any source texture will end up in the published look
    in the `resourcesDir`.

    """
    label = "Collect USD Look Transfers"
    order = pyblish.api.CollectorOrder + 0.496
    hosts = ["houdini"]
    families = ["look"]

    def process(self, instance):

        resources_dir = instance.data["resourcesDir"]
        transfers = instance.data.setdefault("transfers", [])
        for resource in instance.data.get("resources", []):
            for src in resource["files"]:
                dest = os.path.join(resources_dir, os.path.basename(src))
                transfers.append((src, dest))
                self.log.debug("Registering transfer: %s -> %s", src, dest)

@@ -0,0 +1,86 @@
import os
import re

import hou
import pyblish.api

from ayon_houdini.api import (
    colorspace,
    plugin
)
from ayon_houdini.api.lib import (
    evalParmNoFrame,
    get_color_management_preferences
)


class CollectUsdRender(plugin.HoudiniInstancePlugin):
    """Collect publishing data for the USD Render ROP.

    If the `runcommand` parm is disabled (and thus no render is triggered by
    the USD Render ROP itself) it is assumed to be a "Split Render" job
    where the farm will get an additional render job after the USD file is
    extracted.

    Provides:
        instance -> ifdFile
        instance -> colorspaceConfig
        instance -> colorspaceDisplay
        instance -> colorspaceView

    """

    label = "Collect USD Render Rop"
    order = pyblish.api.CollectorOrder
    hosts = ["houdini"]
    families = ["usdrender"]

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))

        if instance.data["splitRender"]:
            # USD file output
            lop_output = evalParmNoFrame(
                rop, "lopoutput", pad_character="#"
            )

            # The file is usually relative to the Output Processor's 'Save
            # to Directory' which forces all USD files to end up in that
            # directory
            # TODO: It is possible for a user to disable this
            # TODO: When enabled I think only the basename of the `lopoutput`
            #  parm is preserved, any parent folders defined are likely
            #  ignored
            folder = evalParmNoFrame(
                rop, "savetodirectory_directory", pad_character="#"
            )

            export_file = os.path.join(folder, lop_output)

            # Substitute any # characters in the name back to their $F4
            # equivalent
            def replace_to_f(match):
                number = len(match.group(0))
                if number <= 1:
                    number = ""  # make it just $F, not $F1 or $F0
                return "$F{}".format(number)

            export_file = re.sub("#+", replace_to_f, export_file)
            self.log.debug(
                "Found export file: {}".format(export_file)
            )
            instance.data["ifdFile"] = export_file

            # The render job is not frame dependent but fully dependent on
            # the job having been completed, since the extracted file is a
            # single file.
            if "$F" not in export_file:
                instance.data["splitRenderFrameDependent"] = False

        # update the colorspace data
        colorspace_data = get_color_management_preferences()
        instance.data["colorspaceConfig"] = colorspace_data["config"]
        instance.data["colorspaceDisplay"] = colorspace_data["display"]
        instance.data["colorspaceView"] = colorspace_data["view"]

        # stub required data for Submit Publish Job publish plug-in
        instance.data["attachTo"] = []
        instance.data["renderProducts"] = colorspace.ARenderProduct()

@@ -15,13 +15,20 @@ class ExtractRender(plugin.HoudiniExtractorPlugin):
                 "karma_rop",
                 "redshift_rop",
                 "arnold_rop",
-                "vray_rop"]
+                "vray_rop",
+                "usdrender"]
 
     def process(self, instance):
         creator_attribute = instance.data["creator_attributes"]
         product_type = instance.data["productType"]
         rop_node = hou.node(instance.data.get("instance_node"))
 
+        # TODO: This section goes against pyblish concepts where
+        #  pyblish plugins should not change the state of the scene.
+        #  However, in the AYON publisher tool users can have options and
+        #  these options should somehow be synced with the houdini nodes.
+        #  More info: https://github.com/ynput/ayon-core/issues/417
+
+        # Align split parameter value on rop node to the render target.
         if instance.data["splitRender"]:
             if product_type == "arnold_rop":

@@ -32,6 +39,8 @@ class ExtractRender(plugin.HoudiniExtractorPlugin):
             rop_node.setParms({"RS_archive_enable": 1})
         elif product_type == "vray_rop":
             rop_node.setParms({"render_export_mode": "2"})
+        elif product_type == "usdrender":
+            rop_node.setParms({"runcommand": 0})
         else:
             if product_type == "arnold_rop":
                 rop_node.setParms({"ar_ass_export_enable": 0})

@@ -41,6 +50,8 @@ class ExtractRender(plugin.HoudiniExtractorPlugin):
             rop_node.setParms({"RS_archive_enable": 0})
         elif product_type == "vray_rop":
             rop_node.setParms({"render_export_mode": "1"})
+        elif product_type == "usdrender":
+            rop_node.setParms({"runcommand": 1})
 
         if instance.data.get("farm"):
             self.log.debug(
                 "Render should be processed on farm, skipping local render.")

@@ -1,19 +1,21 @@
 import os
-import hou
+from typing import List, AnyStr
 
 import pyblish.api
 
+from ayon_core.pipeline.publish.lib import get_instance_expected_output_path
 from ayon_houdini.api import plugin
 from ayon_houdini.api.lib import render_rop
+from ayon_houdini.api.usd import remap_paths
+
+import hou
 
 
 class ExtractUSD(plugin.HoudiniExtractorPlugin):
 
     order = pyblish.api.ExtractorOrder
     label = "Extract USD"
-    families = ["usd",
-                "usdModel",
-                "usdSetDress"]
+    families = ["usdrop"]
 
     def process(self, instance):
 

@@ -27,7 +29,18 @@ class ExtractUSD(plugin.HoudiniExtractorPlugin):
 
         self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir))
 
-        render_rop(ropnode)
+        mapping = self.get_source_to_publish_paths(instance.context)
+
+        # Allow instance-specific path remapping overrides, e.g. changing
+        # paths on used resources/textures for looks
+        instance_mapping = instance.data.get("assetRemap", {})
+        if instance_mapping:
+            self.log.debug("Instance-specific asset path remapping:\n"
+                           f"{instance_mapping}")
+            mapping.update(instance_mapping)
+
+        with remap_paths(ropnode, mapping):
+            render_rop(ropnode)
 
         assert os.path.exists(output), "Output does not exist: %s" % output
 

@@ -41,3 +54,51 @@ class ExtractUSD(plugin.HoudiniExtractorPlugin):
             "stagingDir": staging_dir,
         }
         instance.data["representations"].append(representation)
+
+    def get_source_to_publish_paths(self, context):
+        """Define a mapping of all current instances in context from source
+        file to publish file so this can be used on the USD save to remap
+        asset layer paths on publish via the AyonRemapPaths output
+        processor."""
+
+        mapping = {}
+        for instance in context:
+            if not instance.data.get("active", True):
+                continue
+
+            if not instance.data.get("publish", True):
+                continue
+
+            for repre in instance.data.get("representations", []):
+                name = repre.get("name")
+                ext = repre.get("ext")
+
+                # TODO: The remapping might need to get more involved if the
+                #  asset paths that are set use e.g. $F
+                # TODO: If the representation has multiple files we might
+                #  need to define the path remapping per file of the sequence
+                path = get_instance_expected_output_path(
+                    instance, representation_name=name, ext=ext
+                )
+                for source_path in get_source_paths(instance, repre):
+                    source_path = os.path.normpath(source_path)
+                    mapping[source_path] = path
+
+        return mapping
+
+
+def get_source_paths(
+        instance: pyblish.api.Instance,
+        repre: dict
+) -> List[AnyStr]:
+    """Return the full source filepaths for an instance's representations"""
+
+    staging = repre.get("stagingDir", instance.data.get("stagingDir"))
+    files = repre.get("files", [])
+    if isinstance(files, list):
+        return [os.path.join(staging, fname) for fname in files]
+    elif isinstance(files, str):
+        # Single file
+        return [os.path.join(staging, files)]
+
+    raise TypeError(f"Unsupported type for representation files: {files} "
+                    "(supports list or str)")

@@ -1,322 +0,0 @@
import os
import contextlib
import sys
from collections import deque
import hou

import ayon_api
import pyblish.api

from ayon_core.pipeline import get_representation_path
from ayon_houdini.api import plugin
import ayon_houdini.api.usd as hou_usdlib
from ayon_houdini.api.lib import render_rop


class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks.

    For example:

        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception

    """

    def __init__(self):
        self._exit_callbacks = deque()

    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance"""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack

    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""

        def _exit_wrapper(*exc_details):
            return cm_exit(cm, *exc_details)

        _exit_wrapper.__self__ = cm
        self.push(_exit_wrapper)

    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature.

        Can suppress exceptions the same way __exit__ methods can.

        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself)

        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods
        _cb_type = type(exit)
        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume it's a callable
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit  # Allow use as a decorator

    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.

        Cannot suppress exceptions.
        """

        def _exit_wrapper(exc_type, exc, tb):
            callback(*args, **kwds)

        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection
        _exit_wrapper.__wrapped__ = callback
        self.push(_exit_wrapper)
        return callback  # Allow use as a decorator

    def enter_context(self, cm):
        """Enters the supplied context manager

        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with
        # statement
        _cm_type = type(cm)
        _exit = _cm_type.__exit__
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result

    def close(self):
        """Immediately unwind the context stack"""
        self.__exit__(None, None, None)

    def __enter__(self):
        return self

    def __exit__(self, *exc_details):
        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]

        def _fix_exception_context(new_exc, old_exc):
            while 1:
                exc_context = new_exc.__context__
                if exc_context in (None, frame_exc):
                    break
                new_exc = exc_context
            new_exc.__context__ = old_exc

        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        while self._exit_callbacks:
            cb = self._exit_callbacks.pop()
            try:
                if cb(*exc_details):
                    suppressed_exc = True
                    exc_details = (None, None, None)
            except Exception:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                if not self._exit_callbacks:
                    raise
                exc_details = new_exc_details
        return suppressed_exc


@contextlib.contextmanager
def parm_values(overrides):
    """Override Parameter values during the context."""

    originals = []
    try:
        for parm, value in overrides:
            originals.append((parm, parm.eval()))
            parm.set(value)
        yield
    finally:
        for parm, value in originals:
            # Parameter might not exist anymore so first
            # check whether it's still valid
            if hou.parm(parm.path()):
                parm.set(value)


class ExtractUSDLayered(plugin.HoudiniExtractorPlugin):

    order = pyblish.api.ExtractorOrder
    label = "Extract Layered USD"
    families = ["usdLayered", "usdShade"]

    # Force Output Processors so it will always save any file
    # into our unique staging directory with processed Avalon paths
    output_processors = ["avalon_uri_processor", "stagingdir_processor"]

    def process(self, instance):

        self.log.info("Extracting: %s" % instance)

        staging_dir = self.staging_dir(instance)
        fname = instance.data.get("usdFilename")

        # The individual rop nodes are collected as "publishDependencies"
        dependencies = instance.data["publishDependencies"]
        ropnodes = [dependency[0] for dependency in dependencies]
        assert all(
            node.type().name() in {"usd", "usd_rop"} for node in ropnodes
        )

        # Main ROP node, either a USD Rop or ROP network with
        # multiple USD ROPs
        node = hou.node(instance.data["instance_node"])

        # Collect any output dependencies that have not been processed yet
        # during extraction of other instances
        outputs = [fname]
        active_dependencies = [
            dep
            for dep in dependencies
            if dep.data.get("publish", True)
            and not dep.data.get("_isExtracted", False)
        ]
        for dependency in active_dependencies:
            outputs.append(dependency.data["usdFilename"])

        pattern = r"*[/\]{0} {0}"
        save_pattern = " ".join(pattern.format(fname) for fname in outputs)

        # Run a stack of context managers before we start the render to
        # temporarily adjust USD ROP settings for our publish output.
        rop_overrides = {
            # This sets staging directory on the processor to force our
            # output files to end up in the Staging Directory.
            "stagingdiroutputprocessor_stagingDir": staging_dir,
            # Force the Avalon URI Output Processor to refactor paths for
            # references, payloads and layers to published paths.
            "avalonurioutputprocessor_use_publish_paths": True,
            # Only write out specific USD files based on our outputs
            "savepattern": save_pattern,
        }
        overrides = list()
        with ExitStack() as stack:

            for ropnode in ropnodes:
                manager = hou_usdlib.outputprocessors(
                    ropnode,
                    processors=self.output_processors,
                    disable_all_others=True,
                )
                stack.enter_context(manager)

            # Some of these must be added after we enter the output
            # processor context manager because those parameters only
            # exist when the Output Processor is added to the ROP node.
            for name, value in rop_overrides.items():
                parm = ropnode.parm(name)
                assert parm, "Parm not found: %s.%s" % (
                    ropnode.path(),
                    name,
                )
                overrides.append((parm, value))

            stack.enter_context(parm_values(overrides))

            # Render the single ROP node or the full ROP network
            render_rop(node)

        # Assert all output files in the Staging Directory
        for output_fname in outputs:
            path = os.path.join(staging_dir, output_fname)
            assert os.path.exists(path), "Output file must exist: %s" % path

        # Set up the dependency for publish if they have new content
        # compared to previous publishes
        project_name = instance.context.data["projectName"]
        for dependency in active_dependencies:
            dependency_fname = dependency.data["usdFilename"]

            filepath = os.path.join(staging_dir, dependency_fname)
            similar = self._compare_with_latest_publish(
                project_name, dependency, filepath
            )
            if similar:
                # Deactivate this dependency
                self.log.debug(
                    "Dependency matches previous publish version,"
                    " deactivating %s for publish" % dependency
                )
                dependency.data["publish"] = False
            else:
                self.log.debug("Extracted dependency: %s" % dependency)
                # This dependency should be published
                dependency.data["files"] = [dependency_fname]
                dependency.data["stagingDir"] = staging_dir
                dependency.data["_isExtracted"] = True

        # Store the created files on the instance
        if "files" not in instance.data:
            instance.data["files"] = []
        instance.data["files"].append(fname)

    def _compare_with_latest_publish(self, project_name, dependency,
                                     new_file):
        import filecmp

        _, ext = os.path.splitext(new_file)

        # Compare this dependency with the latest published version
        # to detect whether we should make this into a new publish
        # version. If not, skip it.
        folder_entity = ayon_api.get_folder_by_path(
            project_name, dependency.data["folderPath"], fields={"id"}
        )
        product_entity = ayon_api.get_product_by_name(
            project_name,
            dependency.data["productName"],
            folder_entity["id"],
            fields={"id"}
        )
        if not product_entity:
            # Subset doesn't exist yet. Definitely new file
            self.log.debug("No existing product..")
            return False

        version_entity = ayon_api.get_last_version_by_product_id(
            project_name, product_entity["id"], fields={"id"}
        )
        if not version_entity:
            self.log.debug("No existing version..")
            return False

        representation = ayon_api.get_representation_by_name(
            project_name, ext.lstrip("."), version_entity["id"]
        )
        if not representation:
            self.log.debug("No existing representation..")
            return False

        old_file = get_representation_path(representation)
        if not os.path.exists(old_file):
            return False

        return filecmp.cmp(old_file, new_file)

    def staging_dir(self, instance):
        """Provide a temporary directory in which to store extracted files

        Upon calling this method the staging directory is stored inside
        the instance.data['stagingDir']
        """

        from ayon_core.pipeline.publish import get_instance_staging_dir

        return get_instance_staging_dir(instance)
|
||||
|
|
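The override block above relies on `contextlib.ExitStack` so every temporarily changed setting is restored even if the render raises midway. A minimal sketch of that restore-on-exit pattern, using a plain `dict` and a hypothetical `temporary_value` helper in place of Houdini parms and the `parm_values` context manager:

```python
from contextlib import ExitStack, contextmanager


@contextmanager
def temporary_value(store: dict, key, value):
    """Set `store[key]` to `value`, restoring the old value on exit."""
    original = store.get(key)
    store[key] = value
    try:
        yield
    finally:
        store[key] = original


settings = {"savepattern": "*", "stagingDir": None}
with ExitStack() as stack:
    # Enter one context manager per override; ExitStack unwinds them all,
    # in reverse order, even when an error occurs mid-render.
    stack.enter_context(temporary_value(settings, "savepattern", "*.usd"))
    stack.enter_context(temporary_value(settings, "stagingDir", "/tmp/stage"))
    assert settings["savepattern"] == "*.usd"  # overrides active here

# After the block the original values are back.
assert settings["stagingDir"] is None
```

Because `ExitStack` unwinds in reverse order, nested overrides restore cleanly no matter where an exception is raised.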
@@ -22,9 +22,12 @@ class ValidateBypassed(plugin.HoudiniInstancePlugin):

    def process(self, instance):

        if len(instance) == 0:
            # Ignore instances without any nodes
        if not instance.data.get("instance_node"):
            # Ignore instances without an instance node
            # e.g. in memory bootstrap instances
            self.log.debug(
                "Skipping instance without instance node: {}".format(instance)
            )
            return

        invalid = self.get_invalid(instance)
@@ -1,15 +1,15 @@
# -*- coding: utf-8 -*-
"""Validator for checking that export is a single frame."""
import pyblish.api
from ayon_core.pipeline import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)
from ayon_core.pipeline.publish import ValidateContentsOrder
from ayon_houdini.api.action import SelectInvalidAction
from ayon_houdini.api import plugin


class ValidateSingleFrame(pyblish.api.InstancePlugin,
class ValidateSingleFrame(plugin.HoudiniInstancePlugin,
                          OptionalPyblishPluginMixin):
    """Validate Export is a Single Frame.
@@ -23,7 +23,7 @@ class ValidateHoudiniNotApprenticeLicense(plugin.HoudiniInstancePlugin):
    """

    order = pyblish.api.ValidatorOrder
    families = ["usd", "abc", "fbx", "camera"]
    families = ["usdrop", "abc", "fbx", "camera"]
    label = "Houdini Apprentice License"

    def process(self, instance):
@@ -30,6 +30,15 @@ class ValidateInstanceInContextHoudini(plugin.HoudiniInstancePlugin,
        if not self.is_active(instance.data):
            return

        attr_values = self.get_attr_values_from_data(instance.data)
        if not attr_values and not instance.data.get("instance_node"):
            # Skip instances that do not have the attr values because that
            # hints these are runtime-instances, like e.g. USD layer
            # contributions. We will confirm that by checking these do not
            # have an instance node. We do not need to check these because they
            # 'spawn off' from an original instance that has the check itself.
            return

        folder_path = instance.data.get("folderPath")
        task = instance.data.get("task")
        context = self.get_context(instance)
@@ -37,6 +37,13 @@ class ValidateNoErrors(plugin.HoudiniInstancePlugin):

    def process(self, instance):

        if not instance.data.get("instance_node"):
            self.log.debug(
                "Skipping 'Validate no errors' because instance "
                "has no instance node: {}".format(instance)
            )
            return

        validate_nodes = []

        if len(instance) > 0:
@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
import inspect
import hou
import pyblish.api

from ayon_core.pipeline import PublishValidationError

from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api import plugin


class ValidateUsdRenderProducts(plugin.HoudiniInstancePlugin):
    """Validate at least one render product is present"""

    order = pyblish.api.ValidatorOrder
    families = ["usdrender"]
    hosts = ["houdini"]
    label = "Validate Render Products"
    actions = [SelectROPAction]

    def get_description(self):
        return inspect.cleandoc(
            """### No Render Products

            The render submission specified no Render Product outputs and
            as such would not generate any rendered files.

            This is usually the case if no Render Settings or Render
            Products were created.

            Make sure to create the Render Settings
            relevant to the renderer you want to use.

            """
        )

    def process(self, instance):

        if not instance.data.get("output_node"):
            self.log.warning("No valid LOP node to render found.")
            return

        if not instance.data.get("files", []):
            node_path = instance.data["instance_node"]
            node = hou.node(node_path)
            rendersettings_path = (
                node.evalParm("rendersettings") or "/Render/rendersettings"
            )
            raise PublishValidationError(
                message=(
                    "No Render Products found in Render Settings "
                    "for '{}' at '{}'".format(node_path, rendersettings_path)
                ),
                description=self.get_description(),
                title=self.label
            )
@@ -0,0 +1,102 @@
import inspect

import hou
import pyblish.api

from ayon_core.pipeline import PublishValidationError
from ayon_core.pipeline.publish import RepairAction, OptionalPyblishPluginMixin

from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api import plugin


class ValidateUSDAssetContributionDefaultPrim(plugin.HoudiniInstancePlugin,
                                              OptionalPyblishPluginMixin):
    """Validate the default prim is set when USD contribution is set to asset.

    If the USD asset contributions is enabled and the user has it set to
    initialize asset as "asset" then most likely they are looking to publish
    into an asset structure - which should have a default prim that matches
    the folder's name. To ensure that's the case we force require the
    value to be set on the ROP node.

    Note that another validator "Validate USD Rop Default Prim" enforces the
    primitive actually exists (or has modifications) if the ROP specifies
    a default prim - so that does not have to be validated with this validator.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdrop"]
    hosts = ["houdini"]
    label = "Validate USD Asset Contribution Default Prim"
    actions = [SelectROPAction, RepairAction]

    # TODO: Unfortunately currently this does not show as optional toggle
    #   because the product type is `usd` and not `usdrop` - however we do
    #   not want to run this for ALL `usd` product types?
    optional = True

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        # Check if instance is set to be an asset contribution
        settings = self.get_attr_values_from_data_for_plugin_name(
            "CollectUSDLayerContributions", instance.data
        )
        if (
            not settings.get("contribution_enabled", False)
            or settings.get("contribution_target_product_init") != "asset"
        ):
            return

        rop_node = hou.node(instance.data["instance_node"])
        default_prim = rop_node.evalParm("defaultprim")
        if not default_prim:
            raise PublishValidationError(
                f"No default prim specified on ROP node: {rop_node.path()}",
                description=self.get_description()
            )

        folder_name = instance.data["folderPath"].rsplit("/", 1)[-1]
        if default_prim.lstrip("/") != folder_name:
            raise PublishValidationError(
                f"Default prim specified on ROP node does not match the "
                f"asset's folder name: '{default_prim}' "
                f"(should be: '/{folder_name}')",
                description=self.get_description()
            )

    @classmethod
    def repair(cls, instance):
        rop_node = hou.node(instance.data["instance_node"])
        rop_node.parm("defaultprim").set(
            "/`strsplit(chs(\"folderPath\"), \"/\", -1)`"
        )

    @staticmethod
    def get_attr_values_from_data_for_plugin_name(
            plugin_name: str, data: dict) -> dict:
        return (
            data
            .get("publish_attributes", {})
            .get(plugin_name, {})
        )

    def get_description(self):
        return inspect.cleandoc(
            """### Default primitive not set to current asset

            The USD instance has **USD Contribution** enabled and is set to
            initialize as **asset**. The asset requires a default root
            primitive with the name of the folder it's related to.

            For example, if you're working in `/asset/char_hero` then the
            folder's name is `char_hero`, and hence all prims for the asset
            should live under the `/char_hero` root primitive.

            This validation solely ensures the **default primitive** on the ROP
            node is set to match the folder name.
            """
        )
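For reference, the folder-name rule that the validator and its repair action both encode reduces to one line of string handling. A small stand-alone sketch (plain Python; `expected_default_prim` is a hypothetical helper, not part of the plug-in):

```python
def expected_default_prim(folder_path: str) -> str:
    """Return the default prim path implied by an AYON folder path."""
    # "/asset/char_hero" -> folder name "char_hero" -> prim "/char_hero"
    folder_name = folder_path.rsplit("/", 1)[-1]
    return f"/{folder_name}"


assert expected_default_prim("/asset/char_hero") == "/char_hero"

# A ROP value like "char_hero" (no leading slash) also passes the validator,
# since the comparison strips leading slashes before comparing names.
assert "char_hero".lstrip("/") == expected_default_prim(
    "/asset/char_hero").lstrip("/")
```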
@@ -1,54 +0,0 @@
# -*- coding: utf-8 -*-
import hou

import pyblish.api
from ayon_core.pipeline import PublishValidationError

from ayon_houdini.api import plugin
import ayon_houdini.api.usd as hou_usdlib


class ValidateUSDLayerPathBackslashes(plugin.HoudiniInstancePlugin):
    """Validate USD loaded paths have no backslashes.

    This is a crucial validation for HUSK USD rendering as Houdini's
    USD Render ROP will fail to write out a .usd file for rendering that
    correctly preserves the backslashes, e.g. it will incorrectly convert a
    '\t' to a TAB character disallowing HUSK to find those specific files.

    This validation is redundant for usdModel since that flattens the model
    before write. As such it will never have any used layers with a path.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdSetDress", "usdShade", "usd", "usdrender"]
    label = "USD Layer path backslashes"
    optional = True

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for layer in stage.GetUsedLayers():
            references = layer.externalReferences

            for ref in references:

                # Ignore anonymous layers
                if ref.startswith("anon:"):
                    continue

                # If any backslashes in the path consider it invalid
                if "\\" in ref:
                    self.log.error("Found invalid path: %s" % ref)
                    invalid.append(layer)

        if invalid:
            raise PublishValidationError((
                "Loaded layers have backslashes. "
                "This is invalid for HUSK USD rendering."),
                title=self.label)
@@ -0,0 +1,95 @@
# -*- coding: utf-8 -*-
import inspect
import hou
from pxr import Usd, UsdShade, UsdGeom

import pyblish.api

from ayon_core.pipeline.publish import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)
from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api import plugin


def has_material(prim: Usd.Prim,
                 include_subsets: bool = True,
                 purpose=UsdShade.Tokens.allPurpose) -> bool:
    """Return whether primitive has any material binding."""
    search_from = [prim]
    if include_subsets:
        subsets = UsdShade.MaterialBindingAPI(prim).GetMaterialBindSubsets()
        for subset in subsets:
            search_from.append(subset.GetPrim())

    bounds = UsdShade.MaterialBindingAPI.ComputeBoundMaterials(search_from,
                                                               purpose)
    for (material, relationship) in zip(*bounds):
        material_prim = material.GetPrim()
        if material_prim.IsValid():
            # Has a material binding
            return True

    return False


class ValidateUsdLookAssignments(plugin.HoudiniInstancePlugin,
                                 OptionalPyblishPluginMixin):
    """Validate all geometry prims have a material binding.

    Note: This does not necessarily validate the material binding is authored
    by the current layers if the input already had material bindings.

    """

    order = pyblish.api.ValidatorOrder
    families = ["look"]
    hosts = ["houdini"]
    label = "Validate All Geometry Has Material Assignment"
    actions = [SelectROPAction]
    optional = True

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        lop_node: hou.LopNode = instance.data.get("output_node")
        if not lop_node:
            return

        # We iterate the composed stage for code simplicity; however this
        # means that it does not validate across e.g. multiple model variants
        # but only checks against the current composed stage. Likely this is
        # also what you actually want to validate, because your look might not
        # apply to *all* model variants.
        stage = lop_node.stage()
        invalid = []
        for prim in stage.Traverse():
            if not prim.IsA(UsdGeom.Gprim):
                continue

            if not has_material(prim):
                invalid.append(prim.GetPath())

        for path in sorted(invalid):
            self.log.warning("No material binding on: %s", path.pathString)

        if invalid:
            raise PublishValidationError(
                "Found geometry without material bindings.",
                title="No assigned materials",
                description=self.get_description()
            )

    @staticmethod
    def get_description():
        return inspect.cleandoc(
            """### Geometry has no material assignments.

            A look publish should usually define a material assignment for all
            geometry of a model. As such, this validates whether all geometry
            currently has at least one material binding applied.

            """
        )
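A quick way to see `has_material()` behave is an in-memory stage. A small sketch, assuming `pxr` is importable and `has_material` is imported from the validator module above:

```python
from pxr import Usd, UsdGeom, UsdShade
# `has_material` as defined in the validator above

stage = Usd.Stage.CreateInMemory()
mesh = UsdGeom.Mesh.Define(stage, "/root/geo/plane")
material = UsdShade.Material.Define(stage, "/root/mtl/gray")

# No binding authored yet, so nothing resolves.
assert not has_material(mesh.GetPrim())

# Bind the material; ComputeBoundMaterials now resolves a valid material.
UsdShade.MaterialBindingAPI.Apply(mesh.GetPrim()).Bind(material)
assert has_material(mesh.GetPrim())
```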
@@ -0,0 +1,148 @@
# -*- coding: utf-8 -*-
import inspect
from typing import List, Union
from functools import partial

import hou
from pxr import Sdf
import pyblish.api

from ayon_core.pipeline.publish import PublishValidationError
from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api.usd import get_schema_type_names
from ayon_houdini.api import plugin


def get_applied_items(list_proxy) -> List[Union[Sdf.Reference, Sdf.Payload]]:
    """Backwards compatible equivalent of `GetAppliedItems()`"""
    return list_proxy.ApplyEditsToList([])


class ValidateUsdLookContents(plugin.HoudiniInstancePlugin):
    """Validate no meshes are defined in the look.

    Usually, a published look should not contain generated meshes in the
    output but only the materials, material bindings and render geometry
    settings.

    To avoid accidentally including a Mesh definition we ensure none of the
    generated output layers for the instance is defining any Mesh type.

    """

    order = pyblish.api.ValidatorOrder
    families = ["look"]
    hosts = ["houdini"]
    label = "Validate Look No Meshes/Lights"
    actions = [SelectROPAction]

    disallowed_types = [
        "UsdGeomBoundable",  # Meshes/Lights/Procedurals
        "UsdRenderSettingsBase",  # Render Settings
        "UsdRenderVar",  # Render Var
        "UsdGeomCamera"  # Cameras
    ]

    def process(self, instance):

        lop_node: hou.LopNode = instance.data.get("output_node")
        if not lop_node:
            return

        # Get layers below layer break
        above_break_layers = set(lop_node.layersAboveLayerBreak())
        stage = lop_node.stage()
        layers = [
            layer for layer
            in stage.GetLayerStack(includeSessionLayers=False)
            if layer.identifier not in above_break_layers
        ]
        if not layers:
            return

        # The Sdf.PrimSpec type name will not have knowledge about inherited
        # types for the type, name. So we pre-collect all invalid types
        # and their child types to ensure we match inherited types as well.
        disallowed_type_names = set()
        for type_name in self.disallowed_types:
            disallowed_type_names.update(get_schema_type_names(type_name))

        # Find invalid prims
        invalid = []

        def collect_invalid(layer: Sdf.Layer, path: Sdf.Path):
            """Collect invalid paths into the `invalid` list"""
            if not path.IsPrimPath():
                return

            prim = layer.GetPrimAtPath(path)
            if prim.typeName in disallowed_type_names:
                self.log.warning(
                    "Disallowed prim type '%s' at %s",
                    prim.typeName, prim.path.pathString
                )
                invalid.append(path)
                return

            # TODO: We should allow referencing or payloads, but if so - we
            #   should still check whether the loaded reference or payload
            #   introduces any geometry. If so, disallow it because that
            #   opinion would 'define' geometry in the output
            references = get_applied_items(prim.referenceList)
            if references:
                self.log.warning(
                    "Disallowed references are added at %s: %s",
                    prim.path.pathString,
                    ", ".join(ref.assetPath for ref in references)
                )
                invalid.append(path)

            payloads = get_applied_items(prim.payloadList)
            if payloads:
                self.log.warning(
                    "Disallowed payloads are added at %s: %s",
                    prim.path.pathString,
                    ", ".join(payload.assetPath for payload in payloads)
                )
                invalid.append(path)

        for layer in layers:
            layer.Traverse("/", partial(collect_invalid, layer))

        if invalid:
            raise PublishValidationError(
                "Invalid look members found.",
                title="Look Invalid Members",
                description=self.get_description()
            )

    @staticmethod
    def get_description():
        return inspect.cleandoc(
            """### Look contains invalid members

            A look publish should usually only contain materials, material
            bindings and render geometry settings.

            This validation invalidates any creation of:
            - Render Settings,
            - Lights,
            - Cameras,
            - Geometry (Meshes, Curves and other geometry types)

            To avoid writing out loaded geometry into the output make sure to
            add a Layer Break after loading all the content you do **not** want
            to save into the output file. Then your materials, material
            bindings and render geometry settings are overrides applied to the
            loaded content after the **Layer Break LOP** node.

            If you happen to write out additional data for the meshes via
            e.g. a SOP Modify make sure to import to LOPs only the relevant
            attributes, mark them as static attributes, static topology and
            set the Primitive Definitions to be Overlay instead of Defines.

            Currently, to avoid issues with referencing/payloading geometry
            from external files any references or payloads are also disallowed
            for looks.

            """
        )
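The `layer.Traverse("/", partial(...))` idiom above is easy to poke at in isolation. A minimal sketch with an anonymous `Sdf` layer (assuming `pxr` is available):

```python
from functools import partial
from pxr import Sdf

layer = Sdf.Layer.CreateAnonymous()
Sdf.CreatePrimInLayer(layer, "/root/geo/box")

found = []


def collect(layer, path):
    # Traverse visits every spec path (prims, properties, variants);
    # keep prim paths only, mirroring the validator's callback.
    if path.IsPrimPath():
        found.append(path.pathString)


layer.Traverse("/", partial(collect, layer))

# Ancestor prims are created implicitly and get visited too:
assert set(found) == {"/root", "/root/geo", "/root/geo/box"}
```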
@@ -0,0 +1,137 @@
# -*- coding: utf-8 -*-
import inspect
import hou
from pxr import Sdf, UsdShade
import pyblish.api

from ayon_core.pipeline.publish import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)
from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api.usd import get_schema_type_names
from ayon_houdini.api import plugin


class ValidateLookShaderDefs(plugin.HoudiniInstancePlugin,
                             OptionalPyblishPluginMixin):
    """Validate Material primitives are defined types instead of overs"""

    order = pyblish.api.ValidatorOrder
    families = ["look"]
    hosts = ["houdini"]
    label = "Validate Look Shaders Are Defined"
    actions = [SelectROPAction]
    optional = True

    # Types to validate at the low-level Sdf API
    # For Usd API we validate directly against `UsdShade.Material`
    validate_types = [
        "UsdShadeMaterial"
    ]

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        lop_node: hou.LopNode = instance.data.get("output_node")
        if not lop_node:
            return

        # Get layers below layer break
        above_break_layers = set(
            layer for layer in lop_node.layersAboveLayerBreak())
        stage = lop_node.stage()
        layers = [
            layer for layer
            in stage.GetLayerStack(includeSessionLayers=False)
            if layer.identifier not in above_break_layers
        ]
        if not layers:
            return

        # The Sdf.PrimSpec type name will not have knowledge about inherited
        # types for the type, name. So we pre-collect all invalid types
        # and their child types to ensure we match inherited types as well.
        validate_type_names = set()
        for type_name in self.validate_types:
            validate_type_names.update(get_schema_type_names(type_name))

        invalid = []
        for layer in layers:
            def log_overs(path: Sdf.Path):
                if not path.IsPrimPath():
                    return
                prim_spec = layer.GetPrimAtPath(path)

                if not prim_spec.typeName:
                    # Typeless may mean Houdini generated the material or
                    # shader as override because upstream the nodes already
                    # existed. So we check the stage instead to identify
                    # the composed type of the prim
                    prim = stage.GetPrimAtPath(path)
                    if not prim:
                        return

                    if not prim.IsA(UsdShade.Material):
                        return

                    self.log.debug("Material Prim has no type defined: %s",
                                   path)

                elif prim_spec.typeName not in validate_type_names:
                    return

                if prim_spec.specifier != Sdf.SpecifierDef:
                    specifier = {
                        Sdf.SpecifierDef: "Def",
                        Sdf.SpecifierOver: "Over",
                        Sdf.SpecifierClass: "Class"
                    }[prim_spec.specifier]

                    self.log.warning(
                        "Material is not defined but specified as "
                        "'%s': %s", specifier, path
                    )
                    invalid.append(path)

            layer.Traverse("/", log_overs)

        if invalid:
            raise PublishValidationError(
                "Found Materials not specifying an authored definition.",
                title="Materials not defined",
                description=self.get_description()
            )

    @staticmethod
    def get_description():
        return inspect.cleandoc(
            """### Materials are not defined types

            There are materials in your current look that do not **define** the
            material primitives, but rather **override** or specify a
            **class**. This is most likely not what you want since you want
            most looks to define new materials instead of overriding existing
            materials.

            Usually this happens if your current scene loads an input asset
            that already has the materials you're creating in your current
            scene as well. For example, if you are loading the Asset that
            contains the previous publish of your look without muting the
            look layer. As such, Houdini sees the materials already exist and
            will not make new definitions, but only write "override changes".
            However, once your look publish would replace the previous one then
            suddenly the materials would be missing and only specified as
            overrides.

            So, in most cases this is solved by Layer Muting upstream the
            look layers of the loaded asset.

            If for a specific case the materials already existing in the input
            is correct then you can either specify new material names for what
            you're creating in the current scene or disable this validation
            if you are sure you want to write overrides in your look publish
            instead of definitions.
            """
        )
@@ -1,79 +0,0 @@
# -*- coding: utf-8 -*-
import hou
from pxr import UsdShade, UsdRender, UsdLux

import pyblish.api
from ayon_core.pipeline import PublishValidationError

from ayon_houdini.api import plugin
import ayon_houdini.api.usd as hou_usdlib


def fullname(o):
    """Get fully qualified class name"""
    module = o.__module__
    if module is None or module == str.__module__:
        return o.__name__
    return module + "." + o.__name__


class ValidateUsdModel(plugin.HoudiniInstancePlugin):
    """Validate USD Model.

    Disallow Shaders, Render settings, products and vars and Lux lights.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdModel"]
    label = "Validate USD Model"
    optional = True

    disallowed = [
        UsdShade.Shader,
        UsdRender.Settings,
        UsdRender.Product,
        UsdRender.Var,
        UsdLux.Light,
    ]

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for prim in stage.Traverse():

            for klass in self.disallowed:
                if klass(prim):
                    # Get full class name without pxr. prefix
                    name = fullname(klass).split("pxr.", 1)[-1]
                    path = str(prim.GetPath())
                    self.log.warning("Disallowed %s: %s" % (name, path))

                    invalid.append(prim)

        if invalid:
            prim_paths = sorted([str(prim.GetPath()) for prim in invalid])
            raise PublishValidationError(
                "Found invalid primitives: {}".format(prim_paths))


class ValidateUsdShade(ValidateUsdModel):
    """Validate usdShade.

    Disallow Render settings, products, vars and Lux lights.

    """

    families = ["usdShade"]
    label = "Validate USD Shade"

    disallowed = [
        UsdRender.Settings,
        UsdRender.Product,
        UsdRender.Var,
        UsdLux.Light,
    ]
@@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
import inspect

import pyblish.api

from ayon_core.pipeline import PublishValidationError

from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api import plugin
@@ -16,18 +18,23 @@ class ValidateUSDOutputNode(plugin.HoudiniInstancePlugin):

    """

    order = pyblish.api.ValidatorOrder
    families = ["usd"]
    # Validate early so that this error reports higher than others to the user
    # so that if another invalidation is due to the output node being invalid
    # the user will likely first focus on this first issue
    order = pyblish.api.ValidatorOrder - 0.4
    families = ["usdrop"]
    label = "Validate Output Node (USD)"
    actions = [SelectROPAction]

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            path = invalid[0]
            raise PublishValidationError(
                ("Output node(s) `{}` are incorrect. "
                 "See plug-in log for details.").format(invalid),
                title=self.label
                "Output node '{}' has no valid LOP path set.".format(path),
                title=self.label,
                description=self.get_description()
            )

    @classmethod
@@ -35,12 +42,12 @@ class ValidateUSDOutputNode(plugin.HoudiniInstancePlugin):

        import hou

        output_node = instance.data["output_node"]
        output_node = instance.data.get("output_node")

        if output_node is None:
            node = hou.node(instance.data.get("instance_node"))
            cls.log.error(
                "USD node '%s' LOP path does not exist. "
                "USD node '%s' configured LOP path does not exist. "
                "Ensure a valid LOP path is set." % node.path()
            )
@@ -55,3 +62,13 @@ class ValidateUSDOutputNode(plugin.HoudiniInstancePlugin):
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

    def get_description(self):
        return inspect.cleandoc(
            """### USD ROP has invalid LOP path

            The USD ROP node has no or an invalid LOP path set to be exported.
            Make sure to correctly configure what you want to export for the
            publish.
            """
        )
@@ -0,0 +1,311 @@
# -*- coding: utf-8 -*-
import inspect
import hou
import pxr
from pxr import UsdRender
import pyblish.api

from ayon_core.pipeline.publish import PublishValidationError, RepairAction

from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api.usd import get_usd_render_rop_rendersettings
from ayon_houdini.api import plugin


class ValidateUSDRenderSingleFile(plugin.HoudiniInstancePlugin):
    """Validate the writing of a single USD Render Output file.

    When writing to a single file with the USD Render ROP, make sure to write
    the output USD file from a single process to avoid overwriting it from
    different processes.
    """

    order = pyblish.api.ValidatorOrder
    families = ["usdrender"]
    hosts = ["houdini"]
    label = "Validate USD Render ROP Settings"
    actions = [SelectROPAction, RepairAction]

    def process(self, instance):

        if instance.data.get("creator_attributes",
                             {}).get("render_target") != "farm_split":
            # Validation is only relevant when submitting a farm job where the
            # export and render are separate jobs.
            return

        # Get configured settings for this instance
        submission_data = (
            instance.data
            .get("publish_attributes", {})
            .get("HoudiniSubmitDeadlineUsdRender", {})
        )
        render_chunk_size = submission_data.get("chunk", 1)
        export_chunk_size = submission_data.get("export_chunk", 1)
        usd_file_per_frame = "$F" in instance.data["ifdFile"]
        frame_start_handle = instance.data["frameStartHandle"]
        frame_end_handle = instance.data["frameEndHandle"]
        num_frames = frame_end_handle - frame_start_handle + 1
        rop_node = hou.node(instance.data["instance_node"])

        # Whether ROP node is set to render all Frames within a single process
        # When this is disabled then Husk will restart completely per frame
        # no matter the chunk size.
        all_frames_at_once = rop_node.evalParm("allframesatonce")

        invalid = False
        if usd_file_per_frame:
            # USD file per frame
            # If rendering multiple frames per task and USD file has $F then
            # log a warning that the optimization will be less efficient
            # since husk will still restart per frame.
            if render_chunk_size > 1:
                self.log.debug(
                    "Render chunk size is bigger than one but export file is "
                    "a USD file per frame. Husk does not allow rendering "
                    "separate USD files in one process. As such, Husk will "
                    "restart per frame even within the chunk to render the "
                    "correct file per frame."
                )
        else:
            # Single export USD file
            # Export chunk size must be higher than the amount of frames to
            # ensure the file is written in one go on one machine and thus
            # ends up containing all frames correctly
            if export_chunk_size < num_frames:
                self.log.error(
                    "The export chunk size %s is smaller than the amount of "
                    "frames %s, so multiple tasks will try to export to "
                    "the same file. Make sure to increase chunk "
                    "size to higher than the amount of frames to render, "
                    "more than >%s",
                    export_chunk_size, num_frames, num_frames
                )
                invalid = True

            if not all_frames_at_once:
                self.log.error(
                    "Please enable 'Render All Frames With A Single Process' "
                    "on the USD Render ROP node or add $F to the USD filename",
                )
                invalid = True

        if invalid:
            raise PublishValidationError(
                "Render USD file being overwritten during export.",
                title="Render USD file overwritten",
                description=self.get_description())

    @classmethod
    def repair(cls, instance):
        # Enable all frames at once and make the frames per task
        # very large
        rop_node = hou.node(instance.data["instance_node"])
        rop_node.parm("allframesatonce").set(True)

        # Override instance setting for export chunk size
        create_context = instance.context.data["create_context"]
        created_instance = create_context.get_instance_by_id(
            instance.data["instance_id"]
        )
        created_instance.publish_attributes["HoudiniSubmitDeadlineUsdRender"]["export_chunk"] = 1000  # noqa
        create_context.save_changes()

    def get_description(self):
        return inspect.cleandoc(
            """### Render USD file configured incorrectly

            The USD render ROP is currently configured to write a single
            USD file to render instead of a file per frame.

            When that is the case, a single machine must produce that file in
            one process to avoid the file being overwritten by the other
            processes.

            We resolve that by enabling _Render All Frames With A Single
            Process_ on the ROP node and ensure the export job task size
            is larger than the amount of frames of the sequence, so the file
            gets written in one go.

            Run **Repair** to resolve this for you.

            If instead you want to write separate render USD files, please
            include $F in the USD output filename on the `ROP node > Output >
            USD Export > Output File`
            """
        )


class ValidateUSDRenderArnoldSettings(plugin.HoudiniInstancePlugin):
    """Validate USD Render Product names are correctly set absolute paths."""

    order = pyblish.api.ValidatorOrder
    families = ["usdrender"]
    hosts = ["houdini"]
    label = "Validate USD Render Arnold Settings"
    actions = [SelectROPAction]

    def process(self, instance):

        rop_node = hou.node(instance.data["instance_node"])
        node = instance.data.get("output_node")
        if not node:
            # No valid output node was set. We ignore it since it will
            # be validated by another plug-in.
            return

        # Check only for Arnold renderer
        renderer = rop_node.evalParm("renderer")
        if renderer != "HdArnoldRendererPlugin":
            self.log.debug("Skipping Arnold Settings validation because "
                           "renderer is set to: %s", renderer)
            return

        # Validate Arnold Product Type is enabled on the Arnold Render Settings
        # This is confirmed by the `includeAovs` attribute on the RenderProduct
        stage: pxr.Usd.Stage = node.stage()
        invalid = False
        for prim_path in instance.data.get("usdRenderProducts", []):
            prim = stage.GetPrimAtPath(prim_path)
            include_aovs = prim.GetAttribute("includeAovs")
            if not include_aovs.IsValid() or not include_aovs.Get(0):
                self.log.error(
                    "All Render Products must be set to 'Arnold Product "
                    "Type' on the Arnold Render Settings node to ensure "
                    "correct output of metadata and AOVs."
                )
                invalid = True
                break

        # Ensure 'Delegate Products' is enabled for Husk
        if not rop_node.evalParm("husk_delegateprod"):
            invalid = True
            self.log.error("USD Render ROP has `Husk > Rendering > Delegate "
                           "Products` disabled. Please enable to ensure "
                           "correct output files")

        # TODO: Detect bug of invalid Cryptomatte state?
        #   Detect if any Render Products were set that do not actually exist
        #   (e.g. invalid rendervar targets for a renderproduct) because that
        #   is what originated the Cryptomatte enable->disable bug.

        if invalid:
            raise PublishValidationError(
                "Invalid Render Settings for Arnold render."
            )
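The split-job safety check above is just frame arithmetic. Distilled into a stand-alone sketch (hypothetical helper names, same logic as the validator):

```python
def export_writes_single_file(ifd_file: str) -> bool:
    """A '$F' in the output path means one USD file per frame."""
    return "$F" not in ifd_file


def chunk_is_safe(frame_start: int, frame_end: int, export_chunk: int) -> bool:
    """Single-file exports are only safe when one task covers all frames."""
    num_frames = frame_end - frame_start + 1
    return export_chunk >= num_frames


assert export_writes_single_file("/renders/shot.usd")

# 1001-1100 is 100 frames: a chunk of 50 would split the export over two
# farm tasks that both (re)write the same USD file.
assert not chunk_is_safe(1001, 1100, 50)
assert chunk_is_safe(1001, 1100, 1000)
```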


class ValidateUSDRenderCamera(plugin.HoudiniInstancePlugin):
    """Validate USD Render Settings refer to a valid render camera.

    The render camera is defined in priority by this order:
    1. ROP Node Override Camera Parm (if set)
    2. Render Product Camera (if set - this may differ PER render product!)
    3. Render Settings Camera (if set)

    If none of these are set *or* a currently set entry resolves to an invalid
    camera prim path then we'll report it as an error.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdrender"]
    hosts = ["houdini"]
    label = "Validate USD Render Camera"
    actions = [SelectROPAction]

    def process(self, instance):

        rop_node = hou.node(instance.data["instance_node"])
        lop_node = instance.data.get("output_node")
        if not lop_node:
            # No valid output node was set. We ignore it since it will
            # be validated by another plug-in.
            return

        stage = lop_node.stage()

        render_settings = get_usd_render_rop_rendersettings(rop_node, stage,
                                                            logger=self.log)
        if not render_settings:
            # Without render settings we basically have no defined camera
            self.log.error("No render settings found for %s.", rop_node.path())
            return

        render_settings_camera = self._get_camera(render_settings)
        rop_camera = rop_node.evalParm("override_camera")

        invalid = False
        camera_paths = set()
        for render_product in self.iter_render_products(render_settings,
                                                        stage):
            render_product_camera = self._get_camera(render_product)

            # Get first camera path as per the order in this plug-in docstring
            camera_path = next(
                (cam_path for cam_path in [rop_camera,
                                           render_product_camera,
                                           render_settings_camera]
                 if cam_path),
                None
            )
            if not camera_path:
                self.log.error(
                    "No render camera defined for render product: '%s'",
                    render_product.GetPath()
                )
                invalid = True
                continue

            camera_paths.add(camera_path)

        # For the camera paths used across the render products detect
        # whether the path is a valid camera in the stage
        for camera_path in sorted(camera_paths):
            camera_prim = stage.GetPrimAtPath(camera_path)
            if not camera_prim or not camera_prim.IsValid():
                self.log.error(
                    "Render camera path '%s' does not exist in stage.",
                    camera_path
                )
                invalid = True
                continue

            if not camera_prim.IsA(pxr.UsdGeom.Camera):
                self.log.error(
                    "Render camera path '%s' is not a camera.",
                    camera_path
                )
                invalid = True

        if invalid:
            raise PublishValidationError(
                f"No render camera found for {instance.name}.",
                title="Invalid Render Camera",
                description=self.get_description()
            )

    def iter_render_products(self, render_settings, stage):
        for product_path in render_settings.GetProductsRel().GetTargets():
            prim = stage.GetPrimAtPath(product_path)
            if prim.IsA(UsdRender.Product):
                yield UsdRender.Product(prim)

    def _get_camera(self, settings: UsdRender.SettingsBase):
        """Return primary camera target from RenderSettings or RenderProduct"""
        camera_targets = settings.GetCameraRel().GetForwardedTargets()
        if camera_targets:
            return camera_targets[0]

    def get_description(self):
        return inspect.cleandoc(
            """### Missing render camera

            No valid render camera was set for the USD Render Settings.

            The configured render camera path must be a valid camera in the
            stage. Make sure it refers to an existing path and that it is
            a camera.

            """
        )
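The camera lookup priority in `ValidateUSDRenderCamera` reduces to a first-non-empty pick. A small sketch with a hypothetical `resolve_render_camera` helper:

```python
def resolve_render_camera(rop_camera, product_camera, settings_camera):
    """First non-empty entry wins: ROP override > product > settings."""
    return next(
        (cam for cam in (rop_camera, product_camera, settings_camera) if cam),
        None
    )


assert resolve_render_camera("", "/cams/shot", "/cams/main") == "/cams/shot"
assert resolve_render_camera("/cams/override", "", "/cams/main") == "/cams/override"
assert resolve_render_camera("", "", "") is None  # nothing set: an error case
```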
@@ -18,7 +18,7 @@ class ValidateUSDRenderProductNames(plugin.HoudiniInstancePlugin):
    def process(self, instance):

        invalid = []
        for filepath in instance.data["files"]:
        for filepath in instance.data.get("files", []):

            if not filepath:
                invalid.append("Detected empty output filepath.")
@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
import os
import hou
import inspect
import pyblish.api

from ayon_core.pipeline import (
    OptionalPyblishPluginMixin,
    PublishValidationError
)

from ayon_houdini.api import plugin


class ValidateUSDRenderProductPaths(plugin.HoudiniInstancePlugin,
                                    OptionalPyblishPluginMixin):
    """Validate USD render output paths are unique per workfile version.

    The publishing logic uses a metadata `.json` in the render output images'
    folder to identify how the files should be published. To ensure multiple
    subsequently submitted versions of a scene do not override the same
    metadata json file we want to ensure the user has the render paths set up
    to contain the $HIPNAME in a parent folder.

    """
    # NOTE(colorbleed): This workflow might be relatively Colorbleed-specific
    # TODO: Preferably we find ways to make what this tries to avoid no issue
    #   itself by e.g. changing how AYON deals with these metadata json files.

    order = pyblish.api.ValidatorOrder
    families = ["usdrender"]
    hosts = ["houdini"]
    label = "Validate USD Render Product Paths"
    optional = True

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        current_file = instance.context.data["currentFile"]

        # mimic `$HIPNAME:r` because `hou.text.collapseCommonVars` can not
        # collapse it
        hipname_r = os.path.splitext(os.path.basename(current_file))[0]

        invalid = False
        for filepath in instance.data.get("files", []):
            folder = os.path.dirname(filepath)

            if hipname_r not in folder:
                filepath_raw = hou.text.collapseCommonVars(filepath, vars=[
                    "$HIP", "$JOB", "$HIPNAME"
                ])
                filepath_raw = filepath_raw.replace(hipname_r, "$HIPNAME:r")
                self.log.error("Invalid render output path:\n%s", filepath_raw)
                invalid = True

        if invalid:
            raise PublishValidationError(
                "Render path is invalid. Please make sure to include a "
                "folder with '$HIPNAME:r'.",
                title=self.label,
                description=self.get_description()
            )

    def get_description(self):
        return inspect.cleandoc(
            """### Invalid render output path

            The render output path must include the current scene name in
            a parent folder to ensure uniqueness across multiple workfile
            versions. Otherwise subsequent farm publishes could fail because
            newer versions will overwrite the metadata files of older versions.

            The easiest way to do so is to include **`$HIPNAME:r`** somewhere
            in the render product names.

            A recommended output path is for example:
            ```
            $HIP/renders/$HIPNAME:r/$OS/$HIPNAME:r.$OS.$F4.exr
            ```
            """
        )
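The `$HIPNAME:r` emulation above is worth seeing on its own. A sketch of the same logic with hypothetical sample paths:

```python
import os


def hipname_r(current_file: str) -> str:
    """Mimic Houdini's `$HIPNAME:r`: file name without directory or extension."""
    return os.path.splitext(os.path.basename(current_file))[0]


name = hipname_r("/proj/work/shot010_lighting_v003.hip")
assert name == "shot010_lighting_v003"

# The validator then simply requires that name in the output folder, so each
# workfile version renders (and writes its metadata json) to its own folder:
output = "/proj/renders/shot010_lighting_v003/beauty/beauty.1001.exr"
assert name in os.path.dirname(output)
```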
@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-
import inspect
import hou
from pxr import Sdf
import pyblish.api

from ayon_core.pipeline import PublishValidationError

from ayon_houdini.api.action import SelectROPAction
from ayon_houdini.api import plugin


class ValidateUSDRopDefaultPrim(plugin.HoudiniInstancePlugin):
    """Validate the default prim exists if a default prim value is set on the ROP"""

    order = pyblish.api.ValidatorOrder
    families = ["usdrop"]
    hosts = ["houdini"]
    label = "Validate USD ROP Default Prim"
    actions = [SelectROPAction]

    def process(self, instance):

        rop_node = hou.node(instance.data["instance_node"])

        default_prim = rop_node.evalParm("defaultprim")
        if not default_prim:
            self.log.debug(
                "No default prim specified on ROP node: %s", rop_node.path()
            )
            return

        lop_node: hou.LopNode = instance.data.get("output_node")
        if not lop_node:
            return

        above_break_layers = set(lop_node.layersAboveLayerBreak())
        stage = lop_node.stage()
        layers = [
            layer for layer
            in stage.GetLayerStack(includeSessionLayers=False)
            if layer.identifier not in above_break_layers
        ]
        if not layers:
            self.log.error("No USD layers found. This is likely a bug.")
            return

        # TODO: This only would detect any local opinions on that prim and thus
        #   would fail to detect if a sublayer added on the stage root layer
        #   being exported would actually be generating the prim path. We
        #   should maybe consider that if this fails that we still check
        #   whether a sublayer doesn't create the default prim path.
        for layer in layers:
            if layer.GetPrimAtPath(default_prim):
                break
        else:
            # No prim found at the given path on any of the generated layers
            raise PublishValidationError(
                "Default prim specified by USD ROP does not exist in "
                f"stage: '{default_prim}'",
                title="Default Prim",
                description=self.get_description()
            )

        # Warn about any paths that are authored that are not a child
        # of the default prim
        outside_paths = set()
        default_prim_path = f"/{default_prim.strip('/')}"
        for layer in layers:

            def collect_outside_paths(path: Sdf.Path):
                """Collect all paths that are no child of the default prim"""

                if not path.IsPrimPath():
                    # Collect only prim paths
                    return

                # Ignore the HoudiniLayerInfo prim
                if path.pathString == "/HoudiniLayerInfo":
                    return

                if not path.pathString.startswith(default_prim_path):
                    outside_paths.add(path)

            layer.Traverse("/", collect_outside_paths)

        if outside_paths:
            self.log.warning(
                "Found paths that are not within default primitive path '%s'. "
                "When referencing, the following paths will not be loaded "
                "by default:",
                default_prim
            )
            for outside_path in sorted(outside_paths):
                self.log.warning("Outside default prim: %s", outside_path)

    def get_description(self):
        return inspect.cleandoc(
            """### Default Prim not found

            The USD ROP is currently configured to write the output
            USD file with a default prim. However, the default prim is not
            found in the USD stage.

            Make sure to double check the Default Prim setting on the USD
            ROP for typos or make sure the hierarchy and opinions you
            are creating exist in the default prim path.

            """
        )
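The for/else loop above is the core of the check: the error only triggers when *no* generated layer authors an opinion at the default prim path. A minimal reproduction with anonymous `Sdf` layers (assuming `pxr` is available):

```python
from pxr import Sdf

layer_a = Sdf.Layer.CreateAnonymous()
layer_b = Sdf.Layer.CreateAnonymous()
Sdf.CreatePrimInLayer(layer_b, "/char_hero")

default_prim = "char_hero"
for layer in (layer_a, layer_b):
    # Sdf.Layer.GetPrimAtPath returns the prim spec, or None when the
    # layer holds no opinion at that path.
    if layer.GetPrimAtPath(f"/{default_prim}"):
        break  # found an opinion for the default prim
else:
    # Runs only when the loop never breaks - i.e. no layer has the prim.
    raise ValueError(f"No layer authors the default prim '/{default_prim}'")
```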
@@ -1,58 +0,0 @@
# -*- coding: utf-8 -*-
import pyblish.api
from ayon_core.pipeline import PublishValidationError

from ayon_houdini.api import plugin
import ayon_houdini.api.usd as hou_usdlib


class ValidateUsdSetDress(plugin.HoudiniInstancePlugin):
    """Validate USD Set Dress.

    Must only have references or payloads. May not generate new mesh or
    flattened meshes.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdSetDress"]
    label = "Validate USD Set Dress"
    optional = True

    def process(self, instance):

        import hou
        from pxr import UsdGeom

        rop = hou.node(instance.data.get("instance_node"))
        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
        stage = lop_path.stage(apply_viewport_overrides=False)

        invalid = []
        for node in stage.Traverse():

            if UsdGeom.Mesh(node):
                # This solely checks whether there is any USD involved
                # in this Prim's Stack and doesn't accurately tell us
                # whether it was generated locally or not.
                # TODO: More accurately track whether the Prim was created
                #   in the local scene
                stack = node.GetPrimStack()
                for sdf in stack:
                    path = sdf.layer.realPath
                    if path:
                        break
                else:
                    prim_path = node.GetPath()
                    self.log.error(
                        "%s is not referenced geometry." % prim_path
                    )
                    invalid.append(node)

        if invalid:
            raise PublishValidationError((
                "SetDress contains local geometry. "
                "This is not allowed, it must be an assembly "
                "of referenced assets."),
                title=self.label
            )
@@ -1,49 +0,0 @@
# -*- coding: utf-8 -*-
import re

import ayon_api
from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    KnownPublishError,
    PublishValidationError,
)

from ayon_houdini.api import plugin


class ValidateUSDShadeModelExists(plugin.HoudiniInstancePlugin):
    """Validate the Instance has no current cooking errors."""

    order = ValidateContentsOrder
    families = ["usdShade"]
    label = "USD Shade model exists"

    def process(self, instance):
        project_name = instance.context.data["projectName"]
        folder_path = instance.data["folderPath"]
        product_name = instance.data["productName"]

        # Assume shading variation starts after a dot separator
        shade_product_name = product_name.split(".", 1)[0]
        model_product_name = re.sub(
            "^usdShade", "usdModel", shade_product_name
        )

        folder_entity = instance.data.get("folderEntity")
        if not folder_entity:
            raise KnownPublishError(
                "Folder entity is not filled on instance."
            )

        product_entity = ayon_api.get_product_by_name(
            project_name,
            model_product_name,
            folder_entity["id"],
            fields={"id"}
        )
        if not product_entity:
            raise PublishValidationError(
                ("USD Model product not found: "
                 "{} ({})").format(model_product_name, folder_path),
                title=self.label
            )
@@ -1,67 +0,0 @@
# -*- coding: utf-8 -*-
import hou

import pyblish.api
from ayon_core.pipeline import PublishValidationError

from ayon_houdini.api import plugin


class ValidateUsdShadeWorkspace(plugin.HoudiniInstancePlugin):
    """Validate USD Shading Workspace is correct version.

    There have been some issues with outdated/erroneous Shading Workspaces
    so this is to confirm everything is set as it should.

    """

    order = pyblish.api.ValidatorOrder
    families = ["usdShade"]
    label = "USD Shade Workspace"

    def process(self, instance):

        rop = hou.node(instance.data.get("instance_node"))
        workspace = rop.parent()

        definition = workspace.type().definition()
        name = definition.nodeType().name()
        library = definition.libraryFilePath()

        all_definitions = hou.hda.definitionsInFile(library)
        node_type, version = name.rsplit(":", 1)
        version = float(version)

        highest = version
        for other_definition in all_definitions:
            other_name = other_definition.nodeType().name()
            other_node_type, other_version = other_name.rsplit(":", 1)
            other_version = float(other_version)

            if node_type != other_node_type:
                continue

            # Get the highest version
            highest = max(highest, other_version)

        if version != highest:
            raise PublishValidationError(
                ("Shading Workspace is not the latest version."
                 " Found {}. Latest is {}.").format(version, highest),
                title=self.label
            )

        # There were some issues with the editable node not having the right
        # configured path. So for now let's assure that is correct too.
        value = (
            'avalon://`chs("../folder_path")`/'
            'usdShade`chs("../model_variantname1")`.usd'
        )
        rop_value = rop.parm("lopoutput").rawValue()
        if rop_value != value:
            raise PublishValidationError(
                ("Shading Workspace has invalid 'lopoutput'"
                 " parameter value. The Shading Workspace"
                 " needs to be reset to its default values."),
                title=self.label
            )
@@ -0,0 +1,135 @@
import logging

from husd.outputprocessor import OutputProcessor

from ayon_core.pipeline import entity_uri
from ayon_core.pipeline.load.utils import get_representation_path_by_names


class AYONURIOutputProcessor(OutputProcessor):
    """Process AYON Entity URIs into their full path equivalents."""

    def __init__(self):
        """There is only one object of each output processor class that is
        ever created in a Houdini session. Therefore, be very careful
        about what data gets put in this object.
        """
        self._save_cache = dict()
        self._ref_cache = dict()
        self._publish_context = None
        self.log = logging.getLogger(__name__)

    @staticmethod
    def name():
        return "ayon_uri_processor"

    @staticmethod
    def displayName():
        return "AYON URI Output Processor"

    def processReferencePath(self,
                             asset_path,
                             referencing_layer_path,
                             asset_is_layer):
        """
        Args:
            asset_path (str): The path to the asset, as specified in Houdini.
                If this asset is being written to disk, this will be the
                final output of the `processSavePath()` calls on all output
                processors.
            referencing_layer_path (str): The absolute file path of the file
                containing the reference to the asset. You can use this to
                make the path pointer relative.
            asset_is_layer (bool): A boolean value indicating whether this
                asset is a USD layer file. If this is `False`, the asset is
                something else (for example, a texture or volume file).

        Returns:
            The refactored reference path.

        """
        cache = self._ref_cache

        # Retrieve from cache if this query occurred before (optimization)
        if asset_path in cache:
            return cache[asset_path]

        uri_data = entity_uri.parse_ayon_entity_uri(asset_path)
        if not uri_data:
            cache[asset_path] = asset_path
            return asset_path

        # Try and find it as an existing publish
        query = {
            "project_name": uri_data["project"],
            # The parsed URI data stores the folder under "folderPath"
            "folder_path": uri_data["folderPath"],
            "product_name": uri_data["product"],
            "version_name": uri_data["version"],
            "representation_name": uri_data["representation"],
        }
        path = get_representation_path_by_names(
            **query
        )
        if path:
            self.log.debug(
                "AYON URI Resolver - ref: %s -> %s", asset_path, path
            )
            cache[asset_path] = path
            return path

        elif self._publish_context:
            # Query doesn't resolve to an existing version - likely
            # points to a version defined in the current publish session
            # as such we should resolve it using the current publish
            # context if that was set prior to this publish
            raise NotImplementedError("TODO")

        self.log.warning(f"Unable to resolve AYON URI: {asset_path}")
        cache[asset_path] = asset_path
        return asset_path

    def processSavePath(self,
                        asset_path,
                        referencing_layer_path,
                        asset_is_layer):
        """
        Args:
            asset_path (str): The path to the asset, as specified in Houdini.
                If this asset is being written to disk, this will be the
                final output of the `processSavePath()` calls on all output
                processors.
            referencing_layer_path (str): The absolute file path of the file
                containing the reference to the asset. You can use this to
                make the path pointer relative.
            asset_is_layer (bool): A boolean value indicating whether this
                asset is a USD layer file. If this is `False`, the asset is
                something else (for example, a texture or volume file).

        Returns:
            The refactored save path.

        """
        cache = self._save_cache

        # Retrieve from cache if this query occurred before (optimization)
        if asset_path in cache:
            return cache[asset_path]

        uri_data = entity_uri.parse_ayon_entity_uri(asset_path)
        if not uri_data:
            cache[asset_path] = asset_path
            return asset_path

        # Set save output path to a relative path so other
        # processors can potentially manage it easily?
        # Note: the parsed URI data has no "asset" key, so the folder path
        # is flattened here to keep the result a valid relative filename.
        relative_template = "{folder}_{product}_{version}_{representation}.usd"
        path = relative_template.format(
            folder=uri_data["folderPath"].strip("/").replace("/", "_"),
            product=uri_data["product"],
            version=uri_data["version"],
            representation=uri_data["representation"],
        )

        self.log.debug("AYON URI Resolver - save: %s -> %s", asset_path, path)
        cache[asset_path] = path
        return path


def usdOutputProcessor():
    return AYONURIOutputProcessor

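For orientation, a rough sketch of how this processor behaves when Houdini hands it a reference path (the resolved output paths are illustrative only; the real result depends on the project's anatomy templates):

    # Illustrative usage; resolved paths are made up.
    processor = AYONURIOutputProcessor()

    # Plain file paths pass through unchanged (and get cached).
    processor.processReferencePath(
        "/tmp/textures/wood.exr", "/tmp/scene.usda", asset_is_layer=False)
    # -> "/tmp/textures/wood.exr"

    # AYON entity URIs are parsed and resolved to the published
    # representation's file path, if such a version exists on the server.
    processor.processReferencePath(
        "ayon://test/char/villain?product=modelMain&version=2"
        "&representation=usd",
        "/tmp/scene.usda", asset_is_layer=True)
    # -> e.g. "/projects/test/char/villain/publish/.../modelMain_v002.usd"
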
@@ -0,0 +1,66 @@
import os
import json

import hou
from husd.outputprocessor import OutputProcessor


class AYONRemapPaths(OutputProcessor):
    """Remap paths based on a mapping dict on rop node."""

    def __init__(self):
        self._mapping = dict()

    @staticmethod
    def name():
        return "ayon_remap_paths"

    @staticmethod
    def displayName():
        return "AYON Remap Paths"

    @staticmethod
    def hidden():
        return True

    @staticmethod
    def parameters():
        group = hou.ParmTemplateGroup()

        parm_template = hou.StringParmTemplate(
            "ayon_remap_paths_remap_json",
            "Remapping dict (json)",
            default_value="{}",
            num_components=1,
            string_type=hou.stringParmType.Regular,
        )
        group.append(parm_template)

        return group.asDialogScript()

    def beginSave(self, config_node, config_overrides, lop_node, t):
        super(AYONRemapPaths, self).beginSave(config_node,
                                              config_overrides,
                                              lop_node,
                                              t)

        value = config_node.evalParm("ayon_remap_paths_remap_json")
        mapping = json.loads(value)
        # Validate the freshly parsed value (not the previous attribute)
        assert isinstance(mapping, dict)

        # Ensure all keys are normalized paths so the lookup can be done
        # correctly
        mapping = {
            os.path.normpath(key): value for key, value in mapping.items()
        }
        self._mapping = mapping

    def processReferencePath(self,
                             asset_path,
                             referencing_layer_path,
                             asset_is_layer):
        return self._mapping.get(os.path.normpath(asset_path), asset_path)


def usdOutputProcessor():
    return AYONRemapPaths

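Since the mapping is read from a string parm at save time, a pipeline can drive it per ROP. A small sketch of wiring it up (the ROP path is an assumption, and the parm only exists once this output processor is enabled on the node):

    # Hypothetical setup; "/stage/usd_rop1" is an example node path.
    import json
    import hou

    rop = hou.node("/stage/usd_rop1")
    mapping = {"/path/old/asset.usd": "/path/new/asset.usd"}
    rop.parm("ayon_remap_paths_remap_json").set(json.dumps(mapping))
    # During save, every referenced path is normalized and looked up in
    # this dict; unmatched paths are returned untouched.
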
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring AYON addon 'houdini' version."""
-__version__ = "0.3.8"
+__version__ = "0.3.9"

@@ -1,6 +1,6 @@
 name = "houdini"
 title = "Houdini"
-version = "0.3.8"
+version = "0.3.9"
 
 client_dir = "ayon_houdini"

@@ -32,6 +32,16 @@ class CreateStaticMeshModel(BaseSettingsModel):
     )
 
 
+class CreateUSDRenderModel(CreatorModel):
+    default_renderer: str = SettingsField(
+        "Karma CPU",
+        title="Default Renderer",
+        description=(
+            "Specify either the Hydra renderer plug-in nice name, like "
+            "'Karma CPU', or the plug-in name, e.g. 'BRAY_HdKarma'"
+        ))
+
+
 class CreatePluginsModel(BaseSettingsModel):
     CreateAlembicCamera: CreatorModel = SettingsField(
         default_factory=CreatorModel,

@@ -78,10 +88,10 @@ class CreatePluginsModel(BaseSettingsModel):
         title="Create Static Mesh")
     CreateUSD: CreatorModel = SettingsField(
         default_factory=CreatorModel,
-        title="Create USD (experimental)")
-    CreateUSDRender: CreatorModel = SettingsField(
-        default_factory=CreatorModel,
-        title="Create USD render (experimental)")
+        title="Create USD")
+    CreateUSDRender: CreateUSDRenderModel = SettingsField(
+        default_factory=CreateUSDRenderModel,
+        title="Create USD render")
     CreateVDBCache: CreatorModel = SettingsField(
         default_factory=CreatorModel,
         title="Create VDB Cache")

@@ -158,12 +168,13 @@ DEFAULT_HOUDINI_CREATE_SETTINGS = {
         ]
     },
     "CreateUSD": {
-        "enabled": False,
+        "enabled": True,
         "default_variants": ["Main"]
     },
     "CreateUSDRender": {
-        "enabled": False,
-        "default_variants": ["Main"]
+        "enabled": True,
+        "default_variants": ["Main"],
+        "default_renderer": "Karma CPU"
     },
     "CreateVDBCache": {
         "enabled": True,

@@ -134,6 +134,9 @@ class PublishPluginsModel(BaseSettingsModel):
     ValidateWorkfilePaths: ValidateWorkfilePathsModel = SettingsField(
         default_factory=ValidateWorkfilePathsModel,
         title="Validate workfile paths settings")
+    ValidateUSDRenderProductPaths: BasicEnabledStatesModel = SettingsField(
+        default_factory=BasicEnabledStatesModel,
+        title="Validate USD Render Product Paths")
     ExtractActiveViewThumbnail: BasicEnabledStatesModel = SettingsField(
         default_factory=BasicEnabledStatesModel,
         title="Extract Active View Thumbnail",

@@ -202,6 +205,11 @@ DEFAULT_HOUDINI_PUBLISH_SETTINGS = {
             "$JOB"
         ]
     },
+    "ValidateUSDRenderProductPaths": {
+        "enabled": False,
+        "optional": True,
+        "active": True
+    },
     "ExtractActiveViewThumbnail": {
         "enabled": True,
         "optional": False,

@@ -1,5 +1,6 @@
 import os
 import re
+import clique
 
 import pyblish.api
 from ayon_core.pipeline.publish import KnownPublishError

@@ -271,22 +272,34 @@ class CollectYetiRig(plugin.MayaInstancePlugin):
             pattern (str): The pattern to swap with the variable frame number.
 
         Returns:
-            list: file sequence.
+            Optional[list[str]]: file sequence.
 
         """
-        import clique
-
-        escaped = re.escape(filepath)
-        re_pattern = escaped.replace(pattern, "-?[0-9]+")
-
+        filename = os.path.basename(filepath)
+        re_pattern = re.escape(filename)
+        re_pattern = re_pattern.replace(re.escape(pattern), "-?[0-9]+")
         source_dir = os.path.dirname(filepath)
-        files = [f for f in os.listdir(source_dir)
-                 if re.match(re_pattern, f)]
+        files = [f for f in os.listdir(source_dir) if re.match(re_pattern, f)]
+        if not files:
+            # Files do not exist, this may not be a problem if e.g. the
+            # textures were relative paths and we're searching across
+            # multiple image search paths.
+            return
 
+        collections, _remainder = clique.assemble(
+            files,
+            patterns=[clique.PATTERNS["frames"]],
+            minimum_items=1)
+        if len(collections) > 1:
+            raise ValueError(
+                f"Multiple collections found for {collections}. "
+                "This is a bug.")
 
-        pattern = [clique.PATTERNS["frames"]]
-        collection, remainder = clique.assemble(files, patterns=pattern)
-
-        return collection
+        return [
+            os.path.join(source_dir, filename)
+            for filename in collections[0]
+        ]
 
     def _replace_tokens(self, strings):
         env_re = re.compile(r"\$\{(\w+)\}")

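The rewritten `_get_files` matches only the file's basename against a frame-number regex and then lets clique group the hits into a sequence. A self-contained sketch of the same logic with made-up file names:

    # Standalone illustration; the file names are fabricated.
    import os
    import re
    import clique

    filepath = "/tmp/cache/fur.%04d.fur"
    pattern = "%04d"  # token that stands in for the frame number

    filename = os.path.basename(filepath)
    re_pattern = re.escape(filename).replace(re.escape(pattern), "-?[0-9]+")

    files = ["fur.0001.fur", "fur.0002.fur", "notes.txt"]
    matched = [f for f in files if re.match(re_pattern, f)]

    # clique groups the matched names by their varying frame number
    collections, _remainder = clique.assemble(
        matched, patterns=[clique.PATTERNS["frames"]], minimum_items=1)
    print(collections[0])  # e.g. "fur.%04d.fur [1-2]"
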
@@ -1,60 +0,0 @@
import os
from collections import defaultdict

from ayon_core.pipeline.publish import (
    PublishValidationError,
    ValidateContentsOrder,
)
from ayon_maya.api import plugin


class ValidateResources(plugin.MayaInstancePlugin):
    """Validates mapped resources.

    These are external files to the current application, for example
    these could be textures, image planes, cache files or other linked
    media.

    This validates:
        - The resources have unique filenames (without extension)

    """

    order = ValidateContentsOrder
    label = "Resources Unique"

    def process(self, instance):

        resources = instance.data.get("resources", [])
        if not resources:
            self.log.debug("No resources to validate..")
            return

        basenames = defaultdict(set)

        for resource in resources:
            files = resource.get("files", [])
            for filename in files:

                # Use normalized paths in comparison and ignore case
                # sensitivity
                filename = os.path.normpath(filename).lower()

                basename = os.path.splitext(os.path.basename(filename))[0]
                basenames[basename].add(filename)

        invalid_resources = list()
        for basename, sources in basenames.items():
            if len(sources) > 1:
                invalid_resources.extend(sources)

                self.log.error(
                    "Non-unique resource name: {0}"
                    "{0} (sources: {1})".format(
                        basename,
                        list(sources)
                    )
                )

        if invalid_resources:
            raise PublishValidationError("Invalid resources in instance.")