Merge branch 'develop' into enhancement/AY-1001_validate-render-settings-optional

This commit is contained in:
Ondřej Samohel 2024-05-02 11:35:47 +02:00 committed by GitHub
commit b87e9049ca
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
34 changed files with 1616 additions and 769 deletions

View file

@ -76,11 +76,11 @@ class FbxModelLoader(load.LoaderPlugin):
for fbx_object in current_fbx_objects:
fbx_object.name = f"{namespace}:{fbx_object.name}"
fbx_objects.append(fbx_object)
fbx_transform = f"{fbx_object.name}.transform"
fbx_transform = f"{fbx_object}.transform"
if fbx_transform in transform_data.keys():
fbx_object.pos = transform_data[fbx_transform] or 0
fbx_object.scale = transform_data[
f"{fbx_object.name}.scale"] or 0
f"{fbx_object}.scale"] or 0
with maintained_selection():
rt.Select(node)

View file

@ -67,11 +67,11 @@ class ObjLoader(load.LoaderPlugin):
selections = rt.GetCurrentSelection()
for selection in selections:
selection.name = f"{namespace}:{selection.name}"
selection_transform = f"{selection.name}.transform"
selection_transform = f"{selection}.transform"
if selection_transform in transform_data.keys():
selection.pos = transform_data[selection_transform] or 0
selection.scale = transform_data[
f"{selection.name}.scale"] or 0
f"{selection}.scale"] or 0
update_custom_attribute_data(node, selections)
with maintained_selection():
rt.Select(node)

View file

@ -95,11 +95,11 @@ class ModelUSDLoader(load.LoaderPlugin):
for children in asset.Children:
children.name = f"{namespace}:{children.name}"
usd_objects.append(children)
children_transform = f"{children.name}.transform"
children_transform = f"{children}.transform"
if children_transform in transform_data.keys():
children.pos = transform_data[children_transform] or 0
children.scale = transform_data[
f"{children.name}.scale"] or 0
f"{children}.scale"] or 0
asset.name = f"{namespace}:{asset.name}"
usd_objects.append(asset)

View file

@ -92,10 +92,10 @@ class OxAbcLoader(load.LoaderPlugin):
abc.Parent = container
abc.name = f"{namespace}:{abc.name}"
ox_abc_objects.append(abc)
ox_transform = f"{abc.name}.transform"
ox_transform = f"{abc}.transform"
if ox_transform in transform_data.keys():
abc.pos = transform_data[ox_transform] or 0
abc.scale = transform_data[f"{abc.name}.scale"] or 0
abc.scale = transform_data[f"{abc}.scale"] or 0
update_custom_attribute_data(node, ox_abc_objects)
lib.imprint(
container["instance_node"],

View file

@ -12,4 +12,4 @@
max create mode
python.ExecuteFile startup
)
)

View file

@ -0,0 +1,305 @@
import json
import logging
import os
from maya import cmds # noqa
from ayon_core.hosts.maya.api.lib import evaluation
# Module-level logger used for export diagnostics below.
log = logging.getLogger(__name__)

# The maya alembic export types
# Maps every AbcExport job flag supported by `extract_alembic` to the
# Python type (or tuple of types) its value must have.  The validation
# loop in `extract_alembic` uses this table to type-check options and to
# discard unsupported flags before the job string is assembled.
ALEMBIC_ARGS = {
    "attr": (list, tuple),
    "attrPrefix": (list, tuple),
    "autoSubd": bool,
    "dataFormat": str,
    "endFrame": float,
    "eulerFilter": bool,
    "frameRange": str,  # "start end"; overrides startFrame & endFrame
    "frameRelativeSample": float,
    "melPerFrameCallback": str,
    "melPostJobCallback": str,
    "noNormals": bool,
    "preRoll": bool,
    "preRollStartFrame": int,
    "pythonPerFrameCallback": str,
    "pythonPostJobCallback": str,
    "renderableOnly": bool,
    "root": (list, tuple),
    "selection": bool,
    "startFrame": float,
    "step": float,
    "stripNamespaces": bool,
    "userAttr": (list, tuple),
    "userAttrPrefix": (list, tuple),
    "uvWrite": bool,
    "uvsOnly": bool,
    "verbose": bool,
    "wholeFrameGeo": bool,
    "worldSpace": bool,
    "writeColorSets": bool,
    "writeCreases": bool,  # Maya 2015 Ext1+
    "writeFaceSets": bool,
    "writeUVSets": bool,  # Maya 2017+
    "writeVisibility": bool,
}
def extract_alembic(
    file,
    attr=None,
    attrPrefix=None,
    dataFormat="ogawa",
    endFrame=None,
    eulerFilter=True,
    frameRange="",
    noNormals=False,
    preRoll=False,
    preRollStartFrame=0,
    renderableOnly=False,
    root=None,
    selection=True,
    startFrame=None,
    step=1.0,
    stripNamespaces=True,
    uvWrite=True,
    verbose=False,
    wholeFrameGeo=False,
    worldSpace=False,
    writeColorSets=False,
    writeCreases=False,
    writeFaceSets=False,
    writeUVSets=False,
    writeVisibility=False,
    **kwargs
):
    """Extract a single Alembic Cache.

    This extracts an Alembic cache using the `-selection` flag to minimize
    the extracted content to solely what was Collected into the instance.

    Arguments:
        file (str): The filepath to write the alembic file to.
        attr (list of str, optional): A specific geometric attribute to write
            out. Defaults to [].
        attrPrefix (list of str, optional): Prefix filter for determining
            which geometric attributes to write out. Defaults to ["ABC_"].
        dataFormat (str): The data format to use for the cache,
            defaults to "ogawa"
        endFrame (float): End frame of output. Ignored if `frameRange`
            provided.
        eulerFilter (bool): When on, X, Y, and Z rotation data is filtered
            with an Euler filter to resolve irregularities in rotations
            exceeding 360 degrees. Defaults to True.
        frameRange (tuple or str): Two-tuple with start and end frame or a
            string formatted as: "startFrame endFrame". This argument
            overrides `startFrame` and `endFrame` arguments.
        noNormals (bool): When on, normal data from the original polygon
            objects is not included in the exported Alembic cache file.
        preRoll (bool): This frame range will not be sampled.
            Defaults to False.
        preRollStartFrame (int): The frame to start scene evaluation at.
            Used to evaluate run-up that isn't actually translated.
            Defaults to 0.
        renderableOnly (bool): When on, non-renderable hierarchy (such as
            hidden objects) is not included in the Alembic file.
            Defaults to False.
        root (list of str): Maya dag paths which will be parented to the
            root of the Alembic file. Defaults to [], which means the
            entire scene will be written out.
        selection (bool): Write out all selected nodes from the active
            selection list that are descendants of the roots specified with
            `root`. Defaults to True.
        startFrame (float): Start frame of output. Ignored if `frameRange`
            provided.
        step (float): The time interval (expressed in frames) at which the
            frame range is sampled. Defaults to 1.0.
        stripNamespaces (bool): When on, namespaces are stripped from the
            exported object names (taco:foo:bar appears as bar).
        uvWrite (bool): When on, UV data from polygon meshes and subdivision
            objects is written to the Alembic file (current UV map only).
        verbose (bool): When on, outputs frame number information during
            extraction.
        wholeFrameGeo (bool): Data for geometry will only be written out on
            whole frames. Defaults to False.
        worldSpace (bool): When on, the top node in the node hierarchy is
            stored as world space rather than local space.
            Defaults to False.
        writeColorSets (bool): Write all color sets on MFnMeshes as
            color 3 / color 4 indexed geometry parameters with face varying
            scope. Defaults to False.
        writeCreases (bool): If the mesh has crease edges or crease
            vertices, write it as an OSubD with crease info preserved.
            Defaults to False. (Maya 2018+ maps this to `autoSubd`.)
        writeFaceSets (bool): Write all Face sets on MFnMeshes.
            Defaults to False.
        writeUVSets (bool): Write all uv sets on MFnMeshes as vector 2
            indexed geometry parameters with face varying scope.
            Defaults to False.
        writeVisibility (bool): Store visibility state in the Alembic file.
            Otherwise everything written out is treated as visible.
            Defaults to False.
        **kwargs: Any additional flag supported by `ALEMBIC_ARGS` (e.g.
            `userAttr`, `userAttrPrefix`, `melPerFrameCallback`,
            `autoSubd`, `uvsOnly`). Unknown keys are warned about and
            ignored, matching the legacy `extract_alembic` behavior.

    Returns:
        str: The filepath the cache was written to.

    Raises:
        TypeError: When an option value does not match the type declared
            in `ALEMBIC_ARGS`, or when the resolved start/end frame is not
            a float.
        ValueError: When `frameRange` is a tuple/list that does not have
            exactly two items.
    """
    # Ensure alembic exporter is loaded
    cmds.loadPlugin('AbcExport', quiet=True)

    # Alembic Exporter requires forward slashes
    file = file.replace('\\', '/')

    # Ensure list arguments are valid.
    attr = attr or []
    attrPrefix = attrPrefix or []
    root = root or []

    # Pass the start and end frame on as `frameRange` so that it
    # never conflicts with that argument
    if not frameRange:
        # Fallback to maya timeline if no start or end frame provided.
        if startFrame is None:
            startFrame = cmds.playbackOptions(query=True,
                                              animationStartTime=True)
        if endFrame is None:
            endFrame = cmds.playbackOptions(query=True,
                                            animationEndTime=True)

        # Validate explicitly instead of `assert`: assertions are stripped
        # when Python runs with `-O` and must not guard input validation.
        if not isinstance(startFrame, ALEMBIC_ARGS["startFrame"]):
            raise TypeError(
                "startFrame must be of type {0}, got: {1!r}".format(
                    ALEMBIC_ARGS["startFrame"], startFrame))
        if not isinstance(endFrame, ALEMBIC_ARGS["endFrame"]):
            raise TypeError(
                "endFrame must be of type {0}, got: {1!r}".format(
                    ALEMBIC_ARGS["endFrame"], endFrame))
        frameRange = "{0} {1}".format(startFrame, endFrame)
    else:
        # Allow conversion from tuple for `frameRange`
        if isinstance(frameRange, (list, tuple)):
            if len(frameRange) != 2:
                raise ValueError(
                    "frameRange must be (start, end), got: {0!r}".format(
                        frameRange))
            frameRange = "{0} {1}".format(frameRange[0], frameRange[1])

    # Assemble options
    options = {
        "selection": selection,
        "frameRange": frameRange,
        "eulerFilter": eulerFilter,
        "noNormals": noNormals,
        "preRoll": preRoll,
        "renderableOnly": renderableOnly,
        # BUGFIX: `root` was normalized above but never added to the
        # options, silently ignoring the documented argument.
        "root": root,
        "uvWrite": uvWrite,
        "writeColorSets": writeColorSets,
        "writeFaceSets": writeFaceSets,
        "wholeFrameGeo": wholeFrameGeo,
        "worldSpace": worldSpace,
        "writeVisibility": writeVisibility,
        "writeUVSets": writeUVSets,
        "writeCreases": writeCreases,
        "dataFormat": dataFormat,
        "step": step,
        "attr": attr,
        "attrPrefix": attrPrefix,
        "stripNamespaces": stripNamespaces,
        "verbose": verbose,
        "preRollStartFrame": preRollStartFrame
    }
    # Merge any extra supported flags passed by keyword; unknown keys are
    # warned about and discarded by the validation loop below. This keeps
    # backward compatibility with the legacy `**kwargs` based API.
    options.update(kwargs)

    # Validate options
    for key, value in options.copy().items():

        # Discard unknown options
        if key not in ALEMBIC_ARGS:
            log.warning("extract_alembic() does not support option '%s'. "
                        "Flag will be ignored..", key)
            options.pop(key)
            continue

        # Validate value type
        valid_types = ALEMBIC_ARGS[key]
        if not isinstance(value, valid_types):
            raise TypeError("Alembic option unsupported type: "
                            "{0} (expected {1})".format(value, valid_types))

        # Ignore empty values, like an empty string, since they mess up how
        # job arguments are built
        if isinstance(value, (list, tuple)):
            value = [x for x in value if x.strip()]

            # Ignore option completely if no values remaining
            if not value:
                options.pop(key)
                continue

            options[key] = value

    # The `writeCreases` argument was changed to `autoSubd` in Maya 2018+
    maya_version = int(cmds.about(version=True))
    if maya_version >= 2018:
        options['autoSubd'] = options.pop('writeCreases', False)

    # Format the job string from options
    job_args = list()
    for key, value in options.items():
        if isinstance(value, (list, tuple)):
            for entry in value:
                job_args.append("-{} {}".format(key, entry))
        elif isinstance(value, bool):
            # Add only when state is set to True
            if value:
                job_args.append("-{0}".format(key))
        else:
            job_args.append("-{0} {1}".format(key, value))

    job_str = " ".join(job_args)
    job_str += ' -file "%s"' % file

    # Ensure output directory exists
    parent_dir = os.path.dirname(file)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)

    if verbose:
        log.debug("Preparing Alembic export with options: %s",
                  json.dumps(options, indent=4))
        log.debug("Extracting Alembic with job arguments: %s", job_str)

    # Perform extraction
    print("Alembic Job Arguments : {}".format(job_str))

    # Disable the parallel evaluation temporarily to ensure no buggy
    # exports are made. (PLN-31)
    # TODO: Make sure this actually fixes the issues
    with evaluation("off"):
        cmds.AbcExport(j=job_str, verbose=verbose)

    if verbose:
        log.debug("Extracted Alembic to: %s", file)

    return file

View file

@ -70,37 +70,6 @@ DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0]
# The maya alembic export types
# NOTE(review): legacy flag->type table for the `extract_alembic` keyword
# validation below; the new `ayon_core.hosts.maya.api.alembic` module
# carries its own copy of this mapping (`ALEMBIC_ARGS`).
_alembic_options = {
    "startFrame": float,
    "endFrame": float,
    "frameRange": str,  # "start end"; overrides startFrame & endFrame
    "eulerFilter": bool,
    "frameRelativeSample": float,
    "noNormals": bool,
    "renderableOnly": bool,
    "step": float,
    "stripNamespaces": bool,
    "uvWrite": bool,
    "wholeFrameGeo": bool,
    "worldSpace": bool,
    "writeVisibility": bool,
    "writeColorSets": bool,
    "writeFaceSets": bool,
    "writeCreases": bool,  # Maya 2015 Ext1+
    "writeUVSets": bool,   # Maya 2017+
    "dataFormat": str,
    "root": (list, tuple),
    "attr": (list, tuple),
    "attrPrefix": (list, tuple),
    "userAttr": (list, tuple),
    "melPerFrameCallback": str,
    "melPostJobCallback": str,
    "pythonPerFrameCallback": str,
    "pythonPostJobCallback": str,
    "selection": bool
}

# Frame rates expressed as whole numbers (including audio sample rates).
INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000}
# NTSC-style fractional frame rates.
FLOAT_FPS = {23.98, 23.976, 29.97, 47.952, 59.94}
@ -1346,178 +1315,6 @@ def is_visible(node,
return True
# NOTE(review): legacy implementation, superseded in this commit by
# `ayon_core.hosts.maya.api.alembic.extract_alembic`.
def extract_alembic(file,
                    startFrame=None,
                    endFrame=None,
                    selection=True,
                    uvWrite=True,
                    eulerFilter=True,
                    dataFormat="ogawa",
                    verbose=False,
                    **kwargs):
    """Extract a single Alembic Cache.

    This extracts an Alembic cache using the `-selection` flag to minimize
    the extracted content to solely what was Collected into the instance.

    Any additional keyword argument listed in `_alembic_options` (e.g.
    `root`, `attr`, `writeCreases`) is forwarded to the AbcExport job;
    unknown keys are warned about and ignored.

    Arguments:
        file (str): The filepath to write the alembic file to.
        startFrame (float): Start frame of output. Ignored if `frameRange`
            provided.
        endFrame (float): End frame of output. Ignored if `frameRange`
            provided.
        frameRange (tuple or str): Two-tuple with start and end frame or a
            string formatted as: "startFrame endFrame". This argument
            overrides `startFrame` and `endFrame` arguments.
        dataFormat (str): The data format to use for the cache,
            defaults to "ogawa"
        verbose (bool): When on, outputs frame number information to the
            Script Editor or output window during extraction.
        noNormals (bool): When on, normal data from the original polygon
            objects is not included in the exported Alembic cache file.
        renderableOnly (bool): When on, any non-renderable nodes or
            hierarchy, such as hidden objects, are not included in the
            Alembic file. Defaults to False.
        stripNamespaces (bool): When on, any namespaces associated with the
            exported objects are removed from the Alembic file. For example,
            an object with the namespace taco:foo:bar appears as bar in the
            Alembic file.
        uvWrite (bool): When on, UV data from polygon meshes and subdivision
            objects are written to the Alembic file. Only the current UV map
            is included.
        worldSpace (bool): When on, the top node in the node hierarchy is
            stored as world space. By default, these nodes are stored as
            local space. Defaults to False.
        eulerFilter (bool): When on, X, Y, and Z rotation data is filtered
            with an Euler filter. Euler filtering helps resolve
            irregularities in rotations especially if X, Y, and Z rotations
            exceed 360 degrees. Defaults to True.

    Returns:
        str: The filepath the cache was written to.
    """
    # Ensure alembic exporter is loaded
    cmds.loadPlugin('AbcExport', quiet=True)

    # Alembic Exporter requires forward slashes
    file = file.replace('\\', '/')

    # Pass the start and end frame on as `frameRange` so that it
    # never conflicts with that argument
    if "frameRange" not in kwargs:
        # Fallback to maya timeline if no start or end frame provided.
        if startFrame is None:
            startFrame = cmds.playbackOptions(query=True,
                                              animationStartTime=True)
        if endFrame is None:
            endFrame = cmds.playbackOptions(query=True,
                                            animationEndTime=True)

        # Ensure valid types are converted to frame range
        # NOTE(review): `assert` is stripped under `python -O`; explicit
        # raises would be safer for input validation.
        assert isinstance(startFrame, _alembic_options["startFrame"])
        assert isinstance(endFrame, _alembic_options["endFrame"])

        kwargs["frameRange"] = "{0} {1}".format(startFrame, endFrame)
    else:
        # Allow conversion from tuple for `frameRange`
        frame_range = kwargs["frameRange"]
        if isinstance(frame_range, (list, tuple)):
            assert len(frame_range) == 2
            kwargs["frameRange"] = "{0} {1}".format(frame_range[0],
                                                    frame_range[1])

    # Assemble options
    options = {
        "selection": selection,
        "uvWrite": uvWrite,
        "eulerFilter": eulerFilter,
        "dataFormat": dataFormat
    }
    options.update(kwargs)

    # Validate options
    for key, value in options.copy().items():

        # Discard unknown options
        if key not in _alembic_options:
            log.warning("extract_alembic() does not support option '%s'. "
                        "Flag will be ignored..", key)
            options.pop(key)
            continue

        # Validate value type
        valid_types = _alembic_options[key]
        if not isinstance(value, valid_types):
            raise TypeError("Alembic option unsupported type: "
                            "{0} (expected {1})".format(value, valid_types))

        # Ignore empty values, like an empty string, since they mess up how
        # job arguments are built
        if isinstance(value, (list, tuple)):
            value = [x for x in value if x.strip()]

            # Ignore option completely if no values remaining
            if not value:
                options.pop(key)
                continue

            options[key] = value

    # The `writeCreases` argument was changed to `autoSubd` in Maya 2018+
    maya_version = int(cmds.about(version=True))
    if maya_version >= 2018:
        options['autoSubd'] = options.pop('writeCreases', False)

    # Format the job string from options
    job_args = list()
    for key, value in options.items():
        if isinstance(value, (list, tuple)):
            # List/tuple flags are repeated once per entry.
            for entry in value:
                job_args.append("-{} {}".format(key, entry))
        elif isinstance(value, bool):
            # Add only when state is set to True
            if value:
                job_args.append("-{0}".format(key))
        else:
            job_args.append("-{0} {1}".format(key, value))

    job_str = " ".join(job_args)
    job_str += ' -file "%s"' % file

    # Ensure output directory exists
    parent_dir = os.path.dirname(file)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)

    if verbose:
        log.debug("Preparing Alembic export with options: %s",
                  json.dumps(options, indent=4))
        log.debug("Extracting Alembic with job arguments: %s", job_str)

    # Perform extraction
    print("Alembic Job Arguments : {}".format(job_str))

    # Disable the parallel evaluation temporarily to ensure no buggy
    # exports are made. (PLN-31)
    # TODO: Make sure this actually fixes the issues
    with evaluation("off"):
        cmds.AbcExport(j=job_str, verbose=verbose)

    if verbose:
        log.debug("Extracted Alembic to: %s", file)

    return file
# region ID
def get_id_required_nodes(referenced_nodes=False,
nodes=None,

View file

@ -1,89 +0,0 @@
from ayon_core.hosts.maya.api import (
lib,
plugin
)
from ayon_core.lib import (
BoolDef,
TextDef
)
# NOTE(review): legacy creator removed in this commit in favor of the
# version in the new create_animation_pointcache module.
class CreateAnimation(plugin.MayaHiddenCreator):
    """Animation output for character rigs

    We hide the animation creator from the UI since the creation of it is
    automated upon loading a rig. There's an inventory action to recreate it
    for loaded rigs if by chance someone deleted the animation instance.
    """
    identifier = "io.openpype.creators.maya.animation"
    name = "animationDefault"
    label = "Animation"
    product_type = "animation"
    icon = "male"

    # Defaults for the export toggles exposed in `get_instance_attr_defs`;
    # presumably overridable through project settings -- TODO confirm.
    write_color_sets = False
    write_face_sets = False
    include_parent_hierarchy = False
    include_user_defined_attributes = False

    def get_instance_attr_defs(self):
        """Return the attribute definitions shown on animation instances."""
        # Generic frame-range/step definitions shared by animation-style
        # product types, followed by the Alembic-specific toggles.
        defs = lib.collect_animation_defs()

        defs.extend([
            BoolDef("writeColorSets",
                    label="Write vertex colors",
                    tooltip="Write vertex colors with the geometry",
                    default=self.write_color_sets),
            BoolDef("writeFaceSets",
                    label="Write face sets",
                    tooltip="Write face sets with the geometry",
                    default=self.write_face_sets),
            BoolDef("writeNormals",
                    label="Write normals",
                    tooltip="Write normals with the deforming geometry",
                    default=True),
            BoolDef("renderableOnly",
                    label="Renderable Only",
                    tooltip="Only export renderable visible shapes",
                    default=False),
            BoolDef("visibleOnly",
                    label="Visible Only",
                    tooltip="Only export dag objects visible during "
                            "frame range",
                    default=False),
            BoolDef("includeParentHierarchy",
                    label="Include Parent Hierarchy",
                    tooltip="Whether to include parent hierarchy of nodes in "
                            "the publish instance",
                    default=self.include_parent_hierarchy),
            BoolDef("worldSpace",
                    label="World-Space Export",
                    default=True),
            BoolDef("includeUserDefinedAttributes",
                    label="Include User Defined Attributes",
                    default=self.include_user_defined_attributes),
            TextDef("attr",
                    label="Custom Attributes",
                    default="",
                    placeholder="attr1, attr2"),
            TextDef("attrPrefix",
                    label="Custom Attributes Prefix",
                    placeholder="prefix1, prefix2")
        ])

        # TODO: Implement these on a Deadline plug-in instead?
        """
        # Default to not send to farm.
        self.data["farm"] = False
        self.data["priority"] = 50
        """

        return defs

    def apply_settings(self, project_settings):
        """Apply project settings, then force-enable the creator."""
        super(CreateAnimation, self).apply_settings(project_settings)
        # Hardcoding creator to be enabled due to existing settings would
        # disable the creator causing the creator plugin to not be
        # discoverable.
        self.enabled = True

View file

@ -0,0 +1,139 @@
from maya import cmds
from ayon_core.hosts.maya.api import lib, plugin
from ayon_core.lib import (
BoolDef,
NumberDef,
)
from ayon_core.pipeline import CreatedInstance
def _get_animation_attr_defs(cls):
    """Return the attribute definitions shared by the animation creators.

    Combines the generic frame-range/step definitions from
    `lib.collect_animation_defs` with the farm-submission and hierarchy
    toggles common to both `CreateAnimation` and `CreatePointCache`.
    """
    shared_defs = [
        BoolDef("farm", label="Submit to Farm"),
        NumberDef("priority", label="Farm job Priority", default=50),
        BoolDef("refresh", label="Refresh viewport during export"),
        BoolDef(
            "includeParentHierarchy",
            label="Include Parent Hierarchy",
            tooltip=(
                "Whether to include parent hierarchy of nodes in the "
                "publish instance."
            )
        ),
        BoolDef(
            "includeUserDefinedAttributes",
            label="Include User Defined Attributes",
            tooltip=(
                "Whether to include all custom maya attributes found "
                "on nodes as attributes in the Alembic data."
            )
        ),
    ]
    return lib.collect_animation_defs() + shared_defs
def convert_legacy_alembic_creator_attributes(node_data, class_name):
    """Transfer legacy creator attributes to publish attributes.

    Older instances stored the Alembic export flags as creator attributes;
    the ExtractAlembic/ExtractAnimation plugins now expect them as publish
    attributes under the plugin's class name. This migrates them in place.

    Arguments:
        node_data (dict): Instance data with "creator_attributes" and
            "publish_attributes" mappings.
        class_name (str): Publish plugin class name to file the migrated
            attributes under ("ExtractAlembic" or "ExtractAnimation").

    Returns:
        dict: The same `node_data`, mutated in place.
    """
    publish_attributes = node_data["publish_attributes"]

    # Already migrated - nothing to do.
    if class_name in publish_attributes:
        return node_data

    # BUGFIX: the previous revision listed "visibleOnly" and
    # "renderableOnly" twice; the duplicate pops were no-ops.
    attributes = [
        "attr",
        "attrPrefix",
        "visibleOnly",
        "writeColorSets",
        "writeFaceSets",
        "writeNormals",
        "renderableOnly",
        "worldSpace",
    ]
    creator_attributes = node_data["creator_attributes"]
    plugin_attributes = {}
    for attr in attributes:
        if attr not in creator_attributes:
            continue
        # Move (not copy) the value so it no longer shows up as a
        # creator attribute.
        plugin_attributes[attr] = creator_attributes.pop(attr)

    publish_attributes[class_name] = plugin_attributes

    return node_data
class CreateAnimation(plugin.MayaHiddenCreator):
    """Animation output for character rigs

    We hide the animation creator from the UI since the creation of it is
    automated upon loading a rig. There's an inventory action to recreate it
    for loaded rigs if by chance someone deleted the animation instance.
    """
    identifier = "io.openpype.creators.maya.animation"
    name = "animationDefault"
    label = "Animation"
    product_type = "animation"
    icon = "male"

    # Export option defaults (overridable through settings).
    write_color_sets = False
    write_face_sets = False
    include_parent_hierarchy = False
    include_user_defined_attributes = False

    def read_instance_node(self, node):
        # Migrate instances that still carry the legacy creator
        # attributes into publish attributes for ExtractAnimation.
        data = super(CreateAnimation, self).read_instance_node(node)
        return convert_legacy_alembic_creator_attributes(
            data, "ExtractAnimation")

    def get_instance_attr_defs(self):
        # Base definitions plus the shared animation/farm toggles.
        return (super(CreateAnimation, self).get_instance_attr_defs()
                + _get_animation_attr_defs(self))
class CreatePointCache(plugin.MayaCreator):
    """Alembic pointcache for animated data"""

    identifier = "io.openpype.creators.maya.pointcache"
    label = "Pointcache"
    product_type = "pointcache"
    icon = "gears"

    # Export option defaults (overridable through settings).
    write_color_sets = False
    write_face_sets = False
    include_user_defined_attributes = False

    def read_instance_node(self, node):
        # Migrate instances that still carry the legacy creator
        # attributes into publish attributes for ExtractAlembic.
        data = super(CreatePointCache, self).read_instance_node(node)
        return convert_legacy_alembic_creator_attributes(
            data, "ExtractAlembic")

    def get_instance_attr_defs(self):
        # Base definitions plus the shared animation/farm toggles.
        return (super(CreatePointCache, self).get_instance_attr_defs()
                + _get_animation_attr_defs(self))

    def create(self, product_name, instance_data, pre_create_data):
        new_instance = super(CreatePointCache, self).create(
            product_name, instance_data, pre_create_data)
        node = new_instance.get("instance_node")

        # For Arnold standin proxy
        proxy_set = cmds.sets(name=node + "_proxy_SET", empty=True)
        cmds.sets(proxy_set, forceElement=node)

View file

@ -1,88 +0,0 @@
from maya import cmds
from ayon_core.hosts.maya.api import (
lib,
plugin
)
from ayon_core.lib import (
BoolDef,
TextDef
)
# NOTE(review): legacy creator removed in this commit in favor of the
# version in the new create_animation_pointcache module.
class CreatePointCache(plugin.MayaCreator):
    """Alembic pointcache for animated data"""

    identifier = "io.openpype.creators.maya.pointcache"
    label = "Pointcache"
    product_type = "pointcache"
    icon = "gears"

    # Defaults for export toggles; only include_user_defined_attributes is
    # referenced below -- the write_* values are presumably consumed via
    # settings. TODO confirm.
    write_color_sets = False
    write_face_sets = False
    include_user_defined_attributes = False

    def get_instance_attr_defs(self):
        """Return the attribute definitions shown on pointcache instances."""
        # Generic frame-range/step definitions shared by animation-style
        # product types, followed by the Alembic-specific toggles.
        defs = lib.collect_animation_defs()

        defs.extend([
            BoolDef("writeColorSets",
                    label="Write vertex colors",
                    tooltip="Write vertex colors with the geometry",
                    default=False),
            BoolDef("writeFaceSets",
                    label="Write face sets",
                    tooltip="Write face sets with the geometry",
                    default=False),
            BoolDef("renderableOnly",
                    label="Renderable Only",
                    tooltip="Only export renderable visible shapes",
                    default=False),
            BoolDef("visibleOnly",
                    label="Visible Only",
                    tooltip="Only export dag objects visible during "
                            "frame range",
                    default=False),
            BoolDef("includeParentHierarchy",
                    label="Include Parent Hierarchy",
                    tooltip="Whether to include parent hierarchy of nodes in "
                            "the publish instance",
                    default=False),
            BoolDef("worldSpace",
                    label="World-Space Export",
                    default=True),
            BoolDef("refresh",
                    label="Refresh viewport during export",
                    default=False),
            BoolDef("includeUserDefinedAttributes",
                    label="Include User Defined Attributes",
                    default=self.include_user_defined_attributes),
            TextDef("attr",
                    label="Custom Attributes",
                    default="",
                    placeholder="attr1, attr2"),
            TextDef("attrPrefix",
                    label="Custom Attributes Prefix",
                    default="",
                    placeholder="prefix1, prefix2")
        ])

        # TODO: Implement these on a Deadline plug-in instead?
        """
        # Default to not send to farm.
        self.data["farm"] = False
        self.data["priority"] = 50
        """

        return defs

    def create(self, product_name, instance_data, pre_create_data):
        """Create the instance, plus an extra set for Arnold standin proxy."""
        instance = super(CreatePointCache, self).create(
            product_name, instance_data, pre_create_data
        )
        instance_node = instance.get("instance_node")

        # For Arnold standin proxy
        proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
        cmds.sets(proxy_set, forceElement=instance_node)

View file

@ -58,4 +58,3 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
if instance.data.get("farm"):
instance.data["families"].append("publish.farm")

View file

@ -14,7 +14,9 @@ class CollectUserDefinedAttributes(pyblish.api.InstancePlugin):
def process(self, instance):
# Collect user defined attributes.
if not instance.data.get("includeUserDefinedAttributes", False):
if not instance.data["creator_attributes"].get(
"includeUserDefinedAttributes"
):
return
if "out_hierarchy" in instance.data:

View file

@ -2,7 +2,7 @@ import os
import json
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.lib import extract_alembic
from ayon_core.hosts.maya.api.alembic import extract_alembic
from maya import cmds

View file

@ -1,17 +1,28 @@
import os
from collections import OrderedDict
from maya import cmds
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection,
iter_visible_nodes_in_range
)
from ayon_core.lib import (
BoolDef,
TextDef,
NumberDef,
EnumDef,
UISeparatorDef,
UILabelDef,
)
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_core.pipeline import KnownPublishError
class ExtractAlembic(publish.Extractor):
class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
"""Produce an alembic of just point positions and normals.
Positions and normals, uvs, creases are preserved, but nothing more,
@ -27,8 +38,35 @@ class ExtractAlembic(publish.Extractor):
targets = ["local", "remote"]
# From settings
attr = []
attrPrefix = []
autoSubd = False
bake_attributes = []
bake_attribute_prefixes = []
dataFormat = "ogawa"
eulerFilter = False
melPerFrameCallback = ""
melPostJobCallback = ""
overrides = []
preRoll = False
preRollStartFrame = 0
pythonPerFrameCallback = ""
pythonPostJobCallback = ""
renderableOnly = False
stripNamespaces = True
uvsOnly = False
uvWrite = False
userAttr = ""
userAttrPrefix = ""
verbose = False
visibleOnly = False
wholeFrameGeo = False
worldSpace = True
writeColorSets = False
writeFaceSets = False
writeNormals = True
writeUVSets = False
writeVisibility = False
def process(self, instance):
if instance.data.get("farm"):
@ -41,16 +79,38 @@ class ExtractAlembic(publish.Extractor):
start = float(instance.data.get("frameStartHandle", 1))
end = float(instance.data.get("frameEndHandle", 1))
attrs = instance.data.get("attr", "").split(";")
attrs = [value for value in attrs if value.strip()]
attribute_values = self.get_attr_values_from_data(
instance.data
)
attrs = [
attr.strip()
for attr in attribute_values.get("attr", "").split(";")
if attr.strip()
]
attrs += instance.data.get("userDefinedAttributes", [])
attrs += self.bake_attributes
attrs += ["cbId"]
attr_prefixes = instance.data.get("attrPrefix", "").split(";")
attr_prefixes = [value for value in attr_prefixes if value.strip()]
attr_prefixes = [
attr.strip()
for attr in attribute_values.get("attrPrefix", "").split(";")
if attr.strip()
]
attr_prefixes += self.bake_attribute_prefixes
user_attrs = [
attr.strip()
for attr in attribute_values.get("userAttr", "").split(";")
if attr.strip()
]
user_attr_prefixes = [
attr.strip()
for attr in attribute_values.get("userAttrPrefix", "").split(";")
if attr.strip()
]
self.log.debug("Extracting pointcache..")
dirname = self.staging_dir(instance)
@ -58,28 +118,83 @@ class ExtractAlembic(publish.Extractor):
filename = "{name}.abc".format(**instance.data)
path = os.path.join(parent_dir, filename)
options = {
"step": instance.data.get("step", 1.0),
"attr": attrs,
"attrPrefix": attr_prefixes,
"writeVisibility": True,
"writeCreases": True,
"writeColorSets": instance.data.get("writeColorSets", False),
"writeFaceSets": instance.data.get("writeFaceSets", False),
"uvWrite": True,
"selection": True,
"worldSpace": instance.data.get("worldSpace", True)
}
root = None
if not instance.data.get("includeParentHierarchy", True):
# Set the root nodes if we don't want to include parents
# The roots are to be considered the ones that are the actual
# direct members of the set
options["root"] = roots
root = roots
if int(cmds.about(version=True)) >= 2017:
# Since Maya 2017 alembic supports multiple uv sets - write them.
options["writeUVSets"] = True
kwargs = {
"file": path,
"attr": attrs,
"attrPrefix": attr_prefixes,
"userAttr": user_attrs,
"userAttrPrefix": user_attr_prefixes,
"dataFormat": attribute_values.get("dataFormat", self.dataFormat),
"endFrame": end,
"eulerFilter": attribute_values.get(
"eulerFilter", self.eulerFilter
),
"preRoll": attribute_values.get("preRoll", self.preRoll),
"preRollStartFrame": attribute_values.get(
"preRollStartFrame", self.preRollStartFrame
),
"renderableOnly": attribute_values.get(
"renderableOnly", self.renderableOnly
),
"root": root,
"selection": True,
"startFrame": start,
"step": instance.data.get(
"creator_attributes", {}
).get("step", 1.0),
"stripNamespaces": attribute_values.get(
"stripNamespaces", self.stripNamespaces
),
"uvWrite": attribute_values.get("uvWrite", self.uvWrite),
"verbose": attribute_values.get("verbose", self.verbose),
"wholeFrameGeo": attribute_values.get(
"wholeFrameGeo", self.wholeFrameGeo
),
"worldSpace": attribute_values.get("worldSpace", self.worldSpace),
"writeColorSets": attribute_values.get(
"writeColorSets", self.writeColorSets
),
"writeCreases": attribute_values.get(
"writeCreases", self.writeCreases
),
"writeFaceSets": attribute_values.get(
"writeFaceSets", self.writeFaceSets
),
"writeUVSets": attribute_values.get(
"writeUVSets", self.writeUVSets
),
"writeVisibility": attribute_values.get(
"writeVisibility", self.writeVisibility
),
"autoSubd": attribute_values.get(
"autoSubd", self.autoSubd
),
"uvsOnly": attribute_values.get(
"uvsOnly", self.uvsOnly
),
"writeNormals": attribute_values.get(
"writeNormals", self.writeNormals
),
"melPerFrameCallback": attribute_values.get(
"melPerFrameCallback", self.melPerFrameCallback
),
"melPostJobCallback": attribute_values.get(
"melPostJobCallback", self.melPostJobCallback
),
"pythonPerFrameCallback": attribute_values.get(
"pythonPerFrameCallback", self.pythonPostJobCallback
),
"pythonPostJobCallback": attribute_values.get(
"pythonPostJobCallback", self.pythonPostJobCallback
)
}
if instance.data.get("visibleOnly", False):
# If we only want to include nodes that are visible in the frame
@ -87,20 +202,19 @@ class ExtractAlembic(publish.Extractor):
# flag does not filter out those that are only hidden on some
# frames as it counts "animated" or "connected" visibilities as
# if it's always visible.
nodes = list(iter_visible_nodes_in_range(nodes,
start=start,
end=end))
nodes = list(
iter_visible_nodes_in_range(nodes, start=start, end=end)
)
suspend = not instance.data.get("refresh", False)
with suspended_refresh(suspend=suspend):
with maintained_selection():
cmds.select(nodes, noExpand=True)
extract_alembic(
file=path,
startFrame=start,
endFrame=end,
**options
self.log.debug(
"Running `extract_alembic` with the keyword arguments: "
"{}".format(kwargs)
)
extract_alembic(**kwargs)
if "representations" not in instance.data:
instance.data["representations"] = []
@ -124,21 +238,17 @@ class ExtractAlembic(publish.Extractor):
return
path = path.replace(".abc", "_proxy.abc")
kwargs["file"] = path
if not instance.data.get("includeParentHierarchy", True):
# Set the root nodes if we don't want to include parents
# The roots are to be considered the ones that are the actual
# direct members of the set
options["root"] = instance.data["proxyRoots"]
kwargs["root"] = instance.data["proxyRoots"]
with suspended_refresh(suspend=suspend):
with maintained_selection():
cmds.select(instance.data["proxy"])
extract_alembic(
file=path,
startFrame=start,
endFrame=end,
**options
)
extract_alembic(**kwargs)
representation = {
"name": "proxy",
@ -152,24 +262,274 @@ class ExtractAlembic(publish.Extractor):
def get_members_and_roots(self, instance):
return instance[:], instance.data.get("setMembers")
@classmethod
def get_attribute_defs(cls):
    """Build the attribute definitions shown in the publisher UI.

    Only the export arguments listed in ``cls.overrides`` (driven by
    project settings) are exposed to the artist; everything else uses the
    class-level defaults.

    Returns:
        list: Parent class defs extended with the enabled Alembic
            argument defs wrapped in UI separators.
    """
    if not cls.overrides:
        return []

    # All supported override definitions. Keys must match the keyword
    # argument names passed to 'extract_alembic'.
    override_defs = OrderedDict({
        "autoSubd": BoolDef(
            "autoSubd",
            label="Auto Subd",
            default=cls.autoSubd,
            tooltip=(
                "If this flag is present and the mesh has crease edges, "
                "crease vertices or holes, the mesh (OPolyMesh) would now "
                "be written out as an OSubD and crease info will be stored"
                " in the Alembic file. Otherwise, creases info won't be "
                "preserved in Alembic file unless a custom Boolean "
                "attribute SubDivisionMesh has been added to mesh node and"
                " its value is true."
            )
        ),
        "eulerFilter": BoolDef(
            "eulerFilter",
            label="Euler Filter",
            default=cls.eulerFilter,
            tooltip="Apply Euler filter while sampling rotations."
        ),
        "renderableOnly": BoolDef(
            "renderableOnly",
            label="Renderable Only",
            default=cls.renderableOnly,
            tooltip="Only export renderable visible shapes."
        ),
        "stripNamespaces": BoolDef(
            "stripNamespaces",
            label="Strip Namespaces",
            default=cls.stripNamespaces,
            tooltip=(
                "Namespaces will be stripped off of the node before being "
                "written to Alembic."
            )
        ),
        "uvsOnly": BoolDef(
            "uvsOnly",
            label="UVs Only",
            default=cls.uvsOnly,
            tooltip=(
                "If this flag is present, only uv data for PolyMesh and "
                "SubD shapes will be written to the Alembic file."
            )
        ),
        "uvWrite": BoolDef(
            "uvWrite",
            label="UV Write",
            default=cls.uvWrite,
            tooltip=(
                "Uv data for PolyMesh and SubD shapes will be written to "
                "the Alembic file."
            )
        ),
        "verbose": BoolDef(
            "verbose",
            label="Verbose",
            default=cls.verbose,
            tooltip="Prints the current frame that is being evaluated."
        ),
        "visibleOnly": BoolDef(
            "visibleOnly",
            label="Visible Only",
            default=cls.visibleOnly,
            tooltip="Only export dag objects visible during frame range."
        ),
        "wholeFrameGeo": BoolDef(
            "wholeFrameGeo",
            label="Whole Frame Geo",
            default=cls.wholeFrameGeo,
            tooltip=(
                "Data for geometry will only be written out on whole "
                "frames."
            )
        ),
        "worldSpace": BoolDef(
            "worldSpace",
            label="World Space",
            default=cls.worldSpace,
            tooltip="Any root nodes will be stored in world space."
        ),
        "writeColorSets": BoolDef(
            "writeColorSets",
            label="Write Color Sets",
            default=cls.writeColorSets,
            tooltip="Write vertex colors with the geometry."
        ),
        # BUG FIX: 'writeCreases' was missing from the override defs even
        # though it is a supported 'extract_alembic' argument and is
        # selectable in the project settings overrides enum, so enabling
        # it in settings exposed nothing in the publisher UI.
        "writeCreases": BoolDef(
            "writeCreases",
            label="Write Creases",
            default=cls.writeCreases,
            tooltip="Write mesh crease information with the geometry."
        ),
        "writeFaceSets": BoolDef(
            "writeFaceSets",
            label="Write Face Sets",
            default=cls.writeFaceSets,
            tooltip="Write face sets with the geometry."
        ),
        "writeNormals": BoolDef(
            "writeNormals",
            label="Write Normals",
            default=cls.writeNormals,
            tooltip="Write normals with the deforming geometry."
        ),
        "writeUVSets": BoolDef(
            "writeUVSets",
            label="Write UV Sets",
            default=cls.writeUVSets,
            tooltip=(
                "Write all uv sets on MFnMeshes as vector 2 indexed "
                "geometry parameters with face varying scope."
            )
        ),
        "writeVisibility": BoolDef(
            "writeVisibility",
            label="Write Visibility",
            default=cls.writeVisibility,
            tooltip=(
                "Visibility state will be stored in the Alembic file. "
                "Otherwise everything written out is treated as visible."
            )
        ),
        "preRoll": BoolDef(
            "preRoll",
            label="Pre Roll",
            default=cls.preRoll,
            tooltip="This frame range will not be sampled."
        ),
        "preRollStartFrame": NumberDef(
            "preRollStartFrame",
            label="Pre Roll Start Frame",
            tooltip=(
                "The frame to start scene evaluation at. This is used"
                " to set the starting frame for time dependent "
                "translations and can be used to evaluate run-up that"
                " isn't actually translated."
            ),
            default=cls.preRollStartFrame
        ),
        "dataFormat": EnumDef(
            "dataFormat",
            label="Data Format",
            items=["ogawa", "HDF"],
            default=cls.dataFormat,
            tooltip="The data format to use to write the file."
        ),
        "attr": TextDef(
            "attr",
            label="Custom Attributes",
            placeholder="attr1; attr2; ...",
            default=cls.attr,
            tooltip=(
                "Attributes matching by name will be included in the "
                "Alembic export. Attributes should be separated by "
                "semi-colon `;`"
            )
        ),
        "attrPrefix": TextDef(
            "attrPrefix",
            label="Custom Attributes Prefix",
            placeholder="prefix1; prefix2; ...",
            default=cls.attrPrefix,
            tooltip=(
                "Attributes starting with these prefixes will be included "
                "in the Alembic export. Attributes should be separated by "
                "semi-colon `;`"
            )
        ),
        "userAttr": TextDef(
            "userAttr",
            label="User Attr",
            placeholder="attr1; attr2; ...",
            default=cls.userAttr,
            tooltip=(
                "Attributes matching by name will be included in the "
                "Alembic export. Attributes should be separated by "
                "semi-colon `;`"
            )
        ),
        "userAttrPrefix": TextDef(
            "userAttrPrefix",
            label="User Attr Prefix",
            placeholder="prefix1; prefix2; ...",
            default=cls.userAttrPrefix,
            tooltip=(
                "Attributes starting with these prefixes will be included "
                "in the Alembic export. Attributes should be separated by "
                "semi-colon `;`"
            )
        ),
        "melPerFrameCallback": TextDef(
            "melPerFrameCallback",
            label="Mel Per Frame Callback",
            default=cls.melPerFrameCallback,
            tooltip=(
                "When each frame (and the static frame) is evaluated the "
                "string specified is evaluated as a Mel command."
            )
        ),
        "melPostJobCallback": TextDef(
            "melPostJobCallback",
            label="Mel Post Job Callback",
            default=cls.melPostJobCallback,
            tooltip=(
                "When the translation has finished the string specified "
                "is evaluated as a Mel command."
            )
        ),
        "pythonPerFrameCallback": TextDef(
            "pythonPerFrameCallback",
            label="Python Per Frame Callback",
            default=cls.pythonPerFrameCallback,
            tooltip=(
                "When each frame (and the static frame) is evaluated the "
                "string specified is evaluated as a python command."
            )
        ),
        "pythonPostJobCallback": TextDef(
            "pythonPostJobCallback",
            label="Python Post Frame Callback",
            default=cls.pythonPostJobCallback,
            tooltip=(
                "When the translation has finished the string specified "
                "is evaluated as a python command."
            )
        )
    })

    defs = super(ExtractAlembic, cls).get_attribute_defs()
    defs.extend([
        UISeparatorDef("sep_alembic_options"),
        UILabelDef("Alembic Options"),
    ])

    # Expose only the arguments enabled by project settings.
    overrides = set(cls.overrides)
    for key, value in override_defs.items():
        if key not in overrides:
            continue
        defs.append(value)

    defs.append(
        UISeparatorDef("sep_alembic_options_end")
    )
    return defs
class ExtractAnimation(ExtractAlembic):
label = "Extract Animation"
label = "Extract Animation (Alembic)"
families = ["animation"]
def get_members_and_roots(self, instance):
# Collect the out set nodes
out_sets = [node for node in instance if node.endswith("out_SET")]
if len(out_sets) != 1:
raise RuntimeError("Couldn't find exactly one out_SET: "
"{0}".format(out_sets))
raise KnownPublishError(
"Couldn't find exactly one out_SET: {0}".format(out_sets)
)
out_set = out_sets[0]
roots = cmds.sets(out_set, query=True)
roots = cmds.sets(out_set, query=True) or []
# Include all descendants
nodes = roots + cmds.listRelatives(roots,
allDescendents=True,
fullPath=True) or []
nodes = roots
nodes += cmds.listRelatives(
roots, allDescendents=True, fullPath=True
) or []
return nodes, roots

View file

@ -3,8 +3,8 @@ import os
from maya import cmds
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection,
iter_visible_nodes_in_range

View file

@ -5,8 +5,8 @@ import os
from maya import cmds # noqa
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection
)

View file

@ -5,7 +5,7 @@ import copy
from maya import cmds
import pyblish.api
from ayon_core.hosts.maya.api.lib import extract_alembic
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.pipeline import publish

View file

@ -0,0 +1,98 @@
import pyblish.api
from ayon_core.pipeline import OptionalPyblishPluginMixin
from ayon_core.pipeline.publish import RepairAction, PublishValidationError
class ValidateAlembicDefaultsPointcache(
    pyblish.api.InstancePlugin, OptionalPyblishPluginMixin
):
    """Validate the attributes on the instance are defaults.

    The defaults are defined in the project settings.
    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache"]
    hosts = ["maya"]
    label = "Validate Alembic Options Defaults"
    actions = [RepairAction]
    optional = True

    # Name of the extractor plugin whose publish attributes are validated.
    # Subclasses override this (see 'ValidateAlembicDefaultsAnimation').
    plugin_name = "ExtractAlembic"

    @classmethod
    def _get_settings(cls, context):
        """Return the default attribute values from project settings.

        NOTE(review): settings are always read from 'ExtractAlembic' even
        for subclasses overriding 'plugin_name' -- confirm whether
        per-plugin settings (e.g. 'ExtractAnimation') should be used here.
        """
        maya_settings = context.data["project_settings"]["maya"]
        settings = maya_settings["publish"]["ExtractAlembic"]
        return settings

    @classmethod
    def _get_publish_attributes(cls, instance):
        """Return the publish attribute values stored for the extractor.

        BUG FIX: 'plugin_name' is a string class attribute; the original
        code called it like a function ('cls.plugin_name(...)'), which
        raised 'TypeError' whenever this validator ran.
        """
        return instance.data["publish_attributes"][cls.plugin_name]

    def process(self, instance):
        """Raise if any publish attribute differs from the settings default."""
        if not self.is_active(instance.data):
            return

        settings = self._get_settings(instance.context)
        attributes = self._get_publish_attributes(instance)

        msg = (
            "Alembic Extract setting \"{}\" is not the default value:"
            "\nCurrent: {}"
            "\nDefault Value: {}\n"
        )
        errors = []
        for key, value in attributes.items():
            default_value = settings[key]

            # Compare lists sorted since we can't rely on item order.
            if isinstance(value, list):
                value = sorted(value)
                default_value = sorted(default_value)

            if value != default_value:
                errors.append(msg.format(key, value, default_value))

        if errors:
            raise PublishValidationError("\n".join(errors))

    @classmethod
    def repair(cls, instance):
        """Reset the publish attributes to the project settings defaults."""
        # Find the create instance twin of this publish instance.
        create_context = instance.context.data["create_context"]
        create_instance = create_context.get_instance_by_id(
            instance.data["instance_id"]
        )

        # Set the settings values on the create context then save to
        # workfile. (Same string-call bug fixed here: use the attribute,
        # do not call it.)
        attributes = cls._get_publish_attributes(instance)
        settings = cls._get_settings(instance.context)
        create_publish_attributes = create_instance.data["publish_attributes"]
        for key in attributes:
            create_publish_attributes[cls.plugin_name][key] = settings[key]

        create_context.save_changes()
class ValidateAlembicDefaultsAnimation(ValidateAlembicDefaultsPointcache):
    """Validate that animation Alembic options match the settings defaults.

    Identical to the pointcache validator apart from targeting the
    "animation" product family and the "ExtractAnimation" plugin
    attributes.
    """

    label = "Validate Alembic Options Defaults"
    families = ["animation"]
    plugin_name = "ExtractAnimation"

View file

@ -187,6 +187,11 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
# Hide placeholder and add them to placeholder set
node = placeholder.scene_identifier
# If we just populate the placeholders from current scene, the
# placeholder set will not be created so account for that.
if not cmds.objExists(PLACEHOLDER_SET):
cmds.sets(name=PLACEHOLDER_SET, empty=True)
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr(node + ".hiddenInOutliner", True)

View file

@ -27,6 +27,10 @@ from .local_settings import (
get_openpype_username,
)
from .ayon_connection import initialize_ayon_connection
from .cache import (
CacheItem,
NestedCacheItem,
)
from .events import (
emit_event,
register_event_callback
@ -157,6 +161,9 @@ __all__ = [
"initialize_ayon_connection",
"CacheItem",
"NestedCacheItem",
"emit_event",
"register_event_callback",

View file

@ -0,0 +1,250 @@
import time
import collections
InitInfo = collections.namedtuple(
"InitInfo",
["default_factory", "lifetime"]
)
def _default_factory_func():
return None
class CacheItem:
    """Simple cache item with lifetime and default factory for default value.

    The default factory produces the value used both on initialization and
    whenever the cache is reset.

    Args:
        default_factory (Optional[callable]): Function that returns default
            value used on init and on reset.
        lifetime (Optional[int]): Lifetime of the cache data in seconds.
            Default lifetime is 120 seconds.
    """

    def __init__(self, default_factory=None, lifetime=None):
        self._lifetime = 120 if lifetime is None else lifetime
        self._last_update = None
        self._default_factory = (
            _default_factory_func
            if default_factory is None
            else default_factory
        )
        self._data = self._default_factory()

    @property
    def is_valid(self):
        """Is cache valid to use.

        Return:
            bool: True if cache is valid, False otherwise.
        """
        last = self._last_update
        return last is not None and (time.time() - last) < self._lifetime

    def set_lifetime(self, lifetime):
        """Change lifetime of cache item.

        Args:
            lifetime (int): Lifetime of the cache data in seconds.
        """
        self._lifetime = lifetime

    def set_invalid(self):
        """Mark the cached data as expired without touching the data."""
        self._last_update = None

    def reset(self):
        """Mark as expired and restore the factory-made default value."""
        self._last_update = None
        self._data = self._default_factory()

    def get_data(self):
        """Receive cached data.

        Returns:
            Any: Any data that are cached.
        """
        return self._data

    def update_data(self, data):
        """Store new data and refresh the validity timestamp.

        Args:
            data (Any): Any data that are cached.
        """
        self._data = data
        self._last_update = time.time()
class NestedCacheItem:
    """Helper for cached items stored in nested structure.

    Example:
        >>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0)
        >>> cache["a"]["b"].is_valid
        False
        >>> cache["a"]["b"].get_data()
        0
        >>> cache["a"]["b"] = 1
        >>> cache["a"]["b"].is_valid
        True
        >>> cache["a"]["b"].get_data()
        1
        >>> cache.reset()
        >>> cache["a"]["b"].is_valid
        False

    Args:
        levels (int): Number of nested levels where read cache is stored.
        default_factory (Optional[callable]): Function that returns default
            value used on init and on reset.
        lifetime (Optional[int]): Lifetime of the cache data in seconds.
            Default value is based on default value of 'CacheItem'.
        _init_info (Optional[InitInfo]): Private argument. Init info for
            nested cache where created from parent item.
    """

    def __init__(
        self, levels=1, default_factory=None, lifetime=None, _init_info=None
    ):
        if levels < 1:
            raise ValueError("Nested levels must be greater than 0")
        self._data_by_key = {}
        if _init_info is None:
            _init_info = InitInfo(default_factory, lifetime)
        self._init_info = _init_info
        self._levels = levels

    def __getitem__(self, key):
        """Get cached item, created lazily on first access.

        Args:
            key (str): Key of the cache item.

        Returns:
            Union[NestedCacheItem, CacheItem]: Cache item.
        """
        cache = self._data_by_key.get(key)
        if cache is None:
            if self._levels > 1:
                # Intermediate level -> nest one more level down.
                cache = NestedCacheItem(
                    levels=self._levels - 1,
                    _init_info=self._init_info
                )
            else:
                # Leaf level -> actual cache item.
                cache = CacheItem(
                    self._init_info.default_factory,
                    self._init_info.lifetime
                )
            self._data_by_key[key] = cache
        return cache

    def __setitem__(self, key, value):
        """Update cached data.

        Args:
            key (str): Key of the cache item.
            value (Any): Any data that are cached.

        Raises:
            AttributeError: If called on a non-leaf (nested) level.
        """
        if self._levels > 1:
            raise AttributeError((
                "{} does not support '__setitem__'. Lower nested level by {}"
            ).format(self.__class__.__name__, self._levels - 1))
        cache = self[key]
        cache.update_data(value)

    def get(self, key):
        """Get cached data.

        Args:
            key (str): Key of the cache item.

        Returns:
            Union[NestedCacheItem, CacheItem]: Cache item.
        """
        return self[key]

    def cached_count(self):
        """Amount of cached items.

        Returns:
            int: Amount of cached items.
        """
        return len(self._data_by_key)

    def clear_key(self, key):
        """Clear cached item by key.

        Args:
            key (str): Key of the cache item.
        """
        self._data_by_key.pop(key, None)

    def clear_invalid(self):
        """Clear all invalid cache items.

        Note:
            To clear all cache items use 'reset'.

        Returns:
            dict: Data of removed (invalid) items by their keys.
        """
        changed = {}
        children_are_nested = self._levels > 1
        for key, cache in tuple(self._data_by_key.items()):
            if children_are_nested:
                output = cache.clear_invalid()
                if output:
                    changed[key] = output
                # Drop nested branches that became empty.
                if not cache.cached_count():
                    self._data_by_key.pop(key)
            elif not cache.is_valid:
                changed[key] = cache.get_data()
                self._data_by_key.pop(key)
        return changed

    def reset(self):
        """Reset cache.

        Note:
            To clear only invalid cache items use 'clear_invalid'.
        """
        self._data_by_key = {}

    def set_lifetime(self, lifetime):
        """Change lifetime of all children cache items.

        Args:
            lifetime (int): Lifetime of the cache data in seconds.
        """
        # BUG FIX: 'InitInfo' is a namedtuple and therefore immutable; the
        #   original 'self._init_info.lifetime = lifetime' assignment raised
        #   'AttributeError' on every call. Replace the tuple instead so
        #   children created later also receive the new lifetime.
        self._init_info = self._init_info._replace(lifetime=lifetime)
        for cache in self._data_by_key.values():
            cache.set_lifetime(lifetime)

    @property
    def is_valid(self):
        """Raise reasonable error when called on wrong level.

        Raises:
            AttributeError: If called on nested cache item.
        """
        raise AttributeError((
            "{} does not support 'is_valid'. Lower nested level by '{}'"
        ).format(self.__class__.__name__, self._levels))

View file

@ -3,11 +3,16 @@ import re
import copy
import platform
import collections
import time
import ayon_api
from ayon_core.lib import Logger, get_local_site_id, StringTemplate
from ayon_core.lib import (
Logger,
get_local_site_id,
StringTemplate,
CacheItem,
NestedCacheItem,
)
from ayon_core.addon import AddonsManager
from .exceptions import RootCombinationError, ProjectNotSet
@ -397,62 +402,11 @@ class BaseAnatomy(object):
)
class CacheItem:
"""Helper to cache data.
Helper does not handle refresh of data and does not mark data as outdated.
Who uses the object should check of outdated state on his own will.
"""
default_lifetime = 10
def __init__(self, lifetime=None):
self._data = None
self._cached = None
self._lifetime = lifetime or self.default_lifetime
@property
def data(self):
"""Cached data/object.
Returns:
Any: Whatever was cached.
"""
return self._data
@property
def is_outdated(self):
"""Item has outdated cache.
Lifetime of cache item expired or was not yet set.
Returns:
bool: Item is outdated.
"""
if self._cached is None:
return True
return (time.time() - self._cached) > self._lifetime
def update_data(self, data):
"""Update cache of data.
Args:
data (Any): Data to cache.
"""
self._data = data
self._cached = time.time()
class Anatomy(BaseAnatomy):
_sitesync_addon_cache = CacheItem()
_project_cache = collections.defaultdict(CacheItem)
_default_site_id_cache = collections.defaultdict(CacheItem)
_root_overrides_cache = collections.defaultdict(
lambda: collections.defaultdict(CacheItem)
)
_project_cache = NestedCacheItem(lifetime=10)
_sitesync_addon_cache = CacheItem(lifetime=60)
_default_site_id_cache = NestedCacheItem(lifetime=60)
_root_overrides_cache = NestedCacheItem(2, lifetime=60)
def __init__(
self, project_name=None, site_name=None, project_entity=None
@ -477,18 +431,18 @@ class Anatomy(BaseAnatomy):
@classmethod
def get_project_entity_from_cache(cls, project_name):
project_cache = cls._project_cache[project_name]
if project_cache.is_outdated:
if not project_cache.is_valid:
project_cache.update_data(ayon_api.get_project(project_name))
return copy.deepcopy(project_cache.data)
return copy.deepcopy(project_cache.get_data())
@classmethod
def get_sitesync_addon(cls):
if cls._sitesync_addon_cache.is_outdated:
if not cls._sitesync_addon_cache.is_valid:
manager = AddonsManager()
cls._sitesync_addon_cache.update_data(
manager.get_enabled_addon("sitesync")
)
return cls._sitesync_addon_cache.data
return cls._sitesync_addon_cache.get_data()
@classmethod
def _get_studio_roots_overrides(cls, project_name):
@ -533,14 +487,14 @@ class Anatomy(BaseAnatomy):
elif not site_name:
# Use sync server to receive active site name
project_cache = cls._default_site_id_cache[project_name]
if project_cache.is_outdated:
if not project_cache.is_valid:
project_cache.update_data(
sitesync_addon.get_active_site_type(project_name)
)
site_name = project_cache.data
site_name = project_cache.get_data()
site_cache = cls._root_overrides_cache[project_name][site_name]
if site_cache.is_outdated:
if not site_cache.is_valid:
if site_name == "studio":
# Handle studio root overrides without sync server
# - studio root overrides can be done even without sync server
@ -553,4 +507,4 @@ class Anatomy(BaseAnatomy):
project_name, site_name
)
site_cache.update_data(roots_overrides)
return site_cache.data
return site_cache.get_data()

View file

@ -225,6 +225,7 @@ def create_skeleton_instance(
instance_skeleton_data = {
"productType": product_type,
"productName": data["productName"],
"task": data["task"],
"families": families,
"folderPath": data["folderPath"],
"frameStart": time_data.start,

View file

@ -1,239 +1,31 @@
import time
import collections
import warnings
InitInfo = collections.namedtuple(
"InitInfo",
["default_factory", "lifetime"]
from ayon_core.lib import CacheItem as _CacheItem
from ayon_core.lib import NestedCacheItem as _NestedCacheItem
# Cache classes were moved to `ayon_core.lib.cache`
class CacheItem(_CacheItem):
    """Deprecated alias of 'ayon_core.lib.CacheItem'.

    Kept for backwards compatibility: emits a DeprecationWarning and
    forwards all constructor arguments to the new implementation.
    """
    def __init__(self, *args, **kwargs):
        warnings.warn(
            "Used 'CacheItem' from deprecated location "
            "'ayon_core.tools.common_models', use 'ayon_core.lib' instead.",
            DeprecationWarning,
        )
        super().__init__(*args, **kwargs)
class NestedCacheItem(_NestedCacheItem):
    """Deprecated alias of 'ayon_core.lib.NestedCacheItem'.

    Kept for backwards compatibility: emits a DeprecationWarning and
    forwards all constructor arguments to the new implementation.
    """
    def __init__(self, *args, **kwargs):
        warnings.warn(
            "Used 'NestedCacheItem' from deprecated location "
            "'ayon_core.tools.common_models', use 'ayon_core.lib' instead.",
            DeprecationWarning,
        )
        super().__init__(*args, **kwargs)
__all__ = (
"CacheItem",
"NestedCacheItem",
)
def _default_factory_func():
return None
class CacheItem:
"""Simple cache item with lifetime and default value.
Args:
default_factory (Optional[callable]): Function that returns default
value used on init and on reset.
lifetime (Optional[int]): Lifetime of the cache data in seconds.
"""
def __init__(self, default_factory=None, lifetime=None):
if lifetime is None:
lifetime = 120
self._lifetime = lifetime
self._last_update = None
if default_factory is None:
default_factory = _default_factory_func
self._default_factory = default_factory
self._data = default_factory()
@property
def is_valid(self):
"""Is cache valid to use.
Return:
bool: True if cache is valid, False otherwise.
"""
if self._last_update is None:
return False
return (time.time() - self._last_update) < self._lifetime
def set_lifetime(self, lifetime):
"""Change lifetime of cache item.
Args:
lifetime (int): Lifetime of the cache data in seconds.
"""
self._lifetime = lifetime
def set_invalid(self):
"""Set cache as invalid."""
self._last_update = None
def reset(self):
"""Set cache as invalid and reset data."""
self._last_update = None
self._data = self._default_factory()
def get_data(self):
"""Receive cached data.
Returns:
Any: Any data that are cached.
"""
return self._data
def update_data(self, data):
self._data = data
self._last_update = time.time()
class NestedCacheItem:
"""Helper for cached items stored in nested structure.
Example:
>>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0)
>>> cache["a"]["b"].is_valid
False
>>> cache["a"]["b"].get_data()
0
>>> cache["a"]["b"] = 1
>>> cache["a"]["b"].is_valid
True
>>> cache["a"]["b"].get_data()
1
>>> cache.reset()
>>> cache["a"]["b"].is_valid
False
Args:
levels (int): Number of nested levels where read cache is stored.
default_factory (Optional[callable]): Function that returns default
value used on init and on reset.
lifetime (Optional[int]): Lifetime of the cache data in seconds.
_init_info (Optional[InitInfo]): Private argument. Init info for
nested cache where created from parent item.
"""
def __init__(
self, levels=1, default_factory=None, lifetime=None, _init_info=None
):
if levels < 1:
raise ValueError("Nested levels must be greater than 0")
self._data_by_key = {}
if _init_info is None:
_init_info = InitInfo(default_factory, lifetime)
self._init_info = _init_info
self._levels = levels
def __getitem__(self, key):
"""Get cached data.
Args:
key (str): Key of the cache item.
Returns:
Union[NestedCacheItem, CacheItem]: Cache item.
"""
cache = self._data_by_key.get(key)
if cache is None:
if self._levels > 1:
cache = NestedCacheItem(
levels=self._levels - 1,
_init_info=self._init_info
)
else:
cache = CacheItem(
self._init_info.default_factory,
self._init_info.lifetime
)
self._data_by_key[key] = cache
return cache
def __setitem__(self, key, value):
"""Update cached data.
Args:
key (str): Key of the cache item.
value (Any): Any data that are cached.
"""
if self._levels > 1:
raise AttributeError((
"{} does not support '__setitem__'. Lower nested level by {}"
).format(self.__class__.__name__, self._levels - 1))
cache = self[key]
cache.update_data(value)
def get(self, key):
"""Get cached data.
Args:
key (str): Key of the cache item.
Returns:
Union[NestedCacheItem, CacheItem]: Cache item.
"""
return self[key]
def cached_count(self):
"""Amount of cached items.
Returns:
int: Amount of cached items.
"""
return len(self._data_by_key)
def clear_key(self, key):
"""Clear cached item by key.
Args:
key (str): Key of the cache item.
"""
self._data_by_key.pop(key, None)
def clear_invalid(self):
"""Clear all invalid cache items.
Note:
To clear all cache items use 'reset'.
"""
changed = {}
children_are_nested = self._levels > 1
for key, cache in tuple(self._data_by_key.items()):
if children_are_nested:
output = cache.clear_invalid()
if output:
changed[key] = output
if not cache.cached_count():
self._data_by_key.pop(key)
elif not cache.is_valid:
changed[key] = cache.get_data()
self._data_by_key.pop(key)
return changed
def reset(self):
"""Reset cache.
Note:
To clear only invalid cache items use 'clear_invalid'.
"""
self._data_by_key = {}
def set_lifetime(self, lifetime):
"""Change lifetime of all children cache items.
Args:
lifetime (int): Lifetime of the cache data in seconds.
"""
self._init_info.lifetime = lifetime
for cache in self._data_by_key.values():
cache.set_lifetime(lifetime)
@property
def is_valid(self):
"""Raise reasonable error when called on wront level.
Raises:
AttributeError: If called on nested cache item.
"""
raise AttributeError((
"{} does not support 'is_valid'. Lower nested level by '{}'"
).format(self.__class__.__name__, self._levels))

View file

@ -6,8 +6,7 @@ import ayon_api
import six
from ayon_core.style import get_default_entity_icon_color
from .cache import NestedCacheItem
from ayon_core.lib import NestedCacheItem
HIERARCHY_MODEL_SENDER = "hierarchy.model"

View file

@ -5,8 +5,7 @@ import ayon_api
import six
from ayon_core.style import get_default_entity_icon_color
from .cache import CacheItem
from ayon_core.lib import CacheItem
PROJECTS_MODEL_SENDER = "projects.model"

View file

@ -5,7 +5,7 @@ import collections
import ayon_api
import appdirs
from .cache import NestedCacheItem
from ayon_core.lib import NestedCacheItem
FileInfo = collections.namedtuple(
"FileInfo",

View file

@ -6,6 +6,7 @@ import uuid
import ayon_api
from ayon_core.lib import NestedCacheItem
from ayon_core.pipeline.load import (
discover_loader_plugins,
ProductLoaderPlugin,
@ -17,7 +18,6 @@ from ayon_core.pipeline.load import (
LoadError,
IncompatibleLoaderError,
)
from ayon_core.tools.common_models import NestedCacheItem
from ayon_core.tools.loader.abstract import ActionItem
ACTIONS_MODEL_SENDER = "actions.model"

View file

@ -5,8 +5,8 @@ import arrow
import ayon_api
from ayon_api.operations import OperationsSession
from ayon_core.lib import NestedCacheItem
from ayon_core.style import get_default_entity_icon_color
from ayon_core.tools.common_models import NestedCacheItem
from ayon_core.tools.loader.abstract import (
ProductTypeItem,
ProductItem,

View file

@ -2,9 +2,8 @@ import collections
from ayon_api import get_representations, get_versions_links
from ayon_core.lib import Logger
from ayon_core.lib import Logger, NestedCacheItem
from ayon_core.addon import AddonsManager
from ayon_core.tools.common_models import NestedCacheItem
from ayon_core.tools.loader.abstract import ActionItem
DOWNLOAD_IDENTIFIER = "sitesync.download"

View file

@ -110,6 +110,26 @@ class ApplicationsAddon(AYONAddon, IPluginPaths):
]
}
def launch_application(
    self, app_name, project_name, folder_path, task_name
):
    """Launch application.

    Thin convenience wrapper around the applications manager.

    Args:
        app_name (str): Full application name e.g. 'maya/2024'.
        project_name (str): Project name.
        folder_path (str): Folder path.
        task_name (str): Task name.

    Returns:
        Any: Whatever 'ApplicationManager.launch' returns (presumably a
            launched-process handle) -- TODO confirm against the manager
            implementation.
    """
    app_manager = self.get_applications_manager()
    return app_manager.launch(
        app_name,
        project_name=project_name,
        folder_path=folder_path,
        task_name=task_name,
    )
# --- CLI ---
def cli(self, addon_click_group):
main_group = click_wrap.group(
@ -134,6 +154,17 @@ class ApplicationsAddon(AYONAddon, IPluginPaths):
default=None
)
)
(
main_group.command(
self._cli_launch_applications,
name="launch",
help="Launch application"
)
.option("--app", required=True, help="Application name")
.option("--project", required=True, help="Project name")
.option("--folder", required=True, help="Folder path")
.option("--task", required=True, help="Task name")
)
# Convert main command to click object and add it to parent group
addon_click_group.add_command(
main_group.to_click_obj()
@ -171,3 +202,15 @@ class ApplicationsAddon(AYONAddon, IPluginPaths):
with open(output_json_path, "w") as file_stream:
json.dump(env, file_stream, indent=4)
def _cli_launch_applications(self, project, folder, task, app):
    """CLI handler for the 'launch' command.

    Args:
        project (str): Project name.
        folder (str): Folder path.
        task (str): Task name.
        app (str): Full application name e.g. 'maya/2024'.
    """
    self.launch_application(
        app_name=app,
        project_name=project,
        folder_path=folder,
        task_name=task,
    )

View file

@ -1,6 +1,6 @@
name = "applications"
title = "Applications"
version = "0.2.0"
version = "0.2.1"
ayon_server_version = ">=1.0.7"
ayon_launcher_version = ">=1.0.2"

View file

@ -1,3 +1,3 @@
name = "maya"
title = "Maya"
version = "0.1.16"
version = "0.1.17"

View file

@ -35,6 +35,51 @@ def angular_unit_enum():
]
def extract_alembic_data_format_enum():
return [
{"label": "ogawa", "value": "ogawa"},
{"label": "HDF", "value": "HDF"}
]
def extract_alembic_overrides_enum():
return [
{"label": "Custom Attributes", "value": "attr"},
{"label": "Custom Attributes Prefix", "value": "attrPrefix"},
{"label": "Auto Subd", "value": "autoSubd"},
{"label": "Data Format", "value": "dataFormat"},
{"label": "Euler Filter", "value": "eulerFilter"},
{"label": "Mel Per Frame Callback", "value": "melPerFrameCallback"},
{"label": "Mel Post Job Callback", "value": "melPostJobCallback"},
{"label": "Pre Roll", "value": "preRoll"},
{"label": "Pre Roll Start Frame", "value": "preRollStartFrame"},
{
"label": "Python Per Frame Callback",
"value": "pythonPerFrameCallback"
},
{
"label": "Python Post Job Callback",
"value": "pythonPostJobCallback"
},
{"label": "Renderable Only", "value": "renderableOnly"},
{"label": "Strip Namespaces", "value": "stripNamespaces"},
{"label": "User Attr", "value": "userAttr"},
{"label": "User Attr Prefix", "value": "userAttrPrefix"},
{"label": "UV Write", "value": "uvWrite"},
{"label": "UVs Only", "value": "uvsOnly"},
{"label": "Verbose", "value": "verbose"},
{"label": "Visible Only", "value": "visibleOnly"},
{"label": "Whole Frame Geo", "value": "wholeFrameGeo"},
{"label": "World Space", "value": "worldSpace"},
{"label": "Write Color Sets", "value": "writeColorSets"},
{"label": "Write Creases", "value": "writeCreases"},
{"label": "Write Face Sets", "value": "writeFaceSets"},
{"label": "Write Normals", "value": "writeNormals"},
{"label": "Write UV Sets", "value": "writeUVSets"},
{"label": "Write Visibility", "value": "writeVisibility"}
]
class BasicValidateModel(BaseSettingsModel):
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
@ -301,6 +346,115 @@ class ExtractAlembicModel(BaseSettingsModel):
families: list[str] = SettingsField(
default_factory=list,
title="Families")
autoSubd: bool = SettingsField(
title="Auto Subd",
description=(
"If this flag is present and the mesh has crease edges, crease "
"vertices or holes, the mesh (OPolyMesh) would now be written out "
"as an OSubD and crease info will be stored in the Alembic file. "
"Otherwise, creases info won't be preserved in Alembic file unless"
" a custom Boolean attribute SubDivisionMesh has been added to "
"mesh node and its value is true."
)
)
eulerFilter: bool = SettingsField(
title="Euler Filter",
description="Apply Euler filter while sampling rotations."
)
renderableOnly: bool = SettingsField(
title="Renderable Only",
description="Only export renderable visible shapes."
)
stripNamespaces: bool = SettingsField(
title="Strip Namespaces",
description=(
"Namespaces will be stripped off of the node before being written "
"to Alembic."
)
)
uvsOnly: bool = SettingsField(
title="UVs Only",
description=(
"If this flag is present, only uv data for PolyMesh and SubD "
"shapes will be written to the Alembic file."
)
)
uvWrite: bool = SettingsField(
title="UV Write",
description=(
"Uv data for PolyMesh and SubD shapes will be written to the "
"Alembic file."
)
)
verbose: bool = SettingsField(
title="Verbose",
description="Prints the current frame that is being evaluated."
)
visibleOnly: bool = SettingsField(
title="Visible Only",
description="Only export dag objects visible during frame range."
)
wholeFrameGeo: bool = SettingsField(
title="Whole Frame Geo",
description=(
"Data for geometry will only be written out on whole frames."
)
)
worldSpace: bool = SettingsField(
title="World Space",
description="Any root nodes will be stored in world space."
)
writeColorSets: bool = SettingsField(
title="Write Color Sets",
description="Write vertex colors with the geometry."
)
writeFaceSets: bool = SettingsField(
title="Write Face Sets",
description="Write face sets with the geometry."
)
writeNormals: bool = SettingsField(
title="Write Normals",
description="Write normals with the deforming geometry."
)
writeUVSets: bool = SettingsField(
title="Write UV Sets",
description=(
"Write all uv sets on MFnMeshes as vector 2 indexed geometry "
"parameters with face varying scope."
)
)
writeVisibility: bool = SettingsField(
title="Write Visibility",
description=(
"Visibility state will be stored in the Alembic file. Otherwise "
"everything written out is treated as visible."
)
)
preRoll: bool = SettingsField(
title="Pre Roll",
description=(
            "When enabled, the pre roll start frame is used to begin the "
"evaluation of the mesh. From the pre roll start frame to the "
"alembic start frame, will not be written to disk. This can be "
"used for simulation run up."
)
)
preRollStartFrame: int = SettingsField(
title="Pre Roll Start Frame",
description=(
"The frame to start scene evaluation at. This is used to set the "
"starting frame for time dependent translations and can be used to"
" evaluate run-up that isn't actually translated.\n"
"NOTE: Pre Roll needs to be enabled for this start frame "
"to be considered."
)
)
dataFormat: str = SettingsField(
enum_resolver=extract_alembic_data_format_enum,
title="Data Format",
description="The data format to use to write the file."
)
bake_attributes: list[str] = SettingsField(
default_factory=list, title="Bake Attributes",
description="List of attributes that will be included in the alembic "
@ -311,6 +465,73 @@ class ExtractAlembicModel(BaseSettingsModel):
description="List of attribute prefixes for attributes that will be "
"included in the alembic export.",
)
attr: str = SettingsField(
title="Custom Attributes",
placeholder="attr1;attr2",
description=(
"Attributes matching by name will be included in the Alembic "
"export. Attributes should be separated by semi-colon `;`"
)
)
attrPrefix: str = SettingsField(
title="Custom Attributes Prefix",
placeholder="prefix1;prefix2",
description=(
"Attributes starting with these prefixes will be included in the "
"Alembic export. Attributes should be separated by semi-colon `;`"
)
)
userAttr: str = SettingsField(
title="User Attr",
placeholder="attr1;attr2",
description=(
"Attributes matching by name will be included in the Alembic "
"export. Attributes should be separated by semi-colon `;`"
)
)
userAttrPrefix: str = SettingsField(
title="User Attr Prefix",
placeholder="prefix1;prefix2",
description=(
"Attributes starting with these prefixes will be included in the "
"Alembic export. Attributes should be separated by semi-colon `;`"
)
)
melPerFrameCallback: str = SettingsField(
title="Mel Per Frame Callback",
description=(
"When each frame (and the static frame) is evaluated the string "
"specified is evaluated as a Mel command."
)
)
melPostJobCallback: str = SettingsField(
title="Mel Post Job Callback",
description=(
"When the translation has finished the string specified is "
"evaluated as a Mel command."
)
)
pythonPerFrameCallback: str = SettingsField(
title="Python Per Frame Callback",
description=(
"When each frame (and the static frame) is evaluated the string "
"specified is evaluated as a python command."
)
)
pythonPostJobCallback: str = SettingsField(
title="Python Post Job Callback",
description=(
"When the translation has finished the string specified is "
"evaluated as a python command."
)
)
overrides: list[str] = SettingsField(
enum_resolver=extract_alembic_overrides_enum,
title="Exposed Overrides",
description=(
"Expose the attribute in this list to the user when publishing."
)
)
class ExtractObjModel(BaseSettingsModel):
@ -670,15 +891,19 @@ class PublishersModel(BaseSettingsModel):
default_factory=BasicValidateModel,
title="Validate Alembic Visible Node",
)
ValidateAlembicDefaultsPointcache: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Alembic Defaults Pointcache"
)
ValidateAlembicDefaultsAnimation: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Alembic Defaults Animation"
)
ExtractProxyAlembic: ExtractProxyAlembicModel = SettingsField(
default_factory=ExtractProxyAlembicModel,
title="Extract Proxy Alembic",
section="Model Extractors",
)
ExtractAlembic: ExtractAlembicModel = SettingsField(
default_factory=ExtractAlembicModel,
title="Extract Alembic",
)
ExtractObj: ExtractObjModel = SettingsField(
default_factory=ExtractObjModel,
title="Extract OBJ"
@ -813,6 +1038,10 @@ class PublishersModel(BaseSettingsModel):
default_factory=ExtractModelModel,
title="Extract Model (Maya Scene)"
)
ExtractAlembic: ExtractAlembicModel = SettingsField(
default_factory=ExtractAlembicModel,
title="Extract Alembic"
)
DEFAULT_SUFFIX_NAMING = {
@ -1204,16 +1433,6 @@ DEFAULT_PUBLISH_SETTINGS = {
"proxyAbc"
]
},
"ExtractAlembic": {
"enabled": True,
"families": [
"pointcache",
"model",
"vrayproxy.alembic"
],
"bake_attributes": [],
"bake_attribute_prefixes": []
},
"ExtractObj": {
"enabled": False,
"optional": True,
@ -1334,6 +1553,16 @@ DEFAULT_PUBLISH_SETTINGS = {
"optional": False,
"validate_shapes": True
},
"ValidateAlembicDefaultsPointcache": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateAlembicDefaultsAnimation": {
"enabled": True,
"optional": True,
"active": True
},
"ExtractPlayblast": DEFAULT_PLAYBLAST_SETTING,
"ExtractMayaSceneRaw": {
"enabled": True,
@ -1375,6 +1604,52 @@ DEFAULT_PUBLISH_SETTINGS = {
"ExtractModel": {
"enabled": True,
"optional": True,
        "active": True
},
"ExtractAlembic": {
"enabled": True,
"families": [
"pointcache",
"model",
"vrayproxy.alembic"
],
"attr": "",
"attrPrefix": "",
"autoSubd": False,
"bake_attributes": [],
"bake_attribute_prefixes": [],
"dataFormat": "ogawa",
"eulerFilter": False,
"melPerFrameCallback": "",
"melPostJobCallback": "",
"overrides": [
"attr",
"attrPrefix",
"renderableOnly",
"visibleOnly",
"worldSpace",
"writeColorSets",
"writeFaceSets",
"writeNormals"
],
"preRoll": False,
"preRollStartFrame": 0,
"pythonPerFrameCallback": "",
"pythonPostJobCallback": "",
"renderableOnly": False,
"stripNamespaces": True,
"uvsOnly": False,
"uvWrite": False,
"userAttr": "",
"userAttrPrefix": "",
"verbose": False,
"visibleOnly": False,
"wholeFrameGeo": False,
"worldSpace": True,
"writeColorSets": False,
"writeFaceSets": False,
"writeNormals": True,
"writeUVSets": False,
"writeVisibility": False
}
}