Merge branch 'develop' into enhancement/maya_collect_look

This commit is contained in:
Toke Jepsen 2024-04-10 08:39:50 +01:00 committed by GitHub
commit bd7f6be8f9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
61 changed files with 1201 additions and 525 deletions

View file

@ -4,6 +4,7 @@ import os
import sys
import code
import traceback
from pathlib import Path
import click
import acre
@ -11,7 +12,7 @@ import acre
from ayon_core import AYON_CORE_ROOT
from ayon_core.addon import AddonsManager
from ayon_core.settings import get_general_environments
from ayon_core.lib import initialize_ayon_connection
from ayon_core.lib import initialize_ayon_connection, is_running_from_build
from .cli_commands import Commands
@ -167,16 +168,27 @@ def run(script):
if not script:
print("Error: missing path to script file.")
return
# Remove first argument if it is the same as AYON executable
# - Forward compatibility with future AYON versions.
# - Current AYON launcher keeps the arguments with first argument but
# future versions might remove it.
first_arg = sys.argv[0]
if is_running_from_build():
comp_path = os.path.join(os.environ["AYON_ROOT"], "start.py")
else:
comp_path = os.getenv("AYON_EXECUTABLE")
# Compare paths and remove first argument if it is the same as AYON
if Path(first_arg).resolve() == Path(comp_path).resolve():
sys.argv.pop(0)
args = sys.argv
args.remove("run")
args.remove(script)
sys.argv = args
# Remove 'run' command from sys.argv
sys.argv.remove("run")
args_string = " ".join(args[1:])
print(f"... running: {script} {args_string}")
runpy.run_path(script, run_name="__main__", )
args_string = " ".join(sys.argv[1:])
print(f"... running: {script} {args_string}")
runpy.run_path(script, run_name="__main__")
@main_cli.command()

View file

@ -36,23 +36,23 @@ class HostDirmap(object):
host_name,
project_name,
project_settings=None,
sync_module=None
sitesync_addon=None
):
self.host_name = host_name
self.project_name = project_name
self._project_settings = project_settings
self._sync_module = sync_module
self._sitesync_addon = sitesync_addon
# to limit reinit of Modules
self._sync_module_discovered = sync_module is not None
self._sitesync_addon_discovered = sitesync_addon is not None
self._log = None
@property
def sync_module(self):
if not self._sync_module_discovered:
self._sync_module_discovered = True
def sitesync_addon(self):
if not self._sitesync_addon_discovered:
self._sitesync_addon_discovered = True
manager = AddonsManager()
self._sync_module = manager.get("sync_server")
return self._sync_module
self._sitesync_addon = manager.get("sitesync")
return self._sitesync_addon
@property
def project_settings(self):
@ -158,25 +158,25 @@ class HostDirmap(object):
"""
project_name = self.project_name
sync_module = self.sync_module
sitesync_addon = self.sitesync_addon
mapping = {}
if (
sync_module is None
or not sync_module.enabled
or project_name not in sync_module.get_enabled_projects()
sitesync_addon is None
or not sitesync_addon.enabled
or project_name not in sitesync_addon.get_enabled_projects()
):
return mapping
active_site = sync_module.get_local_normalized_site(
sync_module.get_active_site(project_name))
remote_site = sync_module.get_local_normalized_site(
sync_module.get_remote_site(project_name))
active_site = sitesync_addon.get_local_normalized_site(
sitesync_addon.get_active_site(project_name))
remote_site = sitesync_addon.get_local_normalized_site(
sitesync_addon.get_remote_site(project_name))
self.log.debug(
"active {} - remote {}".format(active_site, remote_site)
)
if active_site == "local" and active_site != remote_site:
sync_settings = sync_module.get_sync_project_setting(
sync_settings = sitesync_addon.get_sync_project_setting(
project_name,
exclude_locals=False,
cached=False)
@ -194,7 +194,7 @@ class HostDirmap(object):
self.log.debug("remote overrides {}".format(remote_overrides))
current_platform = platform.system().lower()
remote_provider = sync_module.get_provider_for_site(
remote_provider = sitesync_addon.get_provider_for_site(
project_name, remote_site
)
# dirmap has sense only with regular disk provider, in the workfile

View file

@ -16,6 +16,12 @@ from ayon_core.pipeline import (
AVALON_INSTANCE_ID,
AYON_INSTANCE_ID,
)
from ayon_core.pipeline.workfile import get_workdir
from ayon_api import (
get_project,
get_folder_by_path,
get_task_by_name
)
class GenericCreateSaver(Creator):
@ -125,6 +131,8 @@ class GenericCreateSaver(Creator):
product_name = data["productName"]
if (
original_product_name != product_name
or tool.GetData("openpype.task") != data["task"]
or tool.GetData("openpype.folderPath") != data["folderPath"]
or original_format != data["creator_attributes"]["image_format"]
):
self._configure_saver_tool(data, tool, product_name)
@ -145,7 +153,30 @@ class GenericCreateSaver(Creator):
folder_path = formatting_data["folderPath"]
folder_name = folder_path.rsplit("/", 1)[-1]
workdir = os.path.normpath(os.getenv("AYON_WORKDIR"))
# If the folder path and task do not match the current context then the
# workdir is not just the `AYON_WORKDIR`. Hence, we need to actually
# compute the resulting workdir
if (
data["folderPath"] == self.create_context.get_current_folder_path()
and data["task"] == self.create_context.get_current_task_name()
):
workdir = os.path.normpath(os.getenv("AYON_WORKDIR"))
else:
# TODO: Optimize this logic
project_name = self.create_context.get_current_project_name()
project_entity = get_project(project_name)
folder_entity = get_folder_by_path(project_name,
data["folderPath"])
task_entity = get_task_by_name(project_name,
folder_id=folder_entity["id"],
task_name=data["task"])
workdir = get_workdir(
project_entity=project_entity,
folder_entity=folder_entity,
task_entity=task_entity,
host_name=self.create_context.host_name,
)
formatting_data.update({
"workdir": workdir,
"frame": "0" * frame_padding,

View file

@ -0,0 +1,36 @@
import os
from ayon_core.lib import PreLaunchHook
from ayon_core.hosts.fusion import FUSION_HOST_DIR
class FusionLaunchMenuHook(PreLaunchHook):
"""Launch AYON menu on start of Fusion"""
app_groups = ["fusion"]
order = 9
def execute(self):
# Prelaunch hook is optional
settings = self.data["project_settings"][self.host_name]
if not settings["hooks"]["FusionLaunchMenuHook"]["enabled"]:
return
variant = self.application.name
if variant.isnumeric():
version = int(variant)
if version < 18:
print("Skipping launch of OpenPype menu on Fusion start "
"because Fusion version below 18.0 does not support "
"/execute argument on launch. "
f"Version detected: {version}")
return
else:
print(f"Application variant is not numeric: {variant}. "
"Validation for Fusion version 18+ for /execute "
"prelaunch argument skipped.")
path = os.path.join(FUSION_HOST_DIR,
"deploy",
"MenuScripts",
"launch_menu.py").replace("\\", "/")
script = f"fusion:RunScript('{path}')"
self.launch_context.launch_args.extend(["/execute", script])

View file

@ -137,7 +137,7 @@ class CreateShotClip(phiero.Creator):
"value": ["<track_name>", "main", "bg", "fg", "bg",
"animatic"],
"type": "QComboBox",
"label": "pRODUCT Name",
"label": "Product Name",
"target": "ui",
"toolTip": "chose product name pattern, if <track_name> is selected, name of track layer will be used", # noqa
"order": 0},
@ -159,7 +159,7 @@ class CreateShotClip(phiero.Creator):
"type": "QCheckBox",
"label": "Include audio",
"target": "tag",
"toolTip": "Process productS with corresponding audio", # noqa
"toolTip": "Process products with corresponding audio", # noqa
"order": 3},
"sourceResolution": {
"value": False,

View file

@ -4,12 +4,12 @@ import pyblish.api
from ayon_core.pipeline import publish
class ExtractThumnail(publish.Extractor):
class ExtractThumbnail(publish.Extractor):
"""
Extractor for track item's tumnails
Extractor for track item's thumbnails
"""
label = "Extract Thumnail"
label = "Extract Thumbnail"
order = pyblish.api.ExtractorOrder
families = ["plate", "take"]
hosts = ["hiero"]
@ -48,7 +48,7 @@ class ExtractThumnail(publish.Extractor):
self.log.debug(
"__ thumb_path: `{}`, frame: `{}`".format(thumbnail, thumb_frame))
self.log.info("Thumnail was generated to: {}".format(thumb_path))
self.log.info("Thumbnail was generated to: {}".format(thumb_path))
thumb_representation = {
'files': thumb_file,
'stagingDir': staging_dir,

View file

@ -38,15 +38,15 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
context_label = "{} > {}".format(*context)
instance_label = "{} > {}".format(folderPath, task)
message = (
"Instance '{}' publishes to different context than current "
"context: {}. Current context: {}".format(
"Instance '{}' publishes to different folder or task "
"than current context: {}. Current context: {}".format(
instance.name, instance_label, context_label
)
)
raise PublishValidationError(
message=message,
description=(
"## Publishing to a different context data\n"
"## Publishing to a different context folder or task\n"
"There are publish instances present which are publishing "
"into a different folder path or task than your current context.\n\n"
"Usually this is not what you want but there can be cases "

View file

@ -37,7 +37,7 @@ from ayon_core.pipeline import (
AYON_CONTAINER_ID,
)
from ayon_core.lib import NumberDef
from ayon_core.pipeline.context_tools import get_current_folder_entity
from ayon_core.pipeline.context_tools import get_current_task_entity
from ayon_core.pipeline.create import CreateContext
from ayon_core.lib.profiles_filtering import filter_profiles
@ -1519,24 +1519,30 @@ def extract_alembic(file,
# region ID
def get_id_required_nodes(referenced_nodes=False, nodes=None):
"""Filter out any node which are locked (reference) or readOnly
def get_id_required_nodes(referenced_nodes=False,
nodes=None,
existing_ids=True):
"""Return nodes that should receive a `cbId` attribute.
This includes only mesh and curve nodes, parent transforms of the shape
nodes, file texture nodes and object sets (including shading engines).
This filters out any node which is locked, referenced, read-only, or an
intermediate object.
Args:
referenced_nodes (bool): set True to filter out reference nodes
referenced_nodes (bool): set True to include referenced nodes
nodes (list, Optional): nodes to consider
existing_ids (bool): set True to include nodes with `cbId` attribute
Returns:
nodes (set): list of filtered nodes
"""
lookup = None
if nodes is None:
# Consider all nodes
nodes = cmds.ls()
else:
# Build a lookup for the only allowed nodes in output based
# on `nodes` input of the function (+ ensure long names)
lookup = set(cmds.ls(nodes, long=True))
if nodes is not None and not nodes:
# User supplied an empty `nodes` list to check so all we can
# do is return the empty result
return set()
def _node_type_exists(node_type):
try:
@ -1545,63 +1551,142 @@ def get_id_required_nodes(referenced_nodes=False, nodes=None):
except RuntimeError:
return False
def iterate(maya_iterator):
while not maya_iterator.isDone():
yield maya_iterator.thisNode()
maya_iterator.next()
# `readOnly` flag is obsolete as of Maya 2016 therefore we explicitly
# remove default nodes and reference nodes
camera_shapes = ["frontShape", "sideShape", "topShape", "perspShape"]
default_camera_shapes = {
"frontShape", "sideShape", "topShape", "perspShape"
}
ignore = set()
if not referenced_nodes:
ignore |= set(cmds.ls(long=True, referencedNodes=True))
# list all defaultNodes to filter out from the rest
ignore |= set(cmds.ls(long=True, defaultNodes=True))
ignore |= set(cmds.ls(camera_shapes, long=True))
# Remove Turtle from the result of `cmds.ls` if Turtle is loaded
# TODO: This should be a less specific check for a single plug-in.
if _node_type_exists("ilrBakeLayer"):
ignore |= set(cmds.ls(type="ilrBakeLayer", long=True))
# Establish set of nodes types to include
types = ["objectSet", "file", "mesh", "nurbsCurve", "nurbsSurface"]
# The filtered types do not include transforms because we only want the
# parent transforms that have a child shape that we filtered to, so we
# include the parents here
types = ["mesh", "nurbsCurve", "nurbsSurface", "file", "objectSet"]
# Check if plugin nodes are available for Maya by checking if the plugin
# is loaded
if cmds.pluginInfo("pgYetiMaya", query=True, loaded=True):
types.append("pgYetiMaya")
# We *always* ignore intermediate shapes, so we filter them out directly
nodes = cmds.ls(nodes, type=types, long=True, noIntermediate=True)
iterator_type = OpenMaya.MIteratorType()
# This tries to be closest matching API equivalents of `types` variable
iterator_type.filterList = [
OpenMaya.MFn.kMesh, # mesh
OpenMaya.MFn.kNurbsSurface, # nurbsSurface
OpenMaya.MFn.kNurbsCurve, # nurbsCurve
OpenMaya.MFn.kFileTexture, # file
OpenMaya.MFn.kSet, # objectSet
OpenMaya.MFn.kPluginShape # pgYetiMaya
]
it = OpenMaya.MItDependencyNodes(iterator_type)
# The items which need to pass the id to their parent
# Add the collected transform to the nodes
dag = cmds.ls(nodes, type="dagNode", long=True) # query only dag nodes
transforms = cmds.listRelatives(dag,
parent=True,
fullPath=True) or []
fn_dep = OpenMaya.MFnDependencyNode()
fn_dag = OpenMaya.MFnDagNode()
result = set()
nodes = set(nodes)
nodes |= set(transforms)
def _should_include_parents(obj):
"""Whether to include parents of obj in output"""
if not obj.hasFn(OpenMaya.MFn.kShape):
return False
nodes -= ignore # Remove the ignored nodes
if not nodes:
return nodes
fn_dag.setObject(obj)
if fn_dag.isIntermediateObject:
return False
# Ensure only nodes from the input `nodes` are returned when a
# filter was applied on function call because we also iterated
# to parents and alike
if lookup is not None:
nodes &= lookup
# Skip default cameras
if (
obj.hasFn(OpenMaya.MFn.kCamera) and
fn_dag.name() in default_camera_shapes
):
return False
# Avoid locked nodes
nodes_list = list(nodes)
locked = cmds.lockNode(nodes_list, query=True, lock=True)
for node, lock in zip(nodes_list, locked):
if lock:
log.warning("Skipping locked node: %s" % node)
nodes.remove(node)
return True
return nodes
def _add_to_result_if_valid(obj):
"""Add to `result` if the object should be included"""
fn_dep.setObject(obj)
if not existing_ids and fn_dep.hasAttribute("cbId"):
return
if not referenced_nodes and fn_dep.isFromReferencedFile:
return
if fn_dep.isDefaultNode:
return
if fn_dep.isLocked:
return
# Skip default cameras
if (
obj.hasFn(OpenMaya.MFn.kCamera) and
fn_dep.name() in default_camera_shapes
):
return
if obj.hasFn(OpenMaya.MFn.kDagNode):
# DAG nodes
fn_dag.setObject(obj)
# Skip intermediate objects
if fn_dag.isIntermediateObject:
return
# DAG nodes can be instanced and thus may have multiple paths.
# We need to identify each path
paths = OpenMaya.MDagPath.getAllPathsTo(obj)
for dag in paths:
path = dag.fullPathName()
result.add(path)
else:
# Dependency node
path = fn_dep.name()
result.add(path)
for obj in iterate(it):
# For any non-intermediate shape node always include the parent
# even if we exclude the shape itself (e.g. when locked, default)
if _should_include_parents(obj):
fn_dag.setObject(obj)
parents = [
fn_dag.parent(index) for index in range(fn_dag.parentCount())
]
for parent_obj in parents:
_add_to_result_if_valid(parent_obj)
_add_to_result_if_valid(obj)
if not result:
return result
# Exclude some additional types
exclude_types = []
if _node_type_exists("ilrBakeLayer"):
# Remove Turtle from the result if Turtle is loaded
exclude_types.append("ilrBakeLayer")
if exclude_types:
exclude_nodes = set(cmds.ls(nodes, long=True, type=exclude_types))
if exclude_nodes:
result -= exclude_nodes
# Filter to explicit input nodes if provided
if nodes is not None:
# The amount of input nodes to filter to can be large and querying
# many nodes can be slow in Maya. As such we want to try and reduce
# it as much as possible, so we include the type filter to try and
# reduce the result of `maya.cmds.ls` here.
nodes = set(cmds.ls(nodes, long=True, type=types + ["dagNode"]))
if nodes:
result &= nodes
else:
return set()
return result
def get_id(node):
@ -2115,22 +2200,6 @@ def get_related_sets(node):
"""
# Ignore specific suffices
ignore_suffices = ["out_SET", "controls_SET", "_INST", "_CON"]
# Default nodes to ignore
defaults = {"defaultLightSet", "defaultObjectSet"}
# Ids to ignore
ignored = {
AVALON_INSTANCE_ID,
AVALON_CONTAINER_ID,
AYON_INSTANCE_ID,
AYON_CONTAINER_ID,
}
view_sets = get_isolate_view_sets()
sets = cmds.listSets(object=node, extendToShape=False)
if not sets:
return []
@ -2141,6 +2210,14 @@ def get_related_sets(node):
# returned by `cmds.listSets(allSets=True)`
sets = cmds.ls(sets)
# Ids to ignore
ignored = {
AVALON_INSTANCE_ID,
AVALON_CONTAINER_ID,
AYON_INSTANCE_ID,
AYON_CONTAINER_ID,
}
# Ignore `avalon.container`
sets = [
s for s in sets
@ -2149,21 +2226,31 @@ def get_related_sets(node):
or cmds.getAttr(f"{s}.id") not in ignored
)
]
if not sets:
return sets
# Exclude deformer sets (`type=2` for `maya.cmds.listSets`)
deformer_sets = cmds.listSets(object=node,
extendToShape=False,
type=2) or []
deformer_sets = set(deformer_sets) # optimize lookup
sets = [s for s in sets if s not in deformer_sets]
exclude_sets = cmds.listSets(object=node,
extendToShape=False,
type=2) or []
exclude_sets = set(exclude_sets) # optimize lookup
# Default nodes to ignore
exclude_sets.update({"defaultLightSet", "defaultObjectSet"})
# Filter out the sets to exclude
sets = [s for s in sets if s not in exclude_sets]
# Ignore when the set has a specific suffix
sets = [s for s in sets if not any(s.endswith(x) for x in ignore_suffices)]
ignore_suffices = ("out_SET", "controls_SET", "_INST", "_CON")
sets = [s for s in sets if not s.endswith(ignore_suffices)]
if not sets:
return sets
# Ignore viewport filter view sets (from isolate select and
# viewports)
view_sets = get_isolate_view_sets()
sets = [s for s in sets if s not in view_sets]
sets = [s for s in sets if s not in defaults]
return sets
@ -2434,12 +2521,10 @@ def set_scene_fps(fps, update=True):
cmds.currentUnit(time=unit, updateAnimation=update)
# Set time slider data back to previous state
cmds.playbackOptions(edit=True, minTime=start_frame)
cmds.playbackOptions(edit=True, maxTime=end_frame)
# Set animation data
cmds.playbackOptions(edit=True, animationStartTime=animation_start)
cmds.playbackOptions(edit=True, animationEndTime=animation_end)
cmds.playbackOptions(minTime=start_frame,
maxTime=end_frame,
animationStartTime=animation_start,
animationEndTime=animation_end)
cmds.currentTime(current_frame, edit=True, update=True)
@ -2629,21 +2714,21 @@ def reset_frame_range(playback=True, render=True, fps=True):
def reset_scene_resolution():
"""Apply the scene resolution from the project definition
scene resolution can be overwritten by an folder if the folder.attrib
contains any information regarding scene resolution .
The scene resolution will be retrieved from the current task entity's
attributes.
Returns:
None
"""
folder_attributes = get_current_folder_entity()["attrib"]
task_attributes = get_current_task_entity(fields={"attrib"})["attrib"]
# Set resolution
width = folder_attributes.get("resolutionWidth", 1920)
height = folder_attributes.get("resolutionHeight", 1080)
pixelAspect = folder_attributes.get("pixelAspect", 1)
width = task_attributes.get("resolutionWidth", 1920)
height = task_attributes.get("resolutionHeight", 1080)
pixel_aspect = task_attributes.get("pixelAspect", 1)
set_scene_resolution(width, height, pixelAspect)
set_scene_resolution(width, height, pixel_aspect)
def set_context_settings(
@ -3129,7 +3214,7 @@ def load_capture_preset(data):
return options
def get_attr_in_layer(attr, layer):
def get_attr_in_layer(attr, layer, as_string=True):
"""Return attribute value in specified renderlayer.
Same as cmds.getAttr but this gets the attribute's value in a
@ -3147,6 +3232,7 @@ def get_attr_in_layer(attr, layer):
Args:
attr (str): attribute name, ex. "node.attribute"
layer (str): layer name
as_string (bool): whether attribute should convert to a string value
Returns:
The return value from `maya.cmds.getAttr`
@ -3156,7 +3242,8 @@ def get_attr_in_layer(attr, layer):
try:
if cmds.mayaHasRenderSetup():
from . import lib_rendersetup
return lib_rendersetup.get_attr_in_layer(attr, layer)
return lib_rendersetup.get_attr_in_layer(
attr, layer, as_string=as_string)
except AttributeError:
pass
@ -3164,7 +3251,7 @@ def get_attr_in_layer(attr, layer):
current_layer = cmds.editRenderLayerGlobals(query=True,
currentRenderLayer=True)
if layer == current_layer:
return cmds.getAttr(attr)
return cmds.getAttr(attr, asString=as_string)
connections = cmds.listConnections(attr,
plugs=True,
@ -3215,7 +3302,7 @@ def get_attr_in_layer(attr, layer):
value *= conversion
return value
return cmds.getAttr(attr)
return cmds.getAttr(attr, asString=as_string)
def fix_incompatible_containers():
@ -3244,33 +3331,46 @@ def update_content_on_context_change():
"""
This will update scene content to match new folder on context change
"""
scene_sets = cmds.listSets(allSets=True)
folder_entity = get_current_folder_entity()
folder_attributes = folder_entity["attrib"]
new_folder_path = folder_entity["path"]
for s in scene_sets:
try:
if cmds.getAttr("{}.id".format(s)) in {
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
}:
attr = cmds.listAttr(s)
print(s)
if "folderPath" in attr:
print(
" - setting folder to: [ {} ]".format(new_folder_path)
)
cmds.setAttr(
"{}.folderPath".format(s),
new_folder_path, type="string"
)
if "frameStart" in attr:
cmds.setAttr("{}.frameStart".format(s),
folder_attributes["frameStart"])
if "frameEnd" in attr:
cmds.setAttr("{}.frameEnd".format(s),
folder_attributes["frameEnd"],)
except ValueError:
pass
host = registered_host()
create_context = CreateContext(host)
folder_entity = get_current_task_entity(fields={"attrib"})
instance_values = {
"folderPath": create_context.get_current_folder_path(),
"task": create_context.get_current_task_name(),
}
creator_attribute_values = {
"frameStart": folder_entity["attrib"]["frameStart"],
"frameEnd": folder_entity["attrib"]["frameEnd"],
}
has_changes = False
for instance in create_context.instances:
for key, value in instance_values.items():
if key not in instance or instance[key] == value:
continue
# Update instance value
print(f"Updating {instance.product_name} {key} to: {value}")
instance[key] = value
has_changes = True
creator_attributes = instance.creator_attributes
for key, value in creator_attribute_values.items():
if (
key not in creator_attributes
or creator_attributes[key] == value
):
continue
# Update instance creator attribute value
print(f"Updating {instance.product_name} {key} to: {value}")
instance[key] = value
has_changes = True
if has_changes:
create_context.save_changes()
def show_message(title, msg):
@ -4004,17 +4104,26 @@ def len_flattened(components):
return n
def get_all_children(nodes):
def get_all_children(nodes, ignore_intermediate_objects=False):
"""Return all children of `nodes` including each instanced child.
Using maya.cmds.listRelatives(allDescendents=True) includes only the first
instance. As such, this function acts as an optimal replacement with a
focus on a fast query.
Args:
nodes (iterable): List of nodes to get children for.
ignore_intermediate_objects (bool): Ignore any children that
are intermediate objects.
Returns:
set: Children of input nodes.
"""
sel = OpenMaya.MSelectionList()
traversed = set()
iterator = OpenMaya.MItDag(OpenMaya.MItDag.kDepthFirst)
fn_dag = OpenMaya.MFnDagNode()
for node in nodes:
if node in traversed:
@ -4031,6 +4140,13 @@ def get_all_children(nodes):
iterator.next() # noqa: B305
while not iterator.isDone():
if ignore_intermediate_objects:
fn_dag.setObject(iterator.currentItem())
if fn_dag.isIntermediateObject:
iterator.prune()
iterator.next() # noqa: B305
continue
path = iterator.fullPathName()
if path in traversed:
@ -4041,7 +4157,7 @@ def get_all_children(nodes):
traversed.add(path)
iterator.next() # noqa: B305
return list(traversed)
return traversed
def get_capture_preset(

View file

@ -297,7 +297,7 @@ class ARenderProducts:
"""
return self._get_attr("defaultRenderGlobals", attribute)
def _get_attr(self, node_attr, attribute=None):
def _get_attr(self, node_attr, attribute=None, as_string=True):
"""Return the value of the attribute in the renderlayer
For readability this allows passing in the attribute in two ways.
@ -317,7 +317,7 @@ class ARenderProducts:
else:
plug = "{}.{}".format(node_attr, attribute)
return lib.get_attr_in_layer(plug, layer=self.layer)
return lib.get_attr_in_layer(plug, layer=self.layer, as_string=as_string)
@staticmethod
def extract_separator(file_prefix):
@ -1133,9 +1133,24 @@ class RenderProductsRedshift(ARenderProducts):
aovs = list(set(aovs) - set(ref_aovs))
products = []
global_aov_enabled = bool(
self._get_attr("redshiftOptions.aovGlobalEnableMode", as_string=False)
)
colorspace = lib.get_color_management_output_transform()
if not global_aov_enabled:
# only beauty output
for camera in cameras:
products.insert(0,
RenderProduct(productName="",
ext=ext,
multipart=self.multipart,
camera=camera,
colorspace=colorspace))
return products
light_groups_enabled = False
has_beauty_aov = False
colorspace = lib.get_color_management_output_transform()
for aov in aovs:
enabled = self._get_attr(aov, "enabled")
if not enabled:

View file

@ -77,7 +77,7 @@ def get_rendersetup_layer(layer):
if conn.endswith(".legacyRenderLayer")), None)
def get_attr_in_layer(node_attr, layer):
def get_attr_in_layer(node_attr, layer, as_string=True):
"""Return attribute value in Render Setup layer.
This will only work for attributes which can be
@ -124,7 +124,7 @@ def get_attr_in_layer(node_attr, layer):
node = history_overrides[-1] if history_overrides else override
node_attr_ = node + ".original"
return get_attribute(node_attr_, asString=True)
return get_attribute(node_attr_, asString=as_string)
layer = get_rendersetup_layer(layer)
rs = renderSetup.instance()
@ -144,7 +144,7 @@ def get_attr_in_layer(node_attr, layer):
# we will let it error out.
rs.switchToLayer(current_layer)
return get_attribute(node_attr, asString=True)
return get_attribute(node_attr, asString=as_string)
overrides = get_attr_overrides(node_attr, layer)
default_layer_value = get_default_layer_value(node_attr)

View file

@ -580,7 +580,8 @@ def on_save():
_remove_workfile_lock()
# Generate ids of the current context on nodes in the scene
nodes = lib.get_id_required_nodes(referenced_nodes=False)
nodes = lib.get_id_required_nodes(referenced_nodes=False,
existing_ids=False)
for node, new_id in lib.generate_ids(nodes):
lib.set_id(node, new_id, overwrite=False)
@ -653,10 +654,6 @@ def on_task_changed():
"Can't set project for new context because path does not exist: {}"
).format(workdir))
with lib.suspended_refresh():
lib.set_context_settings()
lib.update_content_on_context_change()
global _about_to_save
if not lib.IS_HEADLESS and _about_to_save:
# Let's prompt the user to update the context settings or not

View file

@ -286,7 +286,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
if not container:
return
roots = cmds.sets(container, q=True)
roots = cmds.sets(container, q=True) or []
ref_node = None
try:
ref_node = get_reference_node(roots)

View file

@ -40,8 +40,15 @@ class CreateRenderlayer(plugin.RenderlayerCreator):
def create(self, product_name, instance_data, pre_create_data):
# Only allow a single render instance to exist
if self._get_singleton_node():
raise CreatorError("A Render instance already exists - only "
"one can be configured.")
raise CreatorError(
"A Render instance already exists - only one can be "
"configured.\n\n"
"To render multiple render layers, create extra Render Setup "
"Layers via Maya's Render Setup UI.\n"
"Then refresh the publisher to detect the new layers for "
"rendering.\n\n"
"With a render instance present all Render Setup layers in "
"your workfile are renderable instances.")
# Apply default project render settings on create
if self.render_settings.get("apply_render_settings"):

View file

@ -125,6 +125,11 @@ class ImportMayaLoader(ayon_core.hosts.maya.api.plugin.Loader):
)
]
@classmethod
def apply_settings(cls, project_settings):
super(ImportMayaLoader, cls).apply_settings(project_settings)
cls.enabled = cls.load_settings["import_loader"].get("enabled", True)
def load(self, context, name=None, namespace=None, data=None):
import maya.cmds as cmds

View file

@ -9,7 +9,9 @@ instance.
import json
import sys
import six
import contextlib
from ayon_core.lib import BoolDef, EnumDef
from ayon_core.pipeline import (
load,
get_representation_path
@ -21,6 +23,31 @@ from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup
@contextlib.contextmanager
def mark_all_imported(enabled):
"""Mark all imported nodes accepted by removing the `imported` attribute"""
if not enabled:
yield
return
node_types = cmds.pluginInfo("renderSetup", query=True, dependNode=True)
# Get node before load, then we can disable `imported`
# attribute on all new render setup layers after import
before = cmds.ls(type=node_types, long=True)
try:
yield
finally:
after = cmds.ls(type=node_types, long=True)
for node in (node for node in after if node not in before):
if cmds.attributeQuery("imported",
node=node,
exists=True):
plug = "{}.imported".format(node)
if cmds.getAttr(plug):
cmds.deleteAttr(plug)
class RenderSetupLoader(load.LoaderPlugin):
"""Load json preset for RenderSetup overwriting current one."""
@ -32,48 +59,79 @@ class RenderSetupLoader(load.LoaderPlugin):
icon = "tablet"
color = "orange"
options = [
BoolDef("accept_import",
label="Accept import on load",
tooltip=(
"By default importing or pasting Render Setup collections "
"will display them italic in the Render Setup list.\nWith "
"this enabled the load will directly mark the import "
"'accepted' and remove the italic view."
),
default=True),
BoolDef("load_managed",
label="Load Managed",
tooltip=(
"Containerize the rendersetup on load so it can be "
"'updated' later."
),
default=True),
EnumDef("import_mode",
label="Import mode",
items={
renderSetup.DECODE_AND_OVERWRITE: (
"Flush existing render setup and "
"add without any namespace"
),
renderSetup.DECODE_AND_MERGE: (
"Merge with the existing render setup objects and "
"rename the unexpected objects"
),
renderSetup.DECODE_AND_RENAME: (
"Renaming all decoded render setup objects to not "
"conflict with the existing render setup"
),
},
default=renderSetup.DECODE_AND_OVERWRITE)
]
def load(self, context, name, namespace, data):
"""Load RenderSetup settings."""
# from ayon_core.hosts.maya.api.lib import namespaced
folder_name = context["folder"]["name"]
namespace = namespace or lib.unique_namespace(
folder_name + "_",
prefix="_" if folder_name[0].isdigit() else "",
suffix="_",
)
path = self.filepath_from_context(context)
accept_import = data.get("accept_import", True)
import_mode = data.get("import_mode", renderSetup.DECODE_AND_OVERWRITE)
self.log.info(">>> loading json [ {} ]".format(path))
with open(path, "r") as file:
renderSetup.instance().decode(
json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
with mark_all_imported(accept_import):
with open(path, "r") as file:
renderSetup.instance().decode(
json.load(file), import_mode, None)
nodes = []
null = cmds.sets(name="null_SET", empty=True)
nodes.append(null)
if data.get("load_managed", True):
self.log.info(">>> containerising [ {} ]".format(name))
folder_name = context["folder"]["name"]
namespace = namespace or lib.unique_namespace(
folder_name + "_",
prefix="_" if folder_name[0].isdigit() else "",
suffix="_",
)
self[:] = nodes
if not nodes:
return
self.log.info(">>> containerising [ {} ]".format(name))
return containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
return containerise(
name=name,
namespace=namespace,
nodes=[],
context=context,
loader=self.__class__.__name__)
def remove(self, container):
"""Remove RenderSetup settings instance."""
from maya import cmds
container_name = container["objectName"]
self.log.info("Removing '%s' from Maya.." % container["name"])
container_content = cmds.sets(container_name, query=True)
container_content = cmds.sets(container_name, query=True) or []
nodes = cmds.ls(container_content, long=True)
nodes.append(container_name)

View file

@ -46,11 +46,18 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
self.log.debug("data: {}".format(instance.data))
def get_hierarchy(self, nodes):
"""Return nodes with all their children"""
"""Return nodes with all their children.
Arguments:
nodes (List[str]): List of nodes to collect children hierarchy for
Returns:
list: Input nodes with their children hierarchy
"""
nodes = cmds.ls(nodes, long=True)
if not nodes:
return []
children = get_all_children(nodes)
# Make sure nodes merged with children only
# contains unique entries
return list(set(nodes + children))
children = get_all_children(nodes, ignore_intermediate_objects=True)
return list(children.union(nodes))

View file

@ -1,5 +1,3 @@
import json
from maya import cmds
import pyblish.api
@ -11,18 +9,24 @@ class CollectFileDependencies(pyblish.api.ContextPlugin):
label = "Collect File Dependencies"
order = pyblish.api.CollectorOrder - 0.49
hosts = ["maya"]
families = ["renderlayer"]
@classmethod
def apply_settings(cls, project_settings, system_settings):
# Disable plug-in if not used for deadline submission anyway
settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa
cls.enabled = settings.get("asset_dependencies", True)
def process(self, context):
dependencies = []
dependencies = set()
for node in cmds.ls(type="file"):
path = cmds.getAttr("{}.{}".format(node, "fileTextureName"))
if path not in dependencies:
dependencies.append(path)
dependencies.add(path)
for node in cmds.ls(type="AlembicNode"):
path = cmds.getAttr("{}.{}".format(node, "abc_File"))
if path not in dependencies:
dependencies.append(path)
dependencies.add(path)
context.data["fileDependencies"] = dependencies
self.log.debug(json.dumps(dependencies, indent=4))
context.data["fileDependencies"] = list(dependencies)

View file

@ -48,15 +48,15 @@ class CollectNewInstances(pyblish.api.InstancePlugin):
# Collect members
members = cmds.ls(members, long=True) or []
# Collect full hierarchy
dag_members = cmds.ls(members, type="dagNode", long=True)
children = get_all_children(dag_members)
children = cmds.ls(children, noIntermediate=True, long=True)
parents = (
self.get_all_parents(members)
if creator_attributes.get("includeParentHierarchy", True)
else []
)
members_hierarchy = list(set(members + children + parents))
children = get_all_children(dag_members,
ignore_intermediate_objects=True)
members_hierarchy = set(members)
members_hierarchy.update(children)
if creator_attributes.get("includeParentHierarchy", True):
members_hierarchy.update(self.get_all_parents(dag_members))
instance[:] = members_hierarchy
@ -97,16 +97,16 @@ class CollectNewInstances(pyblish.api.InstancePlugin):
"""Get all parents by using string operations (optimization)
Args:
nodes (list): the nodes which are found in the objectSet
nodes (iterable): the nodes which are found in the objectSet
Returns:
list
set
"""
parents = []
parents = set()
for node in nodes:
splitted = node.split("|")
items = ["|".join(splitted[0:i]) for i in range(2, len(splitted))]
parents.extend(items)
parents.update(items)
return list(set(parents))
return parents

View file

@ -1,14 +1,18 @@
import inspect
import pyblish.api
from maya import cmds
from ayon_core.pipeline.publish import (
context_plugin_should_run,
PublishValidationError,
OptionalPyblishPluginMixin
)
class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin,
OptionalPyblishPluginMixin):
"""Validate if current render layer has a renderable camera
"""Validate if current render layer has a renderable camera.
There is a bug in Redshift which occurs when the current render layer
at file open has no renderable camera. The error raised is as follows:
@ -32,8 +36,39 @@ class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin,
if not context_plugin_should_run(self, context):
return
layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
# This validator only makes sense when publishing renderlayer instances
# with Redshift. We skip validation if there isn't any.
if not any(self.is_active_redshift_render_instance(instance)
for instance in context):
return
cameras = cmds.ls(type="camera", long=True)
renderable = any(c for c in cameras if cmds.getAttr(c + ".renderable"))
assert renderable, ("Current render layer '%s' has no renderable "
"camera" % layer)
if not renderable:
layer = cmds.editRenderLayerGlobals(query=True,
currentRenderLayer=True)
raise PublishValidationError(
"Current render layer '{}' has no renderable camera".format(
layer
),
description=inspect.getdoc(self)
)
@staticmethod
def is_active_redshift_render_instance(instance) -> bool:
"""Return whether instance is an active renderlayer instance set to
render with Redshift renderer."""
if not instance.data.get("active", True):
return False
# Check this before families just because it's a faster check
if not instance.data.get("renderer") == "redshift":
return False
families = set()
families.add(instance.data.get("family"))
families.update(instance.data.get("families", []))
if "renderlayer" not in families:
return False
return True

View file

@ -11,8 +11,6 @@ from ayon_core.pipeline.publish import (
OptionalPyblishPluginMixin
)
from maya import cmds
class ValidateInstanceInContext(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
@ -38,17 +36,20 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
return
folder_path = instance.data.get("folderPath")
context_folder_path = self.get_context_folder_path(instance)
if folder_path != context_folder_path:
task = instance.data.get("task")
context = self.get_context(instance)
if (folder_path, task) != context:
context_label = "{} > {}".format(*context)
instance_label = "{} > {}".format(folder_path, task)
raise PublishValidationError(
message=(
"Instance '{}' publishes to different folder than current"
"Instance '{}' publishes to different context than current"
" context: {}. Current context: {}".format(
instance.name, folder_path, context_folder_path
instance.name, instance_label, context_label
)
),
description=(
"## Publishing to a different folder\n"
"## Publishing to a different context data\n"
"There are publish instances present which are publishing "
"into a different folder than your current context.\n\n"
"Usually this is not what you want but there can be cases "
@ -64,14 +65,20 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
@classmethod
def repair(cls, instance):
context_folder_path = cls.get_context_folder_path(instance)
instance_node = instance.data["instance_node"]
cmds.setAttr(
"{}.folderPath".format(instance_node),
context_folder_path,
type="string"
context_folder_path, context_task = cls.get_context(
instance)
create_context = instance.context.data["create_context"]
instance_id = instance.data["instance_id"]
created_instance = create_context.get_instance_by_id(
instance_id
)
created_instance["folderPath"] = context_folder_path
created_instance["task"] = context_task
create_context.save_changes()
@staticmethod
def get_context_folder_path(instance):
return instance.context.data["folderPath"]
def get_context(instance):
"""Return asset, task from publishing context data"""
context = instance.context
return context.data["folderPath"], context.data["task"]

View file

@ -1,3 +1,5 @@
import inspect
from maya import cmds
import pyblish.api
@ -14,8 +16,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Adheres to the content of 'model' product type
- Must have one top group. (configurable)
- Must only contain: transforms, meshes and groups
See `get_description` for more details.
"""
@ -28,13 +29,16 @@ class ValidateModelContent(pyblish.api.InstancePlugin,
validate_top_group = True
optional = False
allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator')
@classmethod
def get_invalid(cls, instance):
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return [instance.data["name"]]
cls.log.error("Model instance has no nodes. "
"It is not allowed to be empty")
return [instance.data["instance_node"]]
# All children will be included in the extracted export so we also
# validate *all* descendents of the set members and we skip any
@ -46,30 +50,42 @@ class ValidateModelContent(pyblish.api.InstancePlugin,
content_instance = list(set(content_instance + descendants))
# Ensure only valid node types
allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator')
nodes = cmds.ls(content_instance, long=True)
valid = cmds.ls(content_instance, long=True, type=allowed)
valid = cmds.ls(content_instance, long=True, type=cls.allowed)
invalid = set(nodes) - set(valid)
if invalid:
cls.log.error("These nodes are not allowed: %s" % invalid)
# List as bullet points
invalid_bullets = "\n".join(f"- {node}" for node in invalid)
cls.log.error(
"These nodes are not allowed:\n{}\n\n"
"The valid node types are: {}".format(
invalid_bullets, ", ".join(cls.allowed))
)
return list(invalid)
if not valid:
cls.log.error("No valid nodes in the instance")
return True
cls.log.error(
"No valid nodes in the model instance.\n"
"The valid node types are: {}".format(", ".join(cls.allowed))
)
return [instance.data["instance_node"]]
# Ensure it has shapes
shapes = cmds.ls(valid, long=True, shapes=True)
if not shapes:
cls.log.error("No shapes in the model instance")
return True
return [instance.data["instance_node"]]
# Top group
top_parents = set([x.split("|")[1] for x in content_instance])
# Ensure single top group
top_parents = {x.split("|", 2)[1] for x in content_instance}
if cls.validate_top_group and len(top_parents) != 1:
cls.log.error("Must have exactly one top group")
return top_parents
cls.log.error(
"A model instance must have exactly one top group. "
"Found top groups: {}".format(", ".join(top_parents))
)
return list(top_parents)
def _is_visible(node):
"""Return whether node is visible"""
@ -101,5 +117,21 @@ class ValidateModelContent(pyblish.api.InstancePlugin,
if invalid:
raise PublishValidationError(
title="Model content is invalid",
message="See log for more details"
message="Model content is invalid. See log for more details.",
description=self.get_description()
)
@classmethod
def get_description(cls):
return inspect.cleandoc(f"""
### Model content is invalid
Your model instance does not adhere to the rules of a
model product type:
- Must have at least one visible shape in it, like a mesh.
- Must have one root node. When exporting multiple meshes they
must be inside a group.
- May only contain the following node types:
{", ".join(cls.allowed)}
""")

View file

@ -60,7 +60,8 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin):
# We do want to check the referenced nodes as it might be
# part of the end product.
id_nodes = lib.get_id_required_nodes(referenced_nodes=True,
nodes=instance[:])
invalid = [n for n in id_nodes if not lib.get_id(n)]
return invalid
nodes=instance[:],
# Exclude those with already
# existing ids
existing_ids=False)
return id_nodes

View file

@ -37,27 +37,27 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError(
("Found folder ids which are not related to "
"current project in instance: `{}`").format(instance.name))
"Found folder ids which are not related to "
"current project in instance: `{}`".format(instance.name))
@classmethod
def get_invalid(cls, instance):
invalid = []
nodes = instance[:]
if not nodes:
return
# Get all id required nodes
id_required_nodes = lib.get_id_required_nodes(referenced_nodes=True,
nodes=instance[:])
id_required_nodes = lib.get_id_required_nodes(referenced_nodes=False,
nodes=nodes)
if not id_required_nodes:
return
# check ids against database ids
project_name = instance.context.data["projectName"]
folder_entities = ayon_api.get_folders(project_name, fields={"id"})
folder_ids = {
folder_entity["id"]
for folder_entity in folder_entities
}
folder_ids = cls.get_project_folder_ids(context=instance.context)
# Get all asset IDs
invalid = []
for node in id_required_nodes:
cb_id = lib.get_id(node)
@ -71,3 +71,31 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
invalid.append(node)
return invalid
@classmethod
def get_project_folder_ids(cls, context):
    """Return all folder ids in the current project.

    The database is queried only once per publish session: the result is
    cached on the publish context so every subsequent instance reuses it.

    Arguments:
        context (pyblish.api.Context): The publish context.

    Returns:
        set[str]: All folder ids in the current project.
    """
    cache_key = "__cache_project_folder_ids"
    if cache_key in context.data:
        return context.data[cache_key]

    # First call for this publish session - query the database.
    project_name = context.data["projectName"]
    folder_ids = {
        folder_entity["id"]
        for folder_entity in ayon_api.get_folders(
            project_name, fields={"id"}
        )
    }
    context.data[cache_key] = folder_ids
    return folder_ids

View file

@ -8,6 +8,8 @@ from ayon_core.pipeline.publish import (
import ayon_core.hosts.maya.api.action
from ayon_core.hosts.maya.api import lib
from maya import cmds
class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
"""Validate the nodes in the instance have a unique Colorbleed Id
@ -41,7 +43,7 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
if invalid:
label = "Nodes found with non-unique folder ids"
raise PublishValidationError(
message="{}: {}".format(label, invalid),
message="{}, see log".format(label),
title="Non-unique folder ids on nodes",
description="{}\n- {}".format(label,
"\n- ".join(sorted(invalid)))
@ -54,7 +56,6 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
# Check only non intermediate shapes
# todo: must the instance itself ensure to have no intermediates?
# todo: how come there are intermediates?
from maya import cmds
instance_members = cmds.ls(instance, noIntermediate=True, long=True)
# Collect each id with their members
@ -67,10 +68,14 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
# Take only the ids with more than one member
invalid = list()
_iteritems = getattr(ids, "iteritems", ids.items)
for _ids, members in _iteritems():
for members in ids.values():
if len(members) > 1:
cls.log.error("ID found on multiple nodes: '%s'" % members)
members_text = "\n".join(
"- {}".format(member) for member in sorted(members)
)
cls.log.error(
"ID found on multiple nodes:\n{}".format(members_text)
)
invalid.extend(members)
return invalid

View file

@ -84,19 +84,11 @@ class ValidateResolution(pyblish.api.InstancePlugin,
@classmethod
def get_folder_resolution(cls, instance):
folder_attributes = instance.data["folderEntity"]["attrib"]
if (
"resolutionWidth" in folder_attributes
and "resolutionHeight" in folder_attributes
and "pixelAspect" in folder_attributes
):
width = folder_attributes["resolutionWidth"]
height = folder_attributes["resolutionHeight"]
pixelAspect = folder_attributes["pixelAspect"]
return int(width), int(height), float(pixelAspect)
# Defaults if not found in asset document or project document
return 1920, 1080, 1.0
task_attributes = instance.data["taskEntity"]["attrib"]
width = task_attributes["resolutionWidth"]
height = task_attributes["resolutionHeight"]
pixel_aspect = task_attributes["pixelAspect"]
return int(width), int(height), float(pixel_aspect)
@classmethod
def repair(cls, instance):

View file

@ -49,8 +49,9 @@ def get_selected_nodes():
"""Get information from current selection"""
selection = cmds.ls(selection=True, long=True)
hierarchy = lib.get_all_children(selection)
return list(set(selection + hierarchy))
hierarchy = lib.get_all_children(selection,
ignore_intermediate_objects=True)
return list(hierarchy.union(selection))
def get_all_asset_nodes():

View file

@ -2627,11 +2627,11 @@ class NukeDirmap(HostDirmap):
class DirmapCache:
"""Caching class to get settings and sync_module easily and only once."""
"""Caching class to get settings and sitesync easily and only once."""
_project_name = None
_project_settings = None
_sync_module_discovered = False
_sync_module = None
_sitesync_addon_discovered = False
_sitesync_addon = None
_mapping = None
@classmethod
@ -2647,11 +2647,11 @@ class DirmapCache:
return cls._project_settings
@classmethod
def sync_module(cls):
if not cls._sync_module_discovered:
cls._sync_module_discovered = True
cls._sync_module = AddonsManager().get("sync_server")
return cls._sync_module
def sitesync_addon(cls):
if not cls._sitesync_addon_discovered:
cls._sitesync_addon_discovered = True
cls._sitesync_addon = AddonsManager().get("sitesync")
return cls._sitesync_addon
@classmethod
def mapping(cls):
@ -2673,7 +2673,7 @@ def dirmap_file_name_filter(file_name):
"nuke",
DirmapCache.project_name(),
DirmapCache.project_settings(),
DirmapCache.sync_module(),
DirmapCache.sitesync_addon(),
)
if not DirmapCache.mapping():
DirmapCache.set_mapping(dirmap_processor.get_mappings())

View file

@ -130,6 +130,18 @@ class LoadClip(plugin.NukeLoader):
first = 1
last = first + duration
# If a slate is present, the frame range is 1 frame longer for movies,
# but for file sequences it's the first frame that is 1 frame lower.
slate_frames = repre_entity["data"].get("slateFrames", 0)
extension = "." + repre_entity["context"]["ext"]
if extension in VIDEO_EXTENSIONS:
last += slate_frames
files_count = len(repre_entity["files"])
if extension in IMAGE_EXTENSIONS and files_count != 1:
first -= slate_frames
# Fallback to folder name when namespace is None
if namespace is None:
namespace = context["folder"]["name"]
@ -167,7 +179,9 @@ class LoadClip(plugin.NukeLoader):
repre_entity
)
self._set_range_to_node(read_node, first, last, start_at_workfile)
self._set_range_to_node(
read_node, first, last, start_at_workfile, slate_frames
)
version_name = version_entity["version"]
if version_name < 0:
@ -402,14 +416,21 @@ class LoadClip(plugin.NukeLoader):
for member in members:
nuke.delete(member)
def _set_range_to_node(self, read_node, first, last, start_at_workfile):
def _set_range_to_node(
self, read_node, first, last, start_at_workfile, slate_frames=0
):
read_node['origfirst'].setValue(int(first))
read_node['first'].setValue(int(first))
read_node['origlast'].setValue(int(last))
read_node['last'].setValue(int(last))
# set start frame depending on workfile or version
self._loader_shift(read_node, start_at_workfile)
if start_at_workfile:
read_node['frame_mode'].setValue("start at")
start_frame = self.script_start - slate_frames
read_node['frame'].setValue(str(start_frame))
def _make_retimes(self, parent_node, version_data):
''' Create all retime and timewarping nodes with copied animation '''
@ -466,18 +487,6 @@ class LoadClip(plugin.NukeLoader):
for i, n in enumerate(dependent_nodes):
last_node.setInput(i, n)
def _loader_shift(self, read_node, workfile_start=False):
""" Set start frame of read node to a workfile start
Args:
read_node (nuke.Node): The nuke's read node
workfile_start (bool): set workfile start frame if true
"""
if workfile_start:
read_node['frame_mode'].setValue("start at")
read_node['frame'].setValue(str(self.script_start))
def _get_node_name(self, context):
folder_entity = context["folder"]
product_name = context["product"]["name"]

View file

@ -300,6 +300,10 @@ class ExtractSlateFrame(publish.Extractor):
self.log.debug(
"__ matching_repre: {}".format(pformat(matching_repre)))
data = matching_repre.get("data", {})
data["slateFrames"] = 1
matching_repre["data"] = data
self.log.info("Added slate frame to representation files")
def add_comment_slate_node(self, instance, node):

View file

@ -67,7 +67,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
"FTRACK_SERVER",
"AYON_APP_NAME",
"AYON_USERNAME",
"OPENPYPE_SG_USER",
"AYON_SG_USERNAME",
"KITSU_LOGIN",
"KITSU_PWD"
]

View file

@ -130,7 +130,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
"FTRACK_SERVER",
"AYON_APP_NAME",
"AYON_USERNAME",
"OPENPYPE_SG_USER",
"AYON_SG_USERNAME",
"KITSU_LOGIN",
"KITSU_PWD"
]

View file

@ -65,7 +65,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,
"FTRACK_SERVER",
"AYON_APP_NAME",
"AYON_USERNAME",
"OPENPYPE_SG_USER",
"AYON_SG_USERNAME",
]
priority = 50

View file

@ -447,7 +447,7 @@ class CacheItem:
class Anatomy(BaseAnatomy):
_sync_server_addon_cache = CacheItem()
_sitesync_addon_cache = CacheItem()
_project_cache = collections.defaultdict(CacheItem)
_default_site_id_cache = collections.defaultdict(CacheItem)
_root_overrides_cache = collections.defaultdict(
@ -482,13 +482,13 @@ class Anatomy(BaseAnatomy):
return copy.deepcopy(project_cache.data)
@classmethod
def get_sync_server_addon(cls):
if cls._sync_server_addon_cache.is_outdated:
def get_sitesync_addon(cls):
if cls._sitesync_addon_cache.is_outdated:
manager = AddonsManager()
cls._sync_server_addon_cache.update_data(
manager.get_enabled_addon("sync_server")
cls._sitesync_addon_cache.update_data(
manager.get_enabled_addon("sitesync")
)
return cls._sync_server_addon_cache.data
return cls._sitesync_addon_cache.data
@classmethod
def _get_studio_roots_overrides(cls, project_name):
@ -525,8 +525,8 @@ class Anatomy(BaseAnatomy):
"""
# First check if sync server is available and enabled
sync_server = cls.get_sync_server_addon()
if sync_server is None or not sync_server.enabled:
sitesync_addon = cls.get_sitesync_addon()
if sitesync_addon is None or not sitesync_addon.enabled:
# QUESTION is ok to force 'studio' when site sync is not enabled?
site_name = "studio"
@ -535,7 +535,7 @@ class Anatomy(BaseAnatomy):
project_cache = cls._default_site_id_cache[project_name]
if project_cache.is_outdated:
project_cache.update_data(
sync_server.get_active_site_type(project_name)
sitesync_addon.get_active_site_type(project_name)
)
site_name = project_cache.data
@ -549,7 +549,7 @@ class Anatomy(BaseAnatomy):
)
else:
# Ask sync server to get roots overrides
roots_overrides = sync_server.get_site_root_overrides(
roots_overrides = sitesync.get_site_root_overrides(
project_name, site_name
)
site_cache.update_data(roots_overrides)

View file

@ -91,9 +91,15 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
longest_key = max(self.templates.keys(), key=len)
dropdown.setMinimumContentsLength(len(longest_key))
template_label = QtWidgets.QLabel()
template_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor))
template_label.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)
template_dir_label = QtWidgets.QLabel()
template_dir_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor))
template_dir_label.setTextInteractionFlags(
QtCore.Qt.TextSelectableByMouse)
template_file_label = QtWidgets.QLabel()
template_file_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor))
template_file_label.setTextInteractionFlags(
QtCore.Qt.TextSelectableByMouse)
renumber_frame = QtWidgets.QCheckBox()
@ -123,7 +129,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
input_layout.addRow("Selected representations", selected_label)
input_layout.addRow("Delivery template", dropdown)
input_layout.addRow("Template value", template_label)
input_layout.addRow("Directory template", template_dir_label)
input_layout.addRow("File template", template_file_label)
input_layout.addRow("Renumber Frame", renumber_frame)
input_layout.addRow("Renumber start frame", first_frame_start)
input_layout.addRow("Root", root_line_edit)
@ -151,7 +158,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
layout.addWidget(text_area)
self.selected_label = selected_label
self.template_label = template_label
self.template_dir_label = template_dir_label
self.template_file_label = template_file_label
self.dropdown = dropdown
self.first_frame_start = first_frame_start
self.renumber_frame = renumber_frame
@ -282,11 +290,13 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
"""Adds list of delivery templates from Anatomy to dropdown."""
templates = {}
for template_name, value in anatomy.templates["delivery"].items():
path_template = value["path"]
if (
not isinstance(path_template, str)
or not path_template.startswith('{root')
):
directory_template = value["directory"]
if not directory_template.startswith("{root"):
self.log.warning(
"Skipping template '%s' because directory template does "
"not start with `{root` in value: %s",
template_name, directory_template
)
continue
templates[template_name] = value
@ -350,7 +360,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
name = self.dropdown.currentText()
template_value = self.templates.get(name)
if template_value:
self.template_label.setText(template_value)
self.template_dir_label.setText(template_value["directory"])
self.template_file_label.setText(template_value["file"])
self.btn_delivery.setEnabled(bool(self._get_selected_repres()))
def _update_progress(self, uploaded):

View file

@ -1,5 +1,5 @@
from pyblish import api
from ayon_core.settings import get_current_project_settings
from ayon_core.settings import get_project_settings
class CollectSettings(api.ContextPlugin):
@ -9,4 +9,9 @@ class CollectSettings(api.ContextPlugin):
label = "Collect Settings"
def process(self, context):
context.data["project_settings"] = get_current_project_settings()
project_name = context.data["projectName"]
self.log.debug(
"Collecting settings for project: {}".format(project_name)
)
project_settings = get_project_settings(project_name)
context.data["project_settings"] = project_settings

View file

@ -27,7 +27,7 @@ class ExtractBurnin(publish.Extractor):
Extractor to create video with pre-defined burnins from
existing extracted video representation.
It will work only on represenations having `burnin = True` or
It will work only on representations having `burnin = True` or
`tags` including `burnin`
"""
@ -125,7 +125,7 @@ class ExtractBurnin(publish.Extractor):
burnin_defs = copy.deepcopy(src_burnin_defs)
# Filter output definition by `burnin` represetation key
# Filter output definition by `burnin` representation key
repre_linked_burnins = [
burnin_def
for burnin_def in burnin_defs
@ -378,6 +378,7 @@ class ExtractBurnin(publish.Extractor):
# Prepare subprocess arguments
args = list(executable_args)
args.append(temporary_json_filepath)
args.append("--headless")
self.log.debug("Executing: {}".format(" ".join(args)))
# Run burnin script
@ -547,7 +548,7 @@ class ExtractBurnin(publish.Extractor):
return burnin_data, temp_data
def repres_is_valid(self, repre):
"""Validation if representaion should be processed.
"""Validation if representation should be processed.
Args:
repre (dict): Representation which should be checked.
@ -579,7 +580,7 @@ class ExtractBurnin(publish.Extractor):
tags (list): Tags of processed representation.
Returns:
list: Containg all burnin definitions matching entered tags.
list: Contains all burnin definitions matching the entered tags.
"""
filtered_burnins = []
@ -604,7 +605,7 @@ class ExtractBurnin(publish.Extractor):
Store data to `temp_data` for keys "full_input_path" which is full path
to source files optionally with sequence formatting,
"full_output_path" full path to otput with optionally with sequence
"full_output_path" full path to output with optionally with sequence
formatting, "full_input_paths" list of all source files which will be
deleted when burnin script ends, "repre_files" list of output
filenames.
@ -754,7 +755,7 @@ class ExtractBurnin(publish.Extractor):
profile (dict): Profile from presets matching current context.
Returns:
list: Containg all valid output definitions.
list: Contains all valid output definitions.
"""
filtered_burnin_defs = []
@ -775,7 +776,7 @@ class ExtractBurnin(publish.Extractor):
):
self.log.debug((
"Skipped burnin definition \"{}\". Family"
" fiters ({}) does not match current instance families: {}"
" filters ({}) does not match current instance families: {}"
).format(
filename_suffix, str(families_filters), str(families)
))

View file

@ -32,6 +32,35 @@ from ayon_core.pipeline.publish import (
from ayon_core.pipeline.publish.lib import add_repre_files_for_cleanup
def frame_to_timecode(frame: int, fps: float) -> str:
    """Convert a frame number and FPS to editorial timecode (HH:MM:SS:FF).

    Unlike `ayon_core.pipeline.editorial.frames_to_timecode` this does not
    rely on the `opentimelineio` package, so it can be used across hosts that
    do not have it available.

    Args:
        frame (int): The frame number to be converted.
        fps (float): The frames per second of the video.

    Returns:
        str: The timecode in HH:MM:SS:FF format.
    """
    # Calculate total seconds
    total_seconds = frame / fps

    # Extract hours, minutes, and seconds
    hours = int(total_seconds // 3600)
    minutes = int((total_seconds % 3600) // 60)
    seconds = int(total_seconds % 60)

    # Adjust for non-integer FPS by rounding the remaining frames appropriately
    remaining_frames = round((total_seconds - int(total_seconds)) * fps)

    # Rounding can yield a full second worth of frames (e.g. frame 989 at
    # 29.97 fps rounds to 30), which would produce an invalid FF field like
    # "00:00:32:30". Carry the overflow into the seconds component.
    if remaining_frames >= fps:
        remaining_frames = 0
        seconds += 1
        minutes += seconds // 60
        seconds %= 60
        hours += minutes // 60
        minutes %= 60

    # Format and return the timecode
    return f"{hours:02d}:{minutes:02d}:{seconds:02d}:{remaining_frames:02d}"
class ExtractReview(pyblish.api.InstancePlugin):
"""Extracting Review mov file for Ftrack
@ -390,7 +419,16 @@ class ExtractReview(pyblish.api.InstancePlugin):
# add outputName to anatomy format fill_data
fill_data.update({
"output": output_name,
"ext": output_ext
"ext": output_ext,
# By adding `timecode` as data we can use it
# in the ffmpeg arguments for `--timecode` so that editorial
# like Resolve or Premiere can detect the start frame for e.g.
# review output files
"timecode": frame_to_timecode(
frame=temp_data["frame_start_handle"],
fps=float(instance.data["fps"])
)
})
try: # temporary until oiiotool is supported cross platform

View file

@ -0,0 +1,22 @@
import pyblish.api
from ayon_core.pipeline import publish
class ExtractSlateData(publish.Extractor):
    """Add slate data for integration."""

    label = "Slate Data"
    # Offset from ExtractReviewSlate and ExtractGenerateSlate.
    order = pyblish.api.ExtractorOrder + 0.49
    families = ["slate", "review"]
    hosts = ["nuke", "shell"]

    def process(self, instance):
        """Mark every slate-tagged representation with its slate frame count."""
        representations = instance.data.get("representations", [])
        for repre in representations:
            tags = repre.get("tags", [])
            if "slate-frame" not in tags:
                continue
            repre_data = repre.setdefault("data", {})
            repre_data["slateFrames"] = 1

View file

@ -871,7 +871,7 @@ class FrontendLoaderController(_BaseLoaderController):
# Site sync functions
@abstractmethod
def is_site_sync_enabled(self, project_name=None):
def is_sitesync_enabled(self, project_name=None):
"""Is site sync enabled.
Site sync addon can be enabled but can be disabled per project.

View file

@ -113,7 +113,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
self._products_model = ProductsModel(self)
self._loader_actions_model = LoaderActionsModel(self)
self._thumbnails_model = ThumbnailsModel()
self._site_sync_model = SiteSyncModel(self)
self._sitesync_model = SiteSyncModel(self)
@property
def log(self):
@ -149,7 +149,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
self._loader_actions_model.reset()
self._projects_model.reset()
self._thumbnails_model.reset()
self._site_sync_model.reset()
self._sitesync_model.reset()
self._projects_model.refresh()
@ -240,7 +240,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
project_name, representation_ids)
)
action_items.extend(self._site_sync_model.get_site_sync_action_items(
action_items.extend(self._sitesync_model.get_sitesync_action_items(
project_name, representation_ids)
)
@ -254,8 +254,8 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
version_ids,
representation_ids
):
if self._site_sync_model.is_site_sync_action(identifier):
self._site_sync_model.trigger_action_item(
if self._sitesync_model.is_sitesync_action(identifier):
self._sitesync_model.trigger_action_item(
identifier,
project_name,
representation_ids
@ -368,24 +368,24 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
self._loaded_products_cache.update_data(product_ids)
return self._loaded_products_cache.get_data()
def is_site_sync_enabled(self, project_name=None):
return self._site_sync_model.is_site_sync_enabled(project_name)
def is_sitesync_enabled(self, project_name=None):
return self._sitesync_model.is_sitesync_enabled(project_name)
def get_active_site_icon_def(self, project_name):
return self._site_sync_model.get_active_site_icon_def(project_name)
return self._sitesync_model.get_active_site_icon_def(project_name)
def get_remote_site_icon_def(self, project_name):
return self._site_sync_model.get_remote_site_icon_def(project_name)
return self._sitesync_model.get_remote_site_icon_def(project_name)
def get_version_sync_availability(self, project_name, version_ids):
return self._site_sync_model.get_version_sync_availability(
return self._sitesync_model.get_version_sync_availability(
project_name, version_ids
)
def get_representations_sync_status(
self, project_name, representation_ids
):
return self._site_sync_model.get_representations_sync_status(
return self._sitesync_model.get_representations_sync_status(
project_name, representation_ids
)

View file

@ -1,7 +1,7 @@
from .selection import SelectionModel
from .products import ProductsModel
from .actions import LoaderActionsModel
from .site_sync import SiteSyncModel
from .sitesync import SiteSyncModel
__all__ = (

View file

@ -36,7 +36,7 @@ class SiteSyncModel:
self._controller = controller
self._site_icons = None
self._site_sync_enabled_cache = NestedCacheItem(
self._sitesync_enabled_cache = NestedCacheItem(
levels=1, lifetime=self.lifetime
)
self._active_site_cache = NestedCacheItem(
@ -57,17 +57,17 @@ class SiteSyncModel:
)
manager = AddonsManager()
self._site_sync_addon = manager.get("sync_server")
self._sitesync_addon = manager.get("sitesync")
def reset(self):
self._site_icons = None
self._site_sync_enabled_cache.reset()
self._sitesync_enabled_cache.reset()
self._active_site_cache.reset()
self._remote_site_cache.reset()
self._version_availability_cache.reset()
self._repre_status_cache.reset()
def is_site_sync_enabled(self, project_name=None):
def is_sitesync_enabled(self, project_name=None):
"""Site sync is enabled for a project.
Returns false if site sync addon is not available or enabled
@ -82,13 +82,13 @@ class SiteSyncModel:
bool: Site sync is enabled.
"""
if not self._is_site_sync_addon_enabled():
if not self._is_sitesync_addon_enabled():
return False
cache = self._site_sync_enabled_cache[project_name]
cache = self._sitesync_enabled_cache[project_name]
if not cache.is_valid:
enabled = True
if project_name:
enabled = self._site_sync_addon.is_project_enabled(
enabled = self._sitesync_addon.is_project_enabled(
project_name, single=True
)
cache.update_data(enabled)
@ -107,8 +107,8 @@ class SiteSyncModel:
cache = self._active_site_cache[project_name]
if not cache.is_valid:
site_name = None
if project_name and self._is_site_sync_addon_enabled():
site_name = self._site_sync_addon.get_active_site(project_name)
if project_name and self._is_sitesync_addon_enabled():
site_name = self._sitesync_addon.get_active_site(project_name)
cache.update_data(site_name)
return cache.get_data()
@ -125,8 +125,8 @@ class SiteSyncModel:
cache = self._remote_site_cache[project_name]
if not cache.is_valid:
site_name = None
if project_name and self._is_site_sync_addon_enabled():
site_name = self._site_sync_addon.get_remote_site(project_name)
if project_name and self._is_sitesync_addon_enabled():
site_name = self._sitesync_addon.get_remote_site(project_name)
cache.update_data(site_name)
return cache.get_data()
@ -140,7 +140,7 @@ class SiteSyncModel:
Union[dict[str, Any], None]: Site icon definition.
"""
if not project_name or not self.is_site_sync_enabled(project_name):
if not project_name or not self.is_sitesync_enabled(project_name):
return None
active_site = self.get_active_site(project_name)
return self._get_site_icon_def(project_name, active_site)
@ -155,14 +155,14 @@ class SiteSyncModel:
Union[dict[str, Any], None]: Site icon definition.
"""
if not project_name or not self.is_site_sync_enabled(project_name):
if not project_name or not self.is_sitesync_enabled(project_name):
return None
remote_site = self.get_remote_site(project_name)
return self._get_site_icon_def(project_name, remote_site)
def _get_site_icon_def(self, project_name, site_name):
# use different icon for studio even if provider is 'local_drive'
if site_name == self._site_sync_addon.DEFAULT_SITE:
if site_name == self._sitesync_addon.DEFAULT_SITE:
provider = "studio"
else:
provider = self._get_provider_for_site(project_name, site_name)
@ -179,7 +179,7 @@ class SiteSyncModel:
dict[str, tuple[int, int]]
"""
if not self.is_site_sync_enabled(project_name):
if not self.is_sitesync_enabled(project_name):
return {
version_id: _default_version_availability()
for version_id in version_ids
@ -217,7 +217,7 @@ class SiteSyncModel:
dict[str, tuple[float, float]]
"""
if not self.is_site_sync_enabled(project_name):
if not self.is_sitesync_enabled(project_name):
return {
repre_id: _default_repre_status()
for repre_id in representation_ids
@ -242,7 +242,7 @@ class SiteSyncModel:
output[repre_id] = repre_cache.get_data()
return output
def get_site_sync_action_items(self, project_name, representation_ids):
def get_sitesync_action_items(self, project_name, representation_ids):
"""
Args:
@ -253,7 +253,7 @@ class SiteSyncModel:
list[ActionItem]: Actions that can be shown in loader.
"""
if not self.is_site_sync_enabled(project_name):
if not self.is_sitesync_enabled(project_name):
return []
repres_status = self.get_representations_sync_status(
@ -289,7 +289,7 @@ class SiteSyncModel:
return action_items
def is_site_sync_action(self, identifier):
def is_sitesync_action(self, identifier):
"""Should be `identifier` handled by SiteSync.
Args:
@ -353,22 +353,22 @@ class SiteSyncModel:
)
elif identifier == REMOVE_IDENTIFIER:
self._site_sync_addon.remove_site(
self._sitesync_addon.remove_site(
project_name,
repre_id,
active_site,
remove_local_files=True
)
def _is_site_sync_addon_enabled(self):
def _is_sitesync_addon_enabled(self):
"""
Returns:
bool: Site sync addon is enabled.
"""
if self._site_sync_addon is None:
if self._sitesync_addon is None:
return False
return self._site_sync_addon.enabled
return self._sitesync_addon.enabled
def _get_provider_for_site(self, project_name, site_name):
"""Provider for a site.
@ -381,9 +381,9 @@ class SiteSyncModel:
Union[str, None]: Provider name.
"""
if not self._is_site_sync_addon_enabled():
if not self._is_sitesync_addon_enabled():
return None
return self._site_sync_addon.get_provider_for_site(
return self._sitesync_addon.get_provider_for_site(
project_name, site_name
)
@ -398,7 +398,7 @@ class SiteSyncModel:
return None
if self._site_icons is None:
self._site_icons = self._site_sync_addon.get_site_icons()
self._site_icons = self._sitesync_addon.get_site_icons()
return self._site_icons.get(provider)
def _refresh_version_availability(self, project_name, version_ids):
@ -406,7 +406,7 @@ class SiteSyncModel:
return
project_cache = self._version_availability_cache[project_name]
avail_by_id = self._site_sync_addon.get_version_availability(
avail_by_id = self._sitesync_addon.get_version_availability(
project_name,
version_ids,
self.get_active_site(project_name),
@ -425,7 +425,7 @@ class SiteSyncModel:
return
project_cache = self._repre_status_cache[project_name]
status_by_repre_id = (
self._site_sync_addon.get_representations_sync_state(
self._sitesync_addon.get_representations_sync_state(
project_name,
representation_ids,
self.get_active_site(project_name),
@ -496,7 +496,7 @@ class SiteSyncModel:
)
def _add_site(self, project_name, repre_entity, site_name, product_type):
self._site_sync_addon.add_site(
self._sitesync_addon.add_site(
project_name, repre_entity["id"], site_name, force=True
)
@ -513,7 +513,7 @@ class SiteSyncModel:
try:
print("Adding {} to linked representation: {}".format(
site_name, link_repre_id))
self._site_sync_addon.add_site(
self._sitesync_addon.add_site(
project_name,
link_repre_id,
site_name,

View file

@ -73,7 +73,7 @@ class ProductsModel(QtGui.QStandardItemModel):
published_time_col = column_labels.index("Time")
folders_label_col = column_labels.index("Folder")
in_scene_col = column_labels.index("In scene")
site_sync_avail_col = column_labels.index("Availability")
sitesync_avail_col = column_labels.index("Availability")
def __init__(self, controller):
super(ProductsModel, self).__init__()

View file

@ -139,9 +139,9 @@ class ProductsWidget(QtWidgets.QWidget):
products_view.setItemDelegateForColumn(
products_model.in_scene_col, in_scene_delegate)
site_sync_delegate = SiteSyncDelegate()
sitesync_delegate = SiteSyncDelegate()
products_view.setItemDelegateForColumn(
products_model.site_sync_avail_col, site_sync_delegate)
products_model.sitesync_avail_col, sitesync_delegate)
main_layout = QtWidgets.QHBoxLayout(self)
main_layout.setContentsMargins(0, 0, 0, 0)
@ -176,7 +176,7 @@ class ProductsWidget(QtWidgets.QWidget):
self._version_delegate = version_delegate
self._time_delegate = time_delegate
self._in_scene_delegate = in_scene_delegate
self._site_sync_delegate = site_sync_delegate
self._sitesync_delegate = sitesync_delegate
self._selected_project_name = None
self._selected_folder_ids = set()
@ -192,8 +192,8 @@ class ProductsWidget(QtWidgets.QWidget):
products_model.in_scene_col,
not controller.is_loaded_products_supported()
)
self._set_site_sync_visibility(
self._controller.is_site_sync_enabled()
self._set_sitesync_visibility(
self._controller.is_sitesync_enabled()
)
def set_name_filter(self, name):
@ -229,10 +229,10 @@ class ProductsWidget(QtWidgets.QWidget):
def refresh(self):
self._refresh_model()
def _set_site_sync_visibility(self, site_sync_enabled):
def _set_sitesync_visibility(self, sitesync_enabled):
self._products_view.setColumnHidden(
self._products_model.site_sync_avail_col,
not site_sync_enabled
self._products_model.sitesync_avail_col,
not sitesync_enabled
)
def _fill_version_editor(self):
@ -395,10 +395,10 @@ class ProductsWidget(QtWidgets.QWidget):
def _on_folders_selection_change(self, event):
project_name = event["project_name"]
site_sync_enabled = self._controller.is_site_sync_enabled(
sitesync_enabled = self._controller.is_sitesync_enabled(
project_name
)
self._set_site_sync_visibility(site_sync_enabled)
self._set_sitesync_visibility(sitesync_enabled)
self._selected_project_name = project_name
self._selected_folder_ids = event["folder_ids"]
self._refresh_model()

View file

@ -307,8 +307,8 @@ class RepresentationsWidget(QtWidgets.QWidget):
self._repre_model = repre_model
self._repre_proxy_model = repre_proxy_model
self._set_site_sync_visibility(
self._controller.is_site_sync_enabled()
self._set_sitesync_visibility(
self._controller.is_sitesync_enabled()
)
self._set_multiple_folders_selected(False)
@ -320,19 +320,19 @@ class RepresentationsWidget(QtWidgets.QWidget):
def _on_project_change(self, event):
self._selected_project_name = event["project_name"]
site_sync_enabled = self._controller.is_site_sync_enabled(
sitesync_enabled = self._controller.is_sitesync_enabled(
self._selected_project_name
)
self._set_site_sync_visibility(site_sync_enabled)
self._set_sitesync_visibility(sitesync_enabled)
def _set_site_sync_visibility(self, site_sync_enabled):
def _set_sitesync_visibility(self, sitesync_enabled):
self._repre_view.setColumnHidden(
self._repre_model.active_site_column,
not site_sync_enabled
not sitesync_enabled
)
self._repre_view.setColumnHidden(
self._repre_model.remote_site_column,
not site_sync_enabled
not sitesync_enabled
)
def _set_multiple_folders_selected(self, selected_multiple_folders):

View file

@ -28,7 +28,7 @@ class SceneInventoryController:
self._current_folder_id = None
self._current_folder_set = False
self._site_sync_model = SiteSyncModel(self)
self._sitesync_model = SiteSyncModel(self)
# Switch dialog requirements
self._hierarchy_model = HierarchyModel(self)
self._event_system = self._create_event_system()
@ -47,7 +47,7 @@ class SceneInventoryController:
self._current_folder_id = None
self._current_folder_set = False
self._site_sync_model.reset()
self._sitesync_model.reset()
self._hierarchy_model.reset()
def get_current_context(self):
@ -89,22 +89,22 @@ class SceneInventoryController:
return []
# Site Sync methods
def is_sync_server_enabled(self):
return self._site_sync_model.is_sync_server_enabled()
def is_sitesync_enabled(self):
return self._sitesync_model.is_sitesync_enabled()
def get_sites_information(self):
return self._site_sync_model.get_sites_information()
return self._sitesync_model.get_sites_information()
def get_site_provider_icons(self):
return self._site_sync_model.get_site_provider_icons()
return self._sitesync_model.get_site_provider_icons()
def get_representations_site_progress(self, representation_ids):
return self._site_sync_model.get_representations_site_progress(
return self._sitesync_model.get_representations_site_progress(
representation_ids
)
def resync_representations(self, representation_ids, site_type):
return self._site_sync_model.resync_representations(
return self._sitesync_model.resync_representations(
representation_ids, site_type
)

View file

@ -1,4 +1,4 @@
from .site_sync import SiteSyncModel
from .sitesync import SiteSyncModel
__all__ = (

View file

@ -9,30 +9,30 @@ class SiteSyncModel:
def __init__(self, controller):
self._controller = controller
self._sync_server_module = NOT_SET
self._sync_server_enabled = None
self._sitesync_addon = NOT_SET
self._sitesync_enabled = None
self._active_site = NOT_SET
self._remote_site = NOT_SET
self._active_site_provider = NOT_SET
self._remote_site_provider = NOT_SET
def reset(self):
self._sync_server_module = NOT_SET
self._sync_server_enabled = None
self._sitesync_addon = NOT_SET
self._sitesync_enabled = None
self._active_site = NOT_SET
self._remote_site = NOT_SET
self._active_site_provider = NOT_SET
self._remote_site_provider = NOT_SET
def is_sync_server_enabled(self):
def is_sitesync_enabled(self):
"""Site sync is enabled.
Returns:
bool: Is enabled or not.
"""
self._cache_sync_server_module()
return self._sync_server_enabled
self._cache_sitesync_addon()
return self._sitesync_enabled
def get_site_provider_icons(self):
"""Icon paths per provider.
@ -41,10 +41,10 @@ class SiteSyncModel:
dict[str, str]: Path by provider name.
"""
if not self.is_sync_server_enabled():
if not self.is_sitesync_enabled():
return {}
site_sync_addon = self._get_sync_server_module()
return site_sync_addon.get_site_icons()
sitesync_addon = self._get_sitesync_addon()
return sitesync_addon.get_site_icons()
def get_sites_information(self):
return {
@ -65,11 +65,11 @@ class SiteSyncModel:
}
for repre_id in representation_ids
}
if not self.is_sync_server_enabled():
if not self.is_sitesync_enabled():
return output
project_name = self._controller.get_current_project_name()
site_sync = self._get_sync_server_module()
sitesync_addon = self._get_sitesync_addon()
repre_entities = ayon_api.get_representations(
project_name, representation_ids
)
@ -78,7 +78,7 @@ class SiteSyncModel:
for repre_entity in repre_entities:
repre_output = output[repre_entity["id"]]
result = site_sync.get_progress_for_repre(
result = sitesync_addon.get_progress_for_repre(
repre_entity, active_site, remote_site
)
repre_output["active_site"] = result[active_site]
@ -95,7 +95,7 @@ class SiteSyncModel:
"""
project_name = self._controller.get_current_project_name()
site_sync = self._get_sync_server_module()
sitesync_addon = self._get_sitesync_addon()
active_site = self._get_active_site()
remote_site = self._get_remote_site()
progress = self.get_representations_site_progress(
@ -115,22 +115,22 @@ class SiteSyncModel:
site = remote_site
if check_progress == 1:
site_sync.add_site(
sitesync_addon.add_site(
project_name, repre_id, site, force=True
)
def _get_sync_server_module(self):
self._cache_sync_server_module()
return self._sync_server_module
def _get_sitesync_addon(self):
self._cache_sitesync_addon()
return self._sitesync_addon
def _cache_sync_server_module(self):
if self._sync_server_module is not NOT_SET:
return self._sync_server_module
def _cache_sitesync_addon(self):
if self._sitesync_addon is not NOT_SET:
return self._sitesync_addon
manager = AddonsManager()
site_sync = manager.get("sync_server")
sync_enabled = site_sync is not None and site_sync.enabled
self._sync_server_module = site_sync
self._sync_server_enabled = sync_enabled
sitesync_addon = manager.get("sitesync")
sync_enabled = sitesync_addon is not None and sitesync_addon.enabled
self._sitesync_addon = sitesync_addon
self._sitesync_enabled = sync_enabled
def _get_active_site(self):
if self._active_site is NOT_SET:
@ -157,19 +157,19 @@ class SiteSyncModel:
remote_site = None
active_site_provider = None
remote_site_provider = None
if self.is_sync_server_enabled():
site_sync = self._get_sync_server_module()
if self.is_sitesync_enabled():
sitesync_addon = self._get_sitesync_addon()
project_name = self._controller.get_current_project_name()
active_site = site_sync.get_active_site(project_name)
remote_site = site_sync.get_remote_site(project_name)
active_site = sitesync_addon.get_active_site(project_name)
remote_site = sitesync_addon.get_remote_site(project_name)
active_site_provider = "studio"
remote_site_provider = "studio"
if active_site != "studio":
active_site_provider = site_sync.get_provider_for_site(
active_site_provider = sitesync_addon.get_provider_for_site(
project_name, active_site
)
if remote_site != "studio":
remote_site_provider = site_sync.get_provider_for_site(
remote_site_provider = sitesync_addon.get_provider_for_site(
project_name, remote_site
)

View file

@ -311,9 +311,9 @@ class SceneInventoryView(QtWidgets.QTreeView):
menu.addAction(remove_action)
self._handle_sync_server(menu, repre_ids)
self._handle_sitesync(menu, repre_ids)
def _handle_sync_server(self, menu, repre_ids):
def _handle_sitesync(self, menu, repre_ids):
"""Adds actions for download/upload when SyncServer is enabled
Args:
@ -324,7 +324,7 @@ class SceneInventoryView(QtWidgets.QTreeView):
(OptionMenu)
"""
if not self._controller.is_sync_server_enabled():
if not self._controller.is_sitesync_enabled():
return
menu.addSeparator()

View file

@ -70,7 +70,7 @@ class SceneInventoryWindow(QtWidgets.QDialog):
view = SceneInventoryView(controller, self)
view.setModel(proxy)
sync_enabled = controller.is_sync_server_enabled()
sync_enabled = controller.is_sitesync_enabled()
view.setColumnHidden(model.active_site_col, not sync_enabled)
view.setColumnHidden(model.remote_site_col, not sync_enabled)

View file

@ -659,16 +659,7 @@ class BaseWorkfileController(
folder_id != self.get_current_folder_id()
or task_name != self.get_current_task_name()
):
folder_entity = ayon_api.get_folder_by_id(
event_data["project_name"],
event_data["folder_id"],
)
task_entity = ayon_api.get_task_by_name(
event_data["project_name"],
event_data["folder_id"],
event_data["task_name"]
)
change_current_context(folder_entity, task_entity)
self._change_current_context(project_name, folder_id, task_id)
self._host_open_workfile(filepath)
@ -710,16 +701,8 @@ class BaseWorkfileController(
folder_id != self.get_current_folder_id()
or task_name != self.get_current_task_name()
):
folder_entity = ayon_api.get_folder_by_id(
project_name, folder["id"]
)
task_entity = ayon_api.get_task_by_name(
project_name, folder["id"], task_name
)
change_current_context(
folder_entity,
task_entity,
template_key=template_key
self._change_current_context(
project_name, folder_id, task_id, template_key
)
# Save workfile
@ -744,4 +727,18 @@ class BaseWorkfileController(
# Trigger after save events
emit_event("workfile.save.after", event_data, source="workfiles.tool")
self.reset()
def _change_current_context(
self, project_name, folder_id, task_id, template_key=None
):
# Change current context
folder_entity = self.get_folder_entity(project_name, folder_id)
task_entity = self.get_task_entity(project_name, task_id)
change_current_context(
folder_entity,
task_entity,
template_key=template_key
)
self._current_folder_id = folder_entity["id"]
self._current_folder_path = folder_entity["path"]
self._current_task_name = task_entity["name"]

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
__version__ = "0.3.0-dev.1"
__version__ = "0.3.1-dev.1"

View file

@ -1,6 +1,6 @@
name = "core"
title = "Core"
version = "0.3.0-dev.1"
version = "0.3.1-dev.1"
client_dir = "ayon_core"

View file

@ -5,7 +5,7 @@
[tool.poetry]
name = "ayon-core"
version = "0.3.0"
version = "0.3.1"
description = ""
authors = ["Ynput Team <team@ynput.io>"]
readme = "README.md"

View file

@ -173,6 +173,7 @@ def _product_types_enum():
"rig",
"setdress",
"take",
"usd",
"usdShade",
"vdbcache",
"vrayproxy",

View file

@ -3,6 +3,7 @@ import json
import copy
from ayon_server.addons import BaseServerAddon, AddonLibrary
from ayon_server.entities.core import attribute_library
from ayon_server.lib.postgres import Postgres
from .version import __version__
@ -118,9 +119,28 @@ class ApplicationsAddon(BaseServerAddon):
)
async def setup(self):
need_restart = await self.create_applications_attribute()
need_restart = await self.create_required_attributes()
if need_restart:
self.request_server_restart()
await self._update_enums()
def _get_applications_def(self):
return {
"name": "applications",
"type": "list_of_strings",
"title": "Applications",
"scope": ["project"],
"enum":[],
}
def _get_tools_def(self):
return {
"name": "tools",
"type": "list_of_strings",
"title": "Tools",
"scope": ["project", "folder", "task"],
"enum":[],
}
async def create_applications_attribute(self) -> bool:
"""Make sure there are required attributes which ftrack addon needs.
@ -129,6 +149,73 @@ class ApplicationsAddon(BaseServerAddon):
bool: 'True' if an attribute was created or updated.
"""
need_restart = await self.create_required_attributes()
await self._update_enums()
return need_restart
async def create_required_attributes(self) -> bool:
"""Make sure there are required 'applications' and 'tools' attributes.
This only checks for the existence of the attributes, it does not populate
them with any data. When an attribute is added, server needs to be restarted,
while adding enum data to the attribute does not require a restart.
Returns:
bool: 'True' if an attribute was created or updated.
"""
# keep track of the last attribute position (for adding new attributes)
apps_attribute_data = self._get_applications_def()
tools_attribute_data = self._get_tools_def()
apps_attrib_name = apps_attribute_data["name"]
tools_attrib_name = tools_attribute_data["name"]
async with Postgres.acquire() as conn, conn.transaction():
query = "SELECT BOOL_OR(name = 'applications') AS has_applications, BOOL_OR(name = 'tools') AS has_tools FROM attributes;"
result = (await conn.fetch(query))[0]
attributes_to_create = {}
if not result["has_applications"]:
attributes_to_create[apps_attrib_name] = {
"scope": apps_attribute_data["scope"],
"data": {
"title": apps_attribute_data["title"],
"type": apps_attribute_data["type"],
"enum": [],
}
}
if not result["has_tools"]:
attributes_to_create[tools_attrib_name] = {
"scope": tools_attribute_data["scope"],
"data": {
"title": tools_attribute_data["title"],
"type": tools_attribute_data["type"],
"enum": [],
},
}
needs_restart = False
# when any of the required attributes are not present, add them
# and return 'True' to indicate that server needs to be restarted
for name, payload in attributes_to_create.items():
insert_query = "INSERT INTO attributes (name, scope, data, position) VALUES ($1, $2, $3, (SELECT COALESCE(MAX(position), 0) + 1 FROM attributes)) ON CONFLICT DO NOTHING"
await conn.execute(
insert_query,
name,
payload["scope"],
payload["data"],
)
needs_restart = True
return needs_restart
async def _update_enums(self):
"""Updates applications and tools enums based on the addon settings.
This method is called when the addon is started (after we are sure that the
'applications' and 'tools' attributes exist) and when the addon settings are
updated (using on_settings_updated method).
"""
instance = AddonLibrary.getinstance()
app_defs = instance.data.get(self.name)
all_applications = []
@ -148,33 +235,32 @@ class ApplicationsAddon(BaseServerAddon):
merge_groups(all_applications, app_groups)
merge_groups(all_tools, studio_settings["tool_groups"])
query = "SELECT name, position, scope, data from public.attributes"
apps_attrib_name = "applications"
tools_attrib_name = "tools"
apps_enum = get_enum_items_from_groups(all_applications)
tools_enum = get_enum_items_from_groups(all_tools)
apps_attribute_data = {
"type": "list_of_strings",
"title": "Applications",
"enum": apps_enum
"enum": apps_enum,
}
tools_attribute_data = {
"type": "list_of_strings",
"title": "Tools",
"enum": tools_enum
"enum": tools_enum,
}
apps_scope = ["project"]
tools_scope = ["project", "folder", "task"]
apps_match_position = None
apps_matches = False
tools_match_position = None
tools_matches = False
position = 1
async for row in Postgres.iterate(query):
position += 1
async for row in Postgres.iterate(
"SELECT name, position, scope, data from public.attributes"
):
if row["name"] == apps_attrib_name:
# Check if scope is matching ftrack addon requirements
if (
@ -182,7 +268,6 @@ class ApplicationsAddon(BaseServerAddon):
and row["data"].get("enum") == apps_enum
):
apps_matches = True
apps_match_position = row["position"]
elif row["name"] == tools_attrib_name:
if (
@ -190,45 +275,41 @@ class ApplicationsAddon(BaseServerAddon):
and row["data"].get("enum") == tools_enum
):
tools_matches = True
tools_match_position = row["position"]
if apps_matches and tools_matches:
return False
return
postgre_query = "\n".join((
"INSERT INTO public.attributes",
" (name, position, scope, data)",
"VALUES",
" ($1, $2, $3, $4)",
"ON CONFLICT (name)",
"DO UPDATE SET",
" scope = $3,",
" data = $4",
))
if not apps_matches:
# Reuse position from found attribute
if apps_match_position is None:
apps_match_position = position
position += 1
await Postgres.execute(
postgre_query,
apps_attrib_name,
apps_match_position,
"""
UPDATE attributes SET
scope = $1,
data = $2
WHERE
name = $3
""",
apps_scope,
apps_attribute_data,
apps_attrib_name,
)
if not tools_matches:
if tools_match_position is None:
tools_match_position = position
position += 1
await Postgres.execute(
postgre_query,
tools_attrib_name,
tools_match_position,
"""
UPDATE attributes SET
scope = $1,
data = $2
WHERE
name = $3
""",
tools_scope,
tools_attribute_data,
tools_attrib_name,
)
return True
# Reset attributes cache on server
await attribute_library.load()
async def on_settings_changed(self, *args, **kwargs):
_ = args, kwargs
await self._update_enums()

View file

@ -1 +1 @@
__version__ = "0.1.8"
__version__ = "0.1.9"

View file

@ -75,6 +75,12 @@ class HooksModel(BaseSettingsModel):
default_factory=HookOptionalModel,
title="Install PySide2"
)
FusionLaunchMenuHook: HookOptionalModel = SettingsField(
default_factory=HookOptionalModel,
title="Launch AYON Menu on Fusion Start",
description="Launch the AYON menu on Fusion application startup. "
"This is only supported for Fusion 18+"
)
class CreateSaverModel(CreateSaverPluginModel):
@ -143,6 +149,9 @@ DEFAULT_VALUES = {
"hooks": {
"InstallPySideToFusion": {
"enabled": True
},
"FusionLaunchMenuHook": {
"enabled": False
}
},
"create": {

View file

@ -1 +1 @@
__version__ = "0.1.4"
__version__ = "0.1.5"

View file

@ -2,6 +2,10 @@ from ayon_server.settings import BaseSettingsModel, SettingsField
from ayon_server.types import ColorRGB_float, ColorRGBA_uint8
class LoaderEnabledModel(BaseSettingsModel):
enabled: bool = SettingsField(title="Enabled")
class ColorsSetting(BaseSettingsModel):
model: ColorRGBA_uint8 = SettingsField(
(209, 132, 30, 1.0), title="Model:")
@ -94,6 +98,7 @@ class ReferenceLoaderModel(BaseSettingsModel):
class ImportLoaderModel(BaseSettingsModel):
enabled: bool = SettingsField(title="Enabled")
namespace: str = SettingsField(title="Namespace")
group_name: str = SettingsField(title="Group name")
@ -113,6 +118,89 @@ class LoadersModel(BaseSettingsModel):
title="Import Loader"
)
# Enable/disable loaders
ArnoldStandinLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Arnold Standin Loader"
)
AssemblyLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Assembly Loader"
)
AudioLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Audio Loader"
)
GpuCacheLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="GPU Cache Loader"
)
FileNodeLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="File Node (Image) Loader"
)
ImagePlaneLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Image Plane Loader"
)
LookLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Look Loader"
)
MatchmoveLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Matchmove Loader"
)
MultiverseUsdLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Multiverse USD Loader"
)
MultiverseUsdOverLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Multiverse USD Override Loader"
)
RedshiftProxyLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Redshift Proxy Loader"
)
RenderSetupLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Render Setup Loader"
)
LoadVDBtoArnold: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="VDB to Arnold Loader"
)
LoadVDBtoRedShift: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="VDB to Redshift Loader"
)
LoadVDBtoVRay: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="VDB to V-Ray Loader"
)
VRayProxyLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Vray Proxy Loader"
)
VRaySceneLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="VrayScene Loader"
)
XgenLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Xgen Loader"
)
YetiCacheLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Yeti Cache Loader"
)
YetiRigLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
title="Yeti Rig Loader"
)
DEFAULT_LOADERS_SETTING = {
"colors": {
"model": [209, 132, 30, 1.0],
@ -154,8 +242,29 @@ DEFAULT_LOADERS_SETTING = {
"display_handle": True
},
"import_loader": {
"enabled": True,
"namespace": "{folder[name]}_{product[name]}_##_",
"group_name": "_GRP",
"display_handle": True
}
},
"ArnoldStandinLoader": {"enabled": True},
"AssemblyLoader": {"enabled": True},
"AudioLoader": {"enabled": True},
"FileNodeLoader": {"enabled": True},
"GpuCacheLoader": {"enabled": True},
"ImagePlaneLoader": {"enabled": True},
"LookLoader": {"enabled": True},
"MatchmoveLoader": {"enabled": True},
"MultiverseUsdLoader": {"enabled": True},
"MultiverseUsdOverLoader": {"enabled": True},
"RedshiftProxyLoader": {"enabled": True},
"RenderSetupLoader": {"enabled": True},
"LoadVDBtoArnold": {"enabled": True},
"LoadVDBtoRedShift": {"enabled": True},
"LoadVDBtoVRay": {"enabled": True},
"VRayProxyLoader": {"enabled": True},
"VRaySceneLoader": {"enabled": True},
"XgenLoader": {"enabled": True},
"YetiCacheLoader": {"enabled": True},
"YetiRigLoader": {"enabled": True},
}

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring addon version."""
__version__ = "0.1.13"
__version__ = "0.1.14"