Merge branch 'develop' of https://github.com/ynput/ayon-core into feature/AY-4801_traypublisher-publish-editorial-exchange-package-product

Petr Kalis 2024-05-07 17:06:05 +02:00
commit 2a7ffa9cad
56 changed files with 1128 additions and 774 deletions


@ -24,7 +24,7 @@ class AERenderInstance(RenderInstance):
class CollectAERender(publish.AbstractCollectRender):
order = pyblish.api.CollectorOrder + 0.405
order = pyblish.api.CollectorOrder + 0.100
label = "Collect After Effects Render Layers"
hosts = ["aftereffects"]
@ -145,6 +145,7 @@ class CollectAERender(publish.AbstractCollectRender):
if "review" in instance.families:
# to skip ExtractReview locally
instance.families.remove("review")
instance.deadline = inst.data.get("deadline")
instances.append(instance)


@ -143,13 +143,19 @@ def deselect_all():
if obj.mode != 'OBJECT':
modes.append((obj, obj.mode))
bpy.context.view_layer.objects.active = obj
bpy.ops.object.mode_set(mode='OBJECT')
context_override = create_blender_context(active=obj)
with bpy.context.temp_override(**context_override):
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.select_all(action='DESELECT')
context_override = create_blender_context()
with bpy.context.temp_override(**context_override):
bpy.ops.object.select_all(action='DESELECT')
for p in modes:
bpy.context.view_layer.objects.active = p[0]
bpy.ops.object.mode_set(mode=p[1])
context_override = create_blender_context(active=p[0])
with bpy.context.temp_override(**context_override):
bpy.ops.object.mode_set(mode=p[1])
bpy.context.view_layer.objects.active = active
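The change above relies on Blender's context-override API. A minimal standalone sketch of the same pattern, assuming Blender 3.2+ (where bpy.context.temp_override() replaced passing override dicts to operators); the helper name set_object_mode is hypothetical:

import bpy

def set_object_mode(obj, mode="OBJECT"):
    # Build a minimal override so the operator acts on `obj` regardless of
    # the current context; create_blender_context() above does the same
    # with host-specific defaults.
    override = {
        "active_object": obj,
        "object": obj,
        "selected_objects": [obj],
    }
    with bpy.context.temp_override(**override):
        bpy.ops.object.mode_set(mode=mode)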


@ -169,7 +169,7 @@ def validate_comp_prefs(comp=None, force_repair=False):
def _on_repair():
attributes = dict()
for key, comp_key, _label in validations:
value = folder_value[key]
value = folder_attributes[key]
comp_key_full = "Comp.FrameFormat.{}".format(comp_key)
attributes[comp_key_full] = value
comp.SetPrefs(attributes)


@ -115,6 +115,7 @@ class CollectFusionRender(
if "review" in instance.families:
# to skip ExtractReview locally
instance.families.remove("review")
instance.deadline = inst.data.get("deadline")
instances.append(instance)


@ -177,7 +177,10 @@ class CollectFarmRender(publish.AbstractCollectRender):
outputFormat=info[1],
outputStartFrame=info[3],
leadingZeros=info[2],
ignoreFrameHandleCheck=True
ignoreFrameHandleCheck=True,
# TODO: 'inst' is not available here and must be determined; fix when
# reworking to the Publisher
# deadline=inst.data.get("deadline")
)
render_instance.context = context


@ -51,13 +51,12 @@ def open_file(filepath):
project = hiero.core.projects()[-1]
# open project file
hiero.core.openProject(filepath.replace(os.path.sep, "/"))
# close previous project
project.close()
# Close previous project if it's different from the current project.
filepath = filepath.replace(os.path.sep, "/")
if project.path().replace(os.path.sep, "/") != filepath:
# open project file
hiero.core.openProject(filepath)
project.close()
return True


@ -7,7 +7,8 @@ from ayon_core.hosts.houdini.api import lib
class CollectDataforCache(pyblish.api.InstancePlugin):
"""Collect data for caching to Deadline."""
order = pyblish.api.CollectorOrder + 0.04
# Run after Collect Frames
order = pyblish.api.CollectorOrder + 0.11
families = ["ass", "pointcache",
"mantraifd", "redshiftproxy",
"vdbcache"]


@ -17,7 +17,7 @@ class CollectFrames(pyblish.api.InstancePlugin):
label = "Collect Frames"
families = ["vdbcache", "imagesequence", "ass",
"mantraifd", "redshiftproxy", "review",
"bgeo"]
"pointcache"]
def process(self, instance):


@ -28,10 +28,15 @@ class ExtractAlembic(publish.Extractor):
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
if instance.data.get("frames"):
# list of files
files = instance.data["frames"]
else:
# single file
files = os.path.basename(output)
# We run the render
self.log.info("Writing alembic '%s' to '%s'" % (file_name,
self.log.info("Writing alembic '%s' to '%s'" % (files,
staging_dir))
render_rop(ropnode)
@ -42,7 +47,7 @@ class ExtractAlembic(publish.Extractor):
representation = {
'name': 'abc',
'ext': 'abc',
'files': file_name,
'files': files,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)


@ -1299,7 +1299,7 @@ def is_visible(node,
override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
override_visibility = cmds.getAttr('{}.overrideVisibility'.format(
node))
if override_enabled and override_visibility:
if override_enabled and not override_visibility:
return False
if parentHidden:
@ -4212,3 +4212,23 @@ def create_rig_animation_instance(
variant=namespace,
pre_create_data={"use_selection": True}
)
def get_node_index_under_parent(node: str) -> int:
"""Return the index of a DAG node under its parent.
Arguments:
node (str): A DAG Node path.
Returns:
int: The DAG node's index under its parent or the world.
"""
node = cmds.ls(node, long=True)[0] # enforce long names
parent = node.rsplit("|", 1)[0]
if not parent:
return cmds.ls(assemblies=True, long=True).index(node)
else:
return cmds.listRelatives(parent,
children=True,
fullPath=True).index(node)
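A small usage sketch of the new helper; the node names are hypothetical:

from maya import cmds

grp = cmds.group(empty=True, name="grp")
loc_a = cmds.spaceLocator(name="loc_a")[0]
loc_b = cmds.spaceLocator(name="loc_b")[0]
cmds.parent([loc_a, loc_b], grp)

print(get_node_index_under_parent("|grp|loc_b"))  # -> 1
print(get_node_index_under_parent("|grp"))        # index among assemblies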


@ -1,3 +1,5 @@
import json
from maya import cmds
from ayon_core.pipeline import (
@ -8,13 +10,15 @@ from ayon_core.pipeline import (
)
from ayon_core.pipeline.workfile.workfile_template_builder import (
TemplateAlreadyImported,
AbstractTemplateBuilder
AbstractTemplateBuilder,
PlaceholderPlugin,
PlaceholderItem,
)
from ayon_core.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,
)
from .lib import get_main_window
from .lib import read, imprint, get_main_window
PLACEHOLDER_SET = "PLACEHOLDERS_SET"
@ -86,6 +90,162 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
return True
class MayaPlaceholderPlugin(PlaceholderPlugin):
"""Base Placeholder Plugin for Maya with one unified cache.
Creates a locator as the placeholder node which, during populate,
provides all of the attributes defined on the locator's transform in
`placeholder.data`, with `placeholder.scene_identifier` being the
full path to the node.
Inherited classes must still implement `populate_placeholder`.
"""
use_selection_as_parent = True
item_class = PlaceholderItem
def _create_placeholder_name(self, placeholder_data):
return self.identifier.replace(".", "_")
def _collect_scene_placeholders(self):
nodes_by_identifier = self.builder.get_shared_populate_data(
"placeholder_nodes"
)
if nodes_by_identifier is None:
# Cache placeholder data to shared data
nodes = cmds.ls("*.plugin_identifier", long=True, objectsOnly=True)
nodes_by_identifier = {}
for node in nodes:
identifier = cmds.getAttr("{}.plugin_identifier".format(node))
nodes_by_identifier.setdefault(identifier, []).append(node)
# Set the cache
self.builder.set_shared_populate_data(
"placeholder_nodes", nodes_by_identifier
)
return nodes_by_identifier
def create_placeholder(self, placeholder_data):
parent = None
if self.use_selection_as_parent:
selection = cmds.ls(selection=True)
if len(selection) > 1:
raise ValueError(
"More than one node is selected. "
"Please select only one to define the parent."
)
parent = selection[0] if selection else None
placeholder_data["plugin_identifier"] = self.identifier
placeholder_name = self._create_placeholder_name(placeholder_data)
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
if parent:
placeholder = cmds.parent(placeholder, selection[0])[0]
self.imprint(placeholder, placeholder_data)
def update_placeholder(self, placeholder_item, placeholder_data):
node_name = placeholder_item.scene_identifier
changed_values = {}
for key, value in placeholder_data.items():
    if value != placeholder_item.data.get(key):
        changed_values[key] = value
    placeholder_item.data[key] = value
# Delete attributes to ensure we imprint new data with correct type
for key in changed_values.keys():
    if cmds.attributeQuery(key, node=node_name, exists=True):
        attribute = "{}.{}".format(node_name, key)
        cmds.deleteAttr(attribute)
self.imprint(node_name, changed_values)
def collect_placeholders(self):
placeholders = []
nodes_by_identifier = self._collect_scene_placeholders()
for node in nodes_by_identifier.get(self.identifier, []):
# TODO do data validations and maybe upgrades if they are invalid
placeholder_data = self.read(node)
placeholders.append(
self.item_class(scene_identifier=node,
data=placeholder_data,
plugin=self)
)
return placeholders
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Hide placeholder, add them to placeholder set.
Used only by PlaceholderCreateMixin and PlaceholderLoadMixin
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# Hide placeholder and add them to placeholder set
node = placeholder.scene_identifier
# If we just populate the placeholders from current scene, the
# placeholder set will not be created so account for that.
if not cmds.objExists(PLACEHOLDER_SET):
cmds.sets(name=PLACEHOLDER_SET, empty=True)
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr("{}.hiddenInOutliner".format(node), True)
def delete_placeholder(self, placeholder):
"""Remove placeholder if building was successful
Used only by PlaceholderCreateMixin and PlaceholderLoadMixin.
"""
node = placeholder.scene_identifier
# Deleting a placeholder node could make Maya delete any objectSets
# the node was a member of, so first remove it from those sets. This
# way the `PLACEHOLDERS_SET` will survive long enough
sets = cmds.listSets(o=node) or []
for object_set in sets:
cmds.sets(node, remove=object_set)
cmds.delete(node)
def imprint(self, node, data):
"""Imprint call for placeholder node"""
# Complicated data that can't be represented as flat maya attributes
# we write to json strings, e.g. multiselection EnumDef
for key, value in data.items():
if isinstance(value, (list, tuple, dict)):
data[key] = "JSON::{}".format(json.dumps(value))
imprint(node, data)
def read(self, node):
"""Read call for placeholder node"""
data = read(node)
# Complicated data that can't be represented as flat maya attributes
# we read from json strings, e.g. multiselection EnumDef
for key, value in data.items():
if isinstance(value, str) and value.startswith("JSON::"):
value = value[len("JSON::"):]  # strip off the "JSON::" prefix
data[key] = json.loads(value)
return data
def build_workfile_template(*args):
builder = MayaTemplateBuilder(registered_host())
builder.build_template()
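A hedged sketch of a concrete plugin built on the new base class; the identifier, label, and populate logic are hypothetical, and only the hooks shown above are assumed:

class ExampleNotePlaceholderPlugin(MayaPlaceholderPlugin):
    identifier = "example.note"
    label = "Example note"

    def populate_placeholder(self, placeholder):
        # `placeholder.data` holds the attributes imprinted on the locator,
        # `placeholder.scene_identifier` is the locator's full DAG path.
        print(placeholder.scene_identifier, placeholder.data)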


@ -1,3 +1,5 @@
from maya import cmds
from ayon_core.hosts.maya.api import (
lib,
plugin
@ -87,16 +89,24 @@ class CreateArnoldSceneSource(plugin.MayaCreator):
return defs
class CreateArnoldSceneSourceProxy(CreateArnoldSceneSource):
"""Arnold Scene Source Proxy
This product type facilitates working with proxy geometry in the viewport.
"""
identifier = "io.openpype.creators.maya.assproxy"
label = "Arnold Scene Source Proxy"
product_type = "assProxy"
icon = "cube"
def create(self, product_name, instance_data, pre_create_data):
from maya import cmds
instance = super(CreateArnoldSceneSource, self).create(
product_name, instance_data, pre_create_data
)
instance_node = instance.get("instance_node")
content = cmds.sets(name=instance_node + "_content_SET", empty=True)
proxy = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
cmds.sets([content, proxy], forceElement=instance_node)
cmds.sets([proxy], forceElement=instance_node)


@ -12,6 +12,7 @@ from ayon_core.hosts.maya.api.lib import (
unique_namespace,
get_attribute_input,
maintained_selection,
get_fps_for_current_context
)
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type
@ -29,7 +30,13 @@ class ArnoldStandinLoader(load.LoaderPlugin):
"""Load as Arnold standin"""
product_types = {
"ass", "animation", "model", "proxyAbc", "pointcache", "usd"
"ass",
"assProxy",
"animation",
"model",
"proxyAbc",
"pointcache",
"usd"
}
representations = {"ass", "abc", "usda", "usdc", "usd"}
@ -95,8 +102,10 @@ class ArnoldStandinLoader(load.LoaderPlugin):
sequence = is_sequence(os.listdir(os.path.dirname(repre_path)))
cmds.setAttr(standin_shape + ".useFrameExtension", sequence)
fps = float(version_attributes.get("fps")) or 25
cmds.setAttr(standin_shape + ".abcFPS", fps)
fps = (
version_attributes.get("fps") or get_fps_for_current_context()
)
cmds.setAttr(standin_shape + ".abcFPS", float(fps))
nodes = [root, standin, standin_shape]
if operator is not None:
@ -128,6 +137,18 @@ class ArnoldStandinLoader(load.LoaderPlugin):
proxy_path = "/".join([os.path.dirname(path), proxy_basename])
return proxy_basename, proxy_path
def _update_operators(self, string_replace_operator, proxy_basename, path):
cmds.setAttr(
string_replace_operator + ".match",
proxy_basename.split(".")[0],
type="string"
)
cmds.setAttr(
string_replace_operator + ".replace",
os.path.basename(path).split(".")[0],
type="string"
)
def _setup_proxy(self, shape, path, namespace):
proxy_basename, proxy_path = self._get_proxy_path(path)
@ -150,16 +171,7 @@ class ArnoldStandinLoader(load.LoaderPlugin):
"*.(@node=='{}')".format(node_type),
type="string"
)
cmds.setAttr(
string_replace_operator + ".match",
proxy_basename,
type="string"
)
cmds.setAttr(
string_replace_operator + ".replace",
os.path.basename(path),
type="string"
)
self._update_operators(string_replace_operator, proxy_basename, path)
cmds.connectAttr(
string_replace_operator + ".out",
@ -194,18 +206,9 @@ class ArnoldStandinLoader(load.LoaderPlugin):
path = get_representation_path(repre_entity)
proxy_basename, proxy_path = self._get_proxy_path(path)
# Whether there is proxy or so, we still update the string operator.
# Whether there is proxy or not, we still update the string operator.
# If no proxy exists, the string operator won't replace anything.
cmds.setAttr(
string_replace_operator + ".match",
proxy_basename,
type="string"
)
cmds.setAttr(
string_replace_operator + ".replace",
os.path.basename(path),
type="string"
)
self._update_operators(string_replace_operator, proxy_basename, path)
dso_path = path
if os.path.exists(proxy_path):


@ -10,21 +10,23 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
# Offset to be after renderable camera collection.
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Arnold Scene Source"
families = ["ass"]
families = ["ass", "assProxy"]
def process(self, instance):
objsets = instance.data["setMembers"]
instance.data["members"] = []
for set_member in instance.data["setMembers"]:
if cmds.nodeType(set_member) != "objectSet":
instance.data["members"].extend(self.get_hierarchy(set_member))
continue
for objset in objsets:
objset = str(objset)
members = cmds.sets(objset, query=True)
members = cmds.sets(set_member, query=True)
members = cmds.ls(members, long=True)
if members is None:
self.log.warning("Skipped empty instance: \"%s\" " % objset)
self.log.warning(
"Skipped empty instance: \"%s\" " % set_member
)
continue
if objset.endswith("content_SET"):
instance.data["contentMembers"] = self.get_hierarchy(members)
if objset.endswith("proxy_SET"):
if set_member.endswith("proxy_SET"):
instance.data["proxy"] = self.get_hierarchy(members)
# Use camera in object set if present else default to render globals
@ -33,7 +35,7 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)]
if renderable:
camera = renderable[0]
for node in instance.data["contentMembers"]:
for node in instance.data["members"]:
camera_shapes = cmds.listRelatives(
node, shapes=True, type="camera"
)
@ -46,18 +48,11 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
self.log.debug("data: {}".format(instance.data))
def get_hierarchy(self, nodes):
"""Return nodes with all their children.
Arguments:
nodes (List[str]): List of nodes to collect children hierarchy for
Returns:
list: Input nodes with their children hierarchy
"""
"""Return nodes with all their children"""
nodes = cmds.ls(nodes, long=True)
if not nodes:
return []
children = get_all_children(nodes, ignore_intermediate_objects=True)
return list(children.union(nodes))
children = get_all_children(nodes)
# Make sure the nodes merged with their children
# contain only unique entries
return list(set(nodes + list(children)))


@ -17,8 +17,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
families = ["ass"]
asciiAss = False
def process(self, instance):
staging_dir = self.staging_dir(instance)
def _pre_process(self, instance, staging_dir):
file_path = os.path.join(staging_dir, "{}.ass".format(instance.name))
# Mask
@ -70,24 +69,38 @@ class ExtractArnoldSceneSource(publish.Extractor):
"mask": mask
}
filenames, nodes_by_id = self._extract(
instance.data["contentMembers"], attribute_data, kwargs
)
if "representations" not in instance.data:
instance.data["representations"] = []
return attribute_data, kwargs
def process(self, instance):
staging_dir = self.staging_dir(instance)
attribute_data, kwargs = self._pre_process(instance, staging_dir)
filenames = self._extract(
instance.data["members"], attribute_data, kwargs
)
self._post_process(
instance, filenames, staging_dir, kwargs["startFrame"]
)
def _post_process(self, instance, filenames, staging_dir, frame_start):
nodes_by_id = self._nodes_by_id(instance[:])
representation = {
"name": "ass",
"ext": "ass",
"files": filenames if len(filenames) > 1 else filenames[0],
"stagingDir": staging_dir,
"frameStart": kwargs["startFrame"]
"frameStart": frame_start
}
instance.data["representations"].append(representation)
json_path = os.path.join(staging_dir, "{}.json".format(instance.name))
json_path = os.path.join(
staging_dir, "{}.json".format(instance.name)
)
with open(json_path, "w") as f:
json.dump(nodes_by_id, f)
@ -104,13 +117,68 @@ class ExtractArnoldSceneSource(publish.Extractor):
"Extracted instance {} to: {}".format(instance.name, staging_dir)
)
# Extract proxy.
if not instance.data.get("proxy", []):
return
def _nodes_by_id(self, nodes):
nodes_by_id = defaultdict(list)
kwargs["filename"] = file_path.replace(".ass", "_proxy.ass")
for node in nodes:
id = lib.get_id(node)
filenames, _ = self._extract(
if id is None:
continue
# Converting Maya hierarchy separator "|" to Arnold separator "/".
nodes_by_id[id].append(node.replace("|", "/"))
return nodes_by_id
def _extract(self, nodes, attribute_data, kwargs):
filenames = []
with lib.attribute_values(attribute_data):
with lib.maintained_selection():
self.log.debug(
"Writing: {}".format(nodes)
)
cmds.select(nodes, noExpand=True)
self.log.debug(
"Extracting ass sequence with: {}".format(kwargs)
)
exported_files = cmds.arnoldExportAss(**kwargs)
for file in exported_files:
filenames.append(os.path.split(file)[1])
self.log.debug("Exported: {}".format(filenames))
return filenames
class ExtractArnoldSceneSourceProxy(ExtractArnoldSceneSource):
"""Extract the content of the instance to an Arnold Scene Source file."""
label = "Extract Arnold Scene Source Proxy"
hosts = ["maya"]
families = ["assProxy"]
asciiAss = True
def process(self, instance):
staging_dir = self.staging_dir(instance)
attribute_data, kwargs = self._pre_process(instance, staging_dir)
filenames, _ = self._duplicate_extract(
instance.data["members"], attribute_data, kwargs
)
self._post_process(
instance, filenames, staging_dir, kwargs["startFrame"]
)
kwargs["filename"] = os.path.join(
staging_dir, "{}_proxy.ass".format(instance.name)
)
filenames, _ = self._duplicate_extract(
instance.data["proxy"], attribute_data, kwargs
)
@ -125,12 +193,11 @@ class ExtractArnoldSceneSource(publish.Extractor):
instance.data["representations"].append(representation)
def _extract(self, nodes, attribute_data, kwargs):
def _duplicate_extract(self, nodes, attribute_data, kwargs):
self.log.debug(
"Writing {} with:\n{}".format(kwargs["filename"], kwargs)
)
filenames = []
nodes_by_id = defaultdict(list)
# Duplicating nodes so they are direct children of the world. This
# makes the hierarchy of any exported ass file the same.
with lib.delete_after() as delete_bin:
@ -147,7 +214,9 @@ class ExtractArnoldSceneSource(publish.Extractor):
if not shapes:
continue
duplicate_transform = cmds.duplicate(node)[0]
basename = cmds.duplicate(node)[0]
parents = cmds.ls(node, long=True)[0].split("|")[:-1]
duplicate_transform = "|".join(parents + [basename])
if cmds.listRelatives(duplicate_transform, parent=True):
duplicate_transform = cmds.parent(
@ -172,28 +241,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
duplicate_nodes.extend(shapes)
delete_bin.append(duplicate_transform)
# Copy cbId to mtoa_constant.
for node in duplicate_nodes:
# Converting Maya hierarchy separator "|" to Arnold
# separator "/".
nodes_by_id[lib.get_id(node)].append(node.replace("|", "/"))
with lib.attribute_values(attribute_data):
with lib.maintained_selection():
self.log.debug(
"Writing: {}".format(duplicate_nodes)
)
cmds.select(duplicate_nodes, noExpand=True)
self.log.debug(
"Extracting ass sequence with: {}".format(kwargs)
)
exported_files = cmds.arnoldExportAss(**kwargs)
for file in exported_files:
filenames.append(os.path.split(file)[1])
self.log.debug("Exported: {}".format(filenames))
nodes_by_id = self._nodes_by_id(duplicate_nodes)
filenames = self._extract(duplicate_nodes, attribute_data, kwargs)
return filenames, nodes_by_id


@ -1,30 +1,56 @@
from maya import cmds
import pyblish.api
from ayon_core.pipeline.publish import (
ValidateContentsOrder, PublishValidationError
)
from ayon_core.hosts.maya.api.lib import is_visible
class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
"""Validate Arnold Scene Source.
We require at least 1 root node/parent for the meshes. This is to ensure we
can duplicate the nodes and preserve the names.
Ensure no nodes are hidden.
"""
If using proxies we need the nodes to share the same names and not be
order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass", "assProxy"]
label = "Validate Arnold Scene Source"
def process(self, instance):
# Validate against having hidden nodes, which would result in
# the extraction ignoring them.
nodes = instance.data["members"] + instance.data.get("proxy", [])
nodes = [x for x in nodes if cmds.objectType(x, isAType='dagNode')]
hidden_nodes = [
x for x in nodes if not is_visible(x, intermediateObject=False)
]
if hidden_nodes:
raise PublishValidationError(
"Found hidden nodes:\n\n{}\n\nPlease unhide for"
" publishing.".format("\n".join(hidden_nodes))
)
class ValidateArnoldSceneSourceProxy(pyblish.api.InstancePlugin):
"""Validate Arnold Scene Source Proxy.
When using proxies we need the nodes to share the same names and not be
parent to the world. This ends up needing at least two groups with content
nodes and proxy nodes in another.
"""
order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass"]
label = "Validate Arnold Scene Source"
families = ["assProxy"]
label = "Validate Arnold Scene Source Proxy"
def _get_nodes_by_name(self, nodes):
ungrouped_nodes = []
nodes_by_name = {}
parents = []
same_named_nodes = {}
for node in nodes:
node_split = node.split("|")
if len(node_split) == 2:
@ -35,33 +61,16 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
parents.append(parent)
node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]
# Check for same-named nodes, which can happen in different
# hierarchies.
if node_name in nodes_by_name:
try:
same_named_nodes[node_name].append(node)
except KeyError:
same_named_nodes[node_name] = [
nodes_by_name[node_name], node
]
nodes_by_name[node_name] = node
if same_named_nodes:
message = "Found nodes with the same name:"
for name, nodes in same_named_nodes.items():
message += "\n\n\"{}\":\n{}".format(name, "\n".join(nodes))
raise PublishValidationError(message)
return ungrouped_nodes, nodes_by_name, parents
def process(self, instance):
# Validate against nodes directly parented to world.
ungrouped_nodes = []
nodes, content_nodes_by_name, content_parents = (
self._get_nodes_by_name(instance.data["contentMembers"])
self._get_nodes_by_name(instance.data["members"])
)
ungrouped_nodes.extend(nodes)
@ -70,24 +79,21 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
)
ungrouped_nodes.extend(nodes)
# Validate against nodes directly parented to world.
if ungrouped_nodes:
raise PublishValidationError(
"Found nodes parented to the world: {}\n"
"All nodes need to be grouped.".format(ungrouped_nodes)
)
# Proxy validation.
if not instance.data.get("proxy", []):
return
# Validate that the amount of content and proxy nodes is the same.
if len(instance.data["contentMembers"]) != len(instance.data["proxy"]):
if len(instance.data["members"]) != len(instance.data["proxy"]):
raise PublishValidationError(
"Amount of content nodes ({}) and proxy nodes ({}) needs to "
"be the same.".format(
len(instance.data["contentMembers"]),
len(instance.data["proxy"])
"be the same.\nContent nodes: {}\nProxy nodes:{}".format(
len(instance.data["members"]),
len(instance.data["proxy"]),
instance.data["members"],
instance.data["proxy"]
)
)


@ -17,7 +17,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass"]
families = ["assProxy"]
label = "Validate Arnold Scene Source CBID"
actions = [RepairAction]
optional = False
@ -40,15 +40,11 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
@classmethod
def get_invalid_couples(cls, instance):
content_nodes_by_name = cls._get_nodes_by_name(
instance.data["contentMembers"]
)
proxy_nodes_by_name = cls._get_nodes_by_name(
instance.data.get("proxy", [])
)
nodes_by_name = cls._get_nodes_by_name(instance.data["members"])
proxy_nodes_by_name = cls._get_nodes_by_name(instance.data["proxy"])
invalid_couples = []
for content_name, content_node in content_nodes_by_name.items():
for content_name, content_node in nodes_by_name.items():
proxy_node = proxy_nodes_by_name.get(content_name, None)
if not proxy_node:
@ -70,7 +66,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
if not self.is_active(instance.data):
return
# Proxy validation.
if not instance.data.get("proxy", []):
if not instance.data["proxy"]:
return
# Validate for proxy nodes sharing the same cbId as content nodes.


@ -10,6 +10,7 @@ from ayon_core.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
PublishValidationError,
OptionalPyblishPluginMixin
)
from ayon_core.hosts.maya.api import lib
from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings
@ -37,7 +38,8 @@ def get_redshift_image_format_labels():
return mel.eval("{0}={0}".format(var))
class ValidateRenderSettings(pyblish.api.InstancePlugin):
class ValidateRenderSettings(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validates the global render settings
* File Name Prefix must start with: `<Scene>`
@ -55,7 +57,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
* Frame Padding must be:
* default: 4
* Animation must be toggle on, in Render Settings - Common tab:
* Animation must be toggled on, in Render Settings - Common tab:
* vray: Animation on standard of specific
* arnold: Frame / Animation ext: Any choice without "(Single Frame)"
* redshift: Animation toggled on
@ -67,10 +69,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
"""
order = ValidateContentsOrder
label = "Render Settings"
label = "Validate Render Settings"
hosts = ["maya"]
families = ["renderlayer"]
actions = [RepairAction]
optional = True
ImagePrefixes = {
'mentalray': 'defaultRenderGlobals.imageFilePrefix',
@ -112,6 +115,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
DEFAULT_PREFIX = "<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>"
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:


@ -1,87 +1,48 @@
import json
from maya import cmds
from ayon_core.pipeline.workfile.workfile_template_builder import (
PlaceholderPlugin,
LoadPlaceholderItem,
PlaceholderLoadMixin,
LoadPlaceholderItem
)
from ayon_core.hosts.maya.api.lib import (
read,
imprint,
get_reference_node
get_container_transforms,
get_node_parent,
get_node_index_under_parent
)
from ayon_core.hosts.maya.api.workfile_template_builder import (
MayaPlaceholderPlugin,
)
from ayon_core.hosts.maya.api.workfile_template_builder import PLACEHOLDER_SET
class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
class MayaPlaceholderLoadPlugin(MayaPlaceholderPlugin, PlaceholderLoadMixin):
identifier = "maya.load"
label = "Maya load"
def _collect_scene_placeholders(self):
# Cache placeholder data to shared data
placeholder_nodes = self.builder.get_shared_populate_data(
"placeholder_nodes"
)
if placeholder_nodes is None:
attributes = cmds.ls("*.plugin_identifier", long=True)
placeholder_nodes = {}
for attribute in attributes:
node_name = attribute.rpartition(".")[0]
placeholder_nodes[node_name] = (
self._parse_placeholder_node_data(node_name)
)
self.builder.set_shared_populate_data(
"placeholder_nodes", placeholder_nodes
)
return placeholder_nodes
def _parse_placeholder_node_data(self, node_name):
placeholder_data = read(node_name)
parent_name = (
cmds.getAttr(node_name + ".parent", asString=True)
or node_name.rpartition("|")[0]
or ""
)
if parent_name:
siblings = cmds.listRelatives(parent_name, children=True)
else:
siblings = cmds.ls(assemblies=True)
node_shortname = node_name.rpartition("|")[2]
current_index = cmds.getAttr(node_name + ".index", asString=True)
if current_index < 0:
current_index = siblings.index(node_shortname)
placeholder_data.update({
"parent": parent_name,
"index": current_index
})
return placeholder_data
item_class = LoadPlaceholderItem
def _create_placeholder_name(self, placeholder_data):
placeholder_name_parts = placeholder_data["builder_type"].split("_")
pos = 1
# Split builder type: context_assets, linked_assets, all_assets
prefix, suffix = placeholder_data["builder_type"].split("_", 1)
parts = [prefix]
# add family if any
placeholder_product_type = placeholder_data.get("product_type")
if placeholder_product_type is None:
placeholder_product_type = placeholder_data.get("family")
if placeholder_product_type:
placeholder_name_parts.insert(pos, placeholder_product_type)
pos += 1
parts.append(placeholder_product_type)
# add loader arguments if any
loader_args = placeholder_data["loader_args"]
if loader_args:
loader_args = json.loads(loader_args.replace('\'', '\"'))
values = [v for v in loader_args.values()]
for value in values:
placeholder_name_parts.insert(pos, value)
pos += 1
loader_args = eval(loader_args)
for value in loader_args.values():
parts.append(str(value))
placeholder_name = "_".join(placeholder_name_parts)
parts.append(suffix)
placeholder_name = "_".join(parts)
return placeholder_name.capitalize()
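A worked example of the naming logic above, using hypothetical placeholder data:

data = {
    "builder_type": "context_assets",
    "product_type": "model",
    "loader_args": "{'namespace': 'hero'}",
}
# prefix="context", suffix="assets"; parts grows to
# ["context", "model", "hero", "assets"], which joins and capitalizes to
# "Context_model_hero_assets"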
@ -104,68 +65,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
)
return loaded_representation_ids
def create_placeholder(self, placeholder_data):
selection = cmds.ls(selection=True)
if len(selection) > 1:
raise ValueError("More then one item are selected")
parent = selection[0] if selection else None
placeholder_data["plugin_identifier"] = self.identifier
placeholder_name = self._create_placeholder_name(placeholder_data)
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
if parent:
placeholder = cmds.parent(placeholder, selection[0])[0]
imprint(placeholder, placeholder_data)
# Add helper attributes to keep placeholder info
cmds.addAttr(
placeholder,
longName="parent",
hidden=True,
dataType="string"
)
cmds.addAttr(
placeholder,
longName="index",
hidden=True,
attributeType="short",
defaultValue=-1
)
cmds.setAttr(placeholder + ".parent", "", type="string")
def update_placeholder(self, placeholder_item, placeholder_data):
node_name = placeholder_item.scene_identifier
new_values = {}
for key, value in placeholder_data.items():
placeholder_value = placeholder_item.data.get(key)
if value != placeholder_value:
new_values[key] = value
placeholder_item.data[key] = value
for key in new_values.keys():
cmds.deleteAttr(node_name + "." + key)
imprint(node_name, new_values)
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, placeholder_data in scene_placeholders.items():
if placeholder_data.get("plugin_identifier") != self.identifier:
continue
# TODO do data validations and maybe upgrades if they are invalid
output.append(
LoadPlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_load_placeholder(placeholder)
@ -176,30 +75,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# Hide placeholder and add them to placeholder set
node = placeholder.scene_identifier
# If we just populate the placeholders from current scene, the
# placeholder set will not be created so account for that.
if not cmds.objExists(PLACEHOLDER_SET):
cmds.sets(name=PLACEHOLDER_SET, empty=True)
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr(node + ".hiddenInOutliner", True)
def delete_placeholder(self, placeholder):
"""Remove placeholder if building was successful"""
cmds.delete(placeholder.scene_identifier)
def load_succeed(self, placeholder, container):
self._parent_in_hierarchy(placeholder, container)
@ -215,56 +90,43 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
if not container:
return
roots = cmds.sets(container, q=True) or []
ref_node = None
try:
ref_node = get_reference_node(roots)
except AssertionError as e:
self.log.info(e.args[0])
# TODO: This currently returns only a single root but a loaded scene
# could technically load more than a single root
container_root = get_container_transforms(container, root=True)
nodes_to_parent = []
for root in roots:
if ref_node:
ref_root = cmds.referenceQuery(root, nodes=True)[0]
ref_root = (
cmds.listRelatives(ref_root, parent=True, path=True) or
[ref_root]
)
nodes_to_parent.extend(ref_root)
continue
if root.endswith("_RN"):
# Backwards compatibility for hardcoded reference names.
refRoot = cmds.referenceQuery(root, n=True)[0]
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
nodes_to_parent.extend(refRoot)
elif root not in cmds.listSets(allSets=True):
nodes_to_parent.append(root)
# Bugfix: The get_container_transforms does not recognize the load
# reference group currently
# TODO: Remove this when it does
parent = get_node_parent(container_root)
if parent:
container_root = parent
roots = [container_root]
elif not cmds.sets(root, q=True):
return
# Add the loaded roots to the holding sets if they exist
holding_sets = cmds.listSets(object=placeholder.scene_identifier) or []
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)
# Move loaded nodes to correct index in outliner hierarchy
# Parent the roots to the place of the placeholder locator and match
# its matrix
placeholder_form = cmds.xform(
placeholder.scene_identifier,
q=True,
query=True,
matrix=True,
worldSpace=True
)
scene_parent = cmds.listRelatives(
placeholder.scene_identifier, parent=True, fullPath=True
)
for node in set(nodes_to_parent):
cmds.reorder(node, front=True)
cmds.reorder(node, relative=placeholder.data["index"])
cmds.xform(node, matrix=placeholder_form, ws=True)
if scene_parent:
cmds.parent(node, scene_parent)
else:
if cmds.listRelatives(node, parent=True):
cmds.parent(node, world=True)
scene_parent = get_node_parent(placeholder.scene_identifier)
for node in set(roots):
cmds.xform(node, matrix=placeholder_form, worldSpace=True)
holding_sets = cmds.listSets(object=placeholder.scene_identifier)
if not holding_sets:
return
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)
if scene_parent != get_node_parent(node):
if scene_parent:
node = cmds.parent(node, scene_parent)[0]
else:
node = cmds.parent(node, world=True)[0]
# Move loaded nodes in index order next to their placeholder node
cmds.reorder(node, back=True)
index = get_node_index_under_parent(placeholder.scene_identifier)
cmds.reorder(node, front=True)
cmds.reorder(node, relative=index + 1)
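An illustration of the two-step reorder above, with hypothetical nodes: moving the node to the front resets its sibling index to 0, so a relative move of index + 1 parks it directly after the placeholder.

from maya import cmds

# Assume the placeholder locator sits at sibling index 2 under "|grp".
cmds.reorder("|grp|loaded_root", front=True)  # sibling index becomes 0
cmds.reorder("|grp|loaded_root", relative=3)  # shift down by index + 1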


@ -1,6 +1,8 @@
from .deadline_module import DeadlineModule
from .version import __version__
__all__ = (
"DeadlineModule",
"__version__"
)


@ -49,6 +49,10 @@ def requests_post(*args, **kwargs):
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
True) else True # noqa
auth = kwargs.get("auth")
if auth:
kwargs["auth"] = tuple(auth) # explicit cast to tuple
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.post(*args, **kwargs)
@ -70,6 +74,9 @@ def requests_get(*args, **kwargs):
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
True) else True # noqa
auth = kwargs.get("auth")
if auth:
kwargs["auth"] = tuple(auth)
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.get(*args, **kwargs)
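The explicit tuple cast matters because credentials may arrive as a list (e.g. deserialized from JSON settings), while requests only treats a 2-tuple as HTTP basic auth; a minimal sketch with hypothetical values:

auth = ["artist", "secret"]  # as it may arrive from site settings
response = requests_get(
    "http://localhost:8082/api/pools?NamesOnly=true",
    auth=tuple(auth),  # becomes ("artist", "secret")
)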
@ -434,9 +441,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
"""Plugin entry point."""
self._instance = instance
context = instance.context
self._deadline_url = context.data.get("defaultDeadline")
self._deadline_url = instance.data.get(
"deadlineUrl", self._deadline_url)
self._deadline_url = instance.data["deadline"]["url"]
assert self._deadline_url, "Requires Deadline Webservice URL"
@ -460,7 +465,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
self.plugin_info = self.get_plugin_info()
self.aux_files = self.get_aux_files()
job_id = self.process_submission()
auth = instance.data["deadline"]["auth"]
job_id = self.process_submission(auth)
self.log.info("Submitted job to Deadline: {}.".format(job_id))
# TODO: Find a way that's more generic and not render type specific
@ -473,10 +479,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
job_info=render_job_info,
plugin_info=render_plugin_info
)
render_job_id = self.submit(payload)
render_job_id = self.submit(payload, auth)
self.log.info("Render job id: %s", render_job_id)
def process_submission(self):
def process_submission(self, auth=None):
"""Process data for submission.
This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload
@ -487,7 +493,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
"""
payload = self.assemble_payload()
return self.submit(payload)
return self.submit(payload, auth)
@abstractmethod
def get_job_info(self):
@ -577,7 +583,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
"AuxFiles": aux_files or self.aux_files
}
def submit(self, payload):
def submit(self, payload, auth):
"""Submit payload to Deadline API end-point.
This takes the payload in the form of a JSON file and POSTs it to
@ -585,6 +591,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
Args:
payload (dict): dict to become json in deadline submission.
auth (tuple): (username, password)
Returns:
str: resulting Deadline job id.
@ -594,7 +601,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
"""
url = "{}/api/jobs".format(self._deadline_url)
response = requests_post(url, json=payload)
response = requests_post(url, json=payload,
auth=auth)
if not response.ok:
self.log.error("Submission failed!")
self.log.error(response.status_code)


@ -19,23 +19,23 @@ class DeadlineModule(AYONAddon, IPluginPaths):
def initialize(self, studio_settings):
# This module is always enabled
deadline_urls = {}
deadline_servers_info = {}
enabled = self.name in studio_settings
if enabled:
deadline_settings = studio_settings[self.name]
deadline_urls = {
url_item["name"]: url_item["value"]
deadline_servers_info = {
url_item["name"]: url_item
for url_item in deadline_settings["deadline_urls"]
}
if enabled and not deadline_urls:
if enabled and not deadline_servers_info:
enabled = False
self.log.warning((
"Deadline Webservice URLs are not specified. Disabling addon."
))
self.enabled = enabled
self.deadline_urls = deadline_urls
self.deadline_servers_info = deadline_servers_info
def get_plugin_paths(self):
"""Deadline plugin paths."""
@ -45,13 +45,15 @@ class DeadlineModule(AYONAddon, IPluginPaths):
}
@staticmethod
def get_deadline_pools(webservice, log=None):
def get_deadline_pools(webservice, auth=None, log=None):
"""Get pools from Deadline.
Args:
webservice (str): Server url.
log (Logger)
auth (Optional[Tuple[str, str]]): Tuple containing username,
password
log (Optional[Logger]): Logger to log errors to, if provided.
Returns:
list: Pools.
List[str]: Pools.
Raises:
RuntimeError: If deadline webservice is unreachable.
@ -63,7 +65,10 @@ class DeadlineModule(AYONAddon, IPluginPaths):
argument = "{}/api/pools?NamesOnly=true".format(webservice)
try:
response = requests_get(argument)
kwargs = {}
if auth:
kwargs["auth"] = auth
response = requests_get(argument, **kwargs)
except requests.exceptions.ConnectionError as exc:
msg = 'Cannot connect to DL web service {}'.format(webservice)
log.error(msg)
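A usage sketch of the updated signature; the webservice URL and credentials are hypothetical:

pools = DeadlineModule.get_deadline_pools(
    "http://localhost:8082",
    auth=("artist", "secret"),  # omit when authentication is disabled
)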


@ -13,17 +13,45 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
"""Collect Deadline Webservice URL from instance."""
# Run before collect_render.
order = pyblish.api.CollectorOrder + 0.005
order = pyblish.api.CollectorOrder + 0.225
label = "Deadline Webservice from the Instance"
families = ["rendering", "renderlayer"]
hosts = ["maya"]
targets = ["local"]
families = ["render",
"rendering",
"render.farm",
"renderFarm",
"renderlayer",
"maxrender",
"usdrender",
"redshift_rop",
"arnold_rop",
"mantra_rop",
"karma_rop",
"vray_rop",
"publish.hou",
"image"] # for Fusion
def process(self, instance):
instance.data["deadlineUrl"] = self._collect_deadline_url(instance)
instance.data["deadlineUrl"] = \
instance.data["deadlineUrl"].strip().rstrip("/")
if not instance.data.get("farm"):
self.log.debug("Should not be processed on farm, skipping.")
return
if not instance.data.get("deadline"):
instance.data["deadline"] = {}
# TODO: separate logic should be removed; all hosts should behave the same
host_name = instance.context.data["hostName"]
if host_name == "maya":
deadline_url = self._collect_deadline_url(instance)
else:
deadline_url = (instance.data.get("deadlineUrl") or # backwards
instance.data.get("deadline", {}).get("url"))
if deadline_url:
instance.data["deadline"]["url"] = deadline_url.strip().rstrip("/")
else:
instance.data["deadline"]["url"] = instance.context.data["deadline"]["defaultUrl"] # noqa
self.log.debug(
"Using {} for submission.".format(instance.data["deadlineUrl"]))
"Using {} for submission".format(instance.data["deadline"]["url"]))
def _collect_deadline_url(self, render_instance):
# type: (pyblish.api.Instance) -> str
@ -49,13 +77,13 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
["project_settings"]
["deadline"]
)
default_server = render_instance.context.data["defaultDeadline"]
default_server_url = (render_instance.context.data["deadline"]
["defaultUrl"])
# QUESTION: How and where is this set? Should it be removed?
instance_server = render_instance.data.get("deadlineServers")
if not instance_server:
self.log.debug("Using default server.")
return default_server
return default_server_url
# Get instance server as string.
if isinstance(instance_server, int):
@ -66,7 +94,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
default_servers = {
url_item["name"]: url_item["value"]
for url_item in deadline_settings["deadline_urls"]
for url_item in deadline_settings["deadline_servers_info"]
}
project_servers = (
render_instance.context.data


@ -18,10 +18,9 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
"""
# Run before collect_deadline_server_instance.
order = pyblish.api.CollectorOrder + 0.0025
order = pyblish.api.CollectorOrder + 0.200
label = "Default Deadline Webservice"
pass_mongo_url = False
targets = ["local"]
def process(self, context):
try:
@ -33,15 +32,17 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
deadline_settings = context.data["project_settings"]["deadline"]
deadline_server_name = deadline_settings["deadline_server"]
deadline_webservice = None
dl_server_info = None
if deadline_server_name:
deadline_webservice = deadline_module.deadline_urls.get(
dl_server_info = deadline_module.deadline_servers_info.get(
deadline_server_name)
default_deadline_webservice = deadline_module.deadline_urls["default"]
deadline_webservice = (
deadline_webservice
or default_deadline_webservice
)
if dl_server_info:
deadline_url = dl_server_info["value"]
else:
default_dl_server_info = deadline_module.deadline_servers_info["default"]
deadline_url = default_dl_server_info["value"]
context.data["defaultDeadline"] = deadline_webservice.strip().rstrip("/") # noqa
context.data["deadline"] = {}
context.data["deadline"]["defaultUrl"] = (
deadline_url.strip().rstrip("/"))
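The collector therefore leaves the default webservice URL in the context in the following shape (value illustrative):

context.data["deadline"] = {"defaultUrl": "http://localhost:8082"}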


@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
"""Collect user credentials
Requires:
context -> project_settings
instance.data["deadline"]["url"]
Provides:
instance.data["deadline"] -> require_authentication (bool)
instance.data["deadline"] -> auth (tuple (str, str)) -
(username, password) or None
"""
import pyblish.api
from ayon_api import get_server_api_connection
from ayon_core.modules.deadline.deadline_module import DeadlineModule
from ayon_core.modules.deadline import __version__
class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
"""Collects user name and password for artist if DL requires authentication
"""
order = pyblish.api.CollectorOrder + 0.250
label = "Collect Deadline User Credentials"
targets = ["local"]
hosts = ["aftereffects",
"blender",
"fusion",
"harmony",
"nuke",
"maya",
"max",
"houdini"]
families = ["render",
"rendering",
"render.farm",
"renderFarm",
"renderlayer",
"maxrender",
"usdrender",
"redshift_rop",
"arnold_rop",
"mantra_rop",
"karma_rop",
"vray_rop",
"publish.hou"]
def process(self, instance):
if not instance.data.get("farm"):
self.log.debug("Should not be processed on farm, skipping.")
return
collected_deadline_url = instance.data["deadline"]["url"]
if not collected_deadline_url:
raise ValueError("Instance doesn't have '[deadline][url]'.")
context_data = instance.context.data
deadline_settings = context_data["project_settings"]["deadline"]
deadline_server_name = None
# deadline url might be set directly from instance, need to find
# metadata for it
for deadline_info in deadline_settings["deadline_urls"]:
dl_settings_url = deadline_info["value"].strip().rstrip("/")
if dl_settings_url == collected_deadline_url:
deadline_server_name = deadline_info["name"]
break
if not deadline_server_name:
raise ValueError(f"Collected {collected_deadline_url} doesn't "
"match any site configured in Studio Settings")
instance.data["deadline"]["require_authentication"] = (
deadline_info["require_authentication"]
)
instance.data["deadline"]["auth"] = None
if not deadline_info["require_authentication"]:
return
# TODO import 'get_addon_site_settings' when available
# in public 'ayon_api'
local_settings = get_server_api_connection().get_addon_site_settings(
DeadlineModule.name, __version__)
local_settings = local_settings["local_settings"]
for server_info in local_settings:
if deadline_server_name == server_info["server_name"]:
instance.data["deadline"]["auth"] = (server_info["username"],
server_info["password"])


@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Deadline Authentication</title>
<description>
## Deadline authentication is required
This project's Settings specify that Deadline requires authentication.
### How to repair?
Please go to AYON Server > Site Settings and provide your Deadline username and password.
The password may be empty if Deadline is configured to allow that; ask your administrator.
</description>
</error>
</root>


@ -174,7 +174,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
instance.data["toBeRenderedOn"] = "deadline"
payload = self.assemble_payload()
return self.submit(payload)
return self.submit(payload,
auth=instance.data["deadline"]["auth"])
def from_published_scene(self):
"""


@ -2,9 +2,10 @@ import os
import re
import json
import getpass
import requests
import pyblish.api
from openpype_modules.deadline.abstract_submit_deadline import requests_post
class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit CelAction2D scene to Deadline
@ -30,11 +31,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
context = instance.context
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
deadline_url = instance.data.get("deadlineUrl")
deadline_url = instance.data["deadline"]["url"]
assert deadline_url, "Requires Deadline Webservice URL"
self.deadline_url = "{}/api/jobs".format(deadline_url)
@ -197,7 +194,8 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
self.log.debug("__ expectedFiles: `{}`".format(
instance.data["expectedFiles"]))
response = requests.post(self.deadline_url, json=payload)
response = requests_post(self.deadline_url, json=payload,
                         auth=instance.data["deadline"]["auth"])
if not response.ok:
self.log.error(


@ -2,17 +2,13 @@ import os
import json
import getpass
import requests
import pyblish.api
from openpype_modules.deadline.abstract_submit_deadline import requests_post
from ayon_core.pipeline.publish import (
AYONPyblishPluginMixin
)
from ayon_core.lib import (
BoolDef,
NumberDef,
)
from ayon_core.lib import NumberDef
class FusionSubmitDeadline(
@ -64,11 +60,6 @@ class FusionSubmitDeadline(
decimals=0,
minimum=1,
maximum=10
),
BoolDef(
"suspend_publish",
default=False,
label="Suspend publish"
)
]
@ -80,10 +71,6 @@ class FusionSubmitDeadline(
attribute_values = self.get_attr_values_from_data(
instance.data)
# add suspend_publish attributeValue to instance data
instance.data["suspend_publish"] = attribute_values[
"suspend_publish"]
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
@ -94,11 +81,7 @@ class FusionSubmitDeadline(
from ayon_core.hosts.fusion.api.lib import get_frame_path
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
deadline_url = instance.data.get("deadlineUrl")
deadline_url = instance.data["deadline"]["url"]
assert deadline_url, "Requires Deadline Webservice URL"
# Collect all saver instances in context that are to be rendered
@ -258,7 +241,8 @@ class FusionSubmitDeadline(
# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(deadline_url)
response = requests.post(url, json=payload)
auth = instance.data["deadline"]["auth"]
response = requests_post(url, json=payload, auth=auth)
if not response.ok:
raise Exception(response.text)


@ -10,7 +10,6 @@ from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
from ayon_core.lib import (
is_in_tests,
BoolDef,
TextDef,
NumberDef
)
@ -90,11 +89,6 @@ class HoudiniSubmitDeadline(
@classmethod
def get_attribute_defs(cls):
return [
BoolDef(
"suspend_publish",
default=False,
label="Suspend publish"
),
NumberDef(
"priority",
label="Priority",

View file

@ -187,11 +187,13 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
payload_data, project_settings)
job_infos, plugin_infos = payload
for job_info, plugin_info in zip(job_infos, plugin_infos):
self.submit(self.assemble_payload(job_info, plugin_info))
self.submit(self.assemble_payload(job_info, plugin_info),
instance.data["deadline"]["auth"])
else:
payload = self._use_published_name(payload_data, project_settings)
job_info, plugin_info = payload
self.submit(self.assemble_payload(job_info, plugin_info))
self.submit(self.assemble_payload(job_info, plugin_info),
instance.data["deadline"]["auth"])
def _use_published_name(self, data, project_settings):
# Not all hosts can import these modules.


@ -292,7 +292,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
return plugin_payload
def process_submission(self):
def process_submission(self, auth=None):
from maya import cmds
instance = self._instance
@ -332,7 +332,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
if "vrayscene" in instance.data["families"]:
self.log.debug("Submitting V-Ray scene render..")
vray_export_payload = self._get_vray_export_payload(payload_data)
export_job = self.submit(vray_export_payload)
export_job = self.submit(vray_export_payload,
instance.data["deadline"]["auth"])
payload = self._get_vray_render_payload(payload_data)
@ -351,7 +352,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
else:
# Submit main render job
job_info, plugin_info = payload
self.submit(self.assemble_payload(job_info, plugin_info))
self.submit(self.assemble_payload(job_info, plugin_info),
instance.data["deadline"]["auth"])
def _tile_render(self, payload):
"""Submit as tile render per frame with dependent assembly jobs."""
@ -451,7 +453,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
# Submit frame tile jobs
frame_tile_job_id = {}
for frame, tile_job_payload in frame_payloads.items():
job_id = self.submit(tile_job_payload)
job_id = self.submit(tile_job_payload,
instance.data["deadline"]["auth"])
frame_tile_job_id[frame] = job_id
# Define assembly payloads
@ -559,7 +562,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
"submitting assembly job {} of {}".format(i + 1,
num_assemblies)
)
assembly_job_id = self.submit(payload)
assembly_job_id = self.submit(payload,
instance.data["deadline"]["auth"])
assembly_job_ids.append(assembly_job_id)
instance.data["assemblySubmissionJobs"] = assembly_job_ids


@ -4,9 +4,9 @@ import json
import getpass
from datetime import datetime
import requests
import pyblish.api
from openpype_modules.deadline.abstract_submit_deadline import requests_post
from ayon_core.pipeline.publish import (
AYONPyblishPluginMixin
)
@ -76,11 +76,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
default=cls.use_gpu,
label="Use GPU"
),
BoolDef(
"suspend_publish",
default=False,
label="Suspend publish"
),
BoolDef(
"workfile_dependency",
default=cls.workfile_dependency,
@ -100,20 +95,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
instance.data["attributeValues"] = self.get_attr_values_from_data(
instance.data)
# add suspend_publish attributeValue to instance data
instance.data["suspend_publish"] = instance.data["attributeValues"][
"suspend_publish"]
families = instance.data["families"]
node = instance.data["transientData"]["node"]
context = instance.context
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
deadline_url = instance.data.get("deadlineUrl")
deadline_url = instance.data["deadline"]["url"]
assert deadline_url, "Requires Deadline Webservice URL"
self.deadline_url = "{}/api/jobs".format(deadline_url)
@ -436,7 +423,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
self.log.debug("__ expectedFiles: `{}`".format(
instance.data["expectedFiles"]))
response = requests.post(self.deadline_url, json=payload, timeout=10)
auth = instance.data["deadline"]["auth"]
response = requests_post(self.deadline_url, json=payload, timeout=10,
auth=auth)
if not response.ok:
raise Exception(response.text)


@ -5,10 +5,10 @@ import json
import re
from copy import deepcopy
import requests
import ayon_api
import pyblish.api
from openpype_modules.deadline.abstract_submit_deadline import requests_post
from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start
@ -147,9 +147,6 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
instance_settings = self.get_attr_values_from_data(instance.data)
initial_status = instance_settings.get("publishJobState", "Active")
# TODO: Remove this backwards compatibility of `suspend_publish`
if instance.data.get("suspend_publish"):
initial_status = "Suspended"
args = [
"--headless",
@ -212,7 +209,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
self.log.debug("Submitting Deadline publish job ...")
url = "{}/api/jobs".format(self.deadline_url)
response = requests.post(url, json=payload, timeout=10)
auth = instance.data["deadline"]["auth"]
response = requests_post(url, json=payload, timeout=10,
auth=auth)
if not response.ok:
raise Exception(response.text)
@ -344,11 +343,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
deadline_publish_job_id = None
if submission_type == "deadline":
# get default deadline webservice url from deadline module
self.deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
self.deadline_url = instance.data.get("deadlineUrl")
self.deadline_url = instance.data["deadline"]["url"]
assert self.deadline_url, "Requires Deadline Webservice URL"
deadline_publish_job_id = \
@ -356,7 +351,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
# Inject deadline url to instances.
for inst in instances:
inst["deadlineUrl"] = self.deadline_url
if "deadline" not in inst:
inst["deadline"] = {}
inst["deadline"] = instance.data["deadline"]
# publish job file
publish_job = {

View file

@ -5,11 +5,11 @@ import json
import re
from copy import deepcopy
import requests
import clique
import ayon_api
import pyblish.api
from openpype_modules.deadline.abstract_submit_deadline import requests_post
from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start
@ -88,9 +88,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
hosts = ["fusion", "max", "maya", "nuke", "houdini",
"celaction", "aftereffects", "harmony", "blender"]
families = ["render.farm", "render.frames_farm",
"prerender.farm", "prerender.frames_farm",
"renderlayer", "imagesequence",
families = ["render", "render.farm", "render.frames_farm",
"prerender", "prerender.farm", "prerender.frames_farm",
"renderlayer", "imagesequence", "image",
"vrayscene", "maxrender",
"arnold_rop", "mantra_rop",
"karma_rop", "vray_rop",
@ -224,9 +224,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
instance_settings = self.get_attr_values_from_data(instance.data)
initial_status = instance_settings.get("publishJobState", "Active")
# TODO: Remove this backwards compatibility of `suspend_publish`
if instance.data.get("suspend_publish"):
initial_status = "Suspended"
args = [
"--headless",
@ -306,7 +303,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
self.log.debug("Submitting Deadline publish job ...")
url = "{}/api/jobs".format(self.deadline_url)
response = requests.post(url, json=payload, timeout=10)
auth = instance.data["deadline"]["auth"]
response = requests_post(url, json=payload, timeout=10,
auth=auth)
if not response.ok:
raise Exception(response.text)
@ -314,7 +313,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
return deadline_publish_job_id
def process(self, instance):
# type: (pyblish.api.Instance) -> None
"""Process plugin.
@ -461,18 +459,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
}
# get default deadline webservice url from deadline module
self.deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
self.deadline_url = instance.data.get("deadlineUrl")
self.deadline_url = instance.data["deadline"]["url"]
assert self.deadline_url, "Requires Deadline Webservice URL"
deadline_publish_job_id = \
self._submit_deadline_post_job(instance, render_job, instances)
# Inject deadline url to instances.
# Inject Deadline info into instances so they can query DL for job ids for overrides
for inst in instances:
inst["deadlineUrl"] = self.deadline_url
if not "deadline" in inst:
inst["deadline"] = {}
inst["deadline"] = instance.data["deadline"]
# publish job file
publish_job = {
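Taken together, these publish-job hunks drop the legacy "deadlineUrl" key and the "suspend_publish" workaround in favor of one per-instance dict. A hypothetical sketch of its shape; only the key names are taken from the code above:
# Hypothetical contents of instance.data["deadline"]; the values here are
# made up, the key names come from the hunks above.
deadline_info = {
    "url": "http://127.0.0.1:8082",
    "require_authentication": True,
    "auth": ("artist_name", "secret"),  # (username, password)
}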

View file

@ -1,5 +1,7 @@
import pyblish.api
from ayon_core.pipeline import PublishXmlValidationError
from openpype_modules.deadline.abstract_submit_deadline import requests_get
@ -8,27 +10,42 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
label = "Validate Deadline Web Service"
order = pyblish.api.ValidatorOrder
hosts = ["maya", "nuke"]
families = ["renderlayer", "render"]
hosts = ["maya", "nuke", "aftereffects", "harmony", "fusion"]
families = ["renderlayer", "render", "render.farm"]
# cache
responses = {}
def process(self, instance):
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
deadline_url = instance.data.get("deadlineUrl")
self.log.debug(
"We have deadline URL on instance {}".format(deadline_url)
)
if not instance.data.get("farm"):
self.log.debug("Should not be processed on farm, skipping.")
return
deadline_url = instance.data["deadline"]["url"]
assert deadline_url, "Requires Deadline Webservice URL"
kwargs = {}
if instance.data["deadline"]["require_authentication"]:
auth = instance.data["deadline"]["auth"]
kwargs["auth"] = auth
if not auth[0]:
raise PublishXmlValidationError(
self,
"Deadline requires authentication. "
"At least username is required to be set in "
"Site Settings.")
if deadline_url not in self.responses:
self.responses[deadline_url] = requests_get(deadline_url)
self.responses[deadline_url] = requests_get(deadline_url, **kwargs)
response = self.responses[deadline_url]
if response.status_code == 401:
raise PublishXmlValidationError(
self,
"Deadline requires authentication. "
"Provided credentials are not working. "
"Please change them in Site Settings")
assert response.ok, "Response must be ok"
assert response.text.startswith("Deadline Web Service "), (
"Web service did not respond with 'Deadline Web Service'"

View file

@ -37,8 +37,9 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
self.log.debug("Skipping local instance.")
return
deadline_url = self.get_deadline_url(instance)
pools = self.get_pools(deadline_url)
deadline_url = instance.data["deadline"]["url"]
pools = self.get_pools(deadline_url,
instance.data["deadline"].get("auth"))
invalid_pools = {}
primary_pool = instance.data.get("primaryPool")
@ -61,22 +62,18 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
formatting_data={"pools_str": ", ".join(pools)}
)
def get_deadline_url(self, instance):
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
if instance.data.get("deadlineUrl"):
# if custom one is set in instance, use that
deadline_url = instance.data.get("deadlineUrl")
return deadline_url
def get_pools(self, deadline_url):
def get_pools(self, deadline_url, auth):
if deadline_url not in self.pools_per_url:
self.log.debug(
"Querying available pools for Deadline url: {}".format(
deadline_url)
)
pools = DeadlineModule.get_deadline_pools(deadline_url,
auth=auth,
log=self.log)
# some DL return "none" as a pool name
if not "none" in pools:
pools.append("none")
self.log.info("Available pools: {}".format(pools))
self.pools_per_url[deadline_url] = pools

View file

@ -199,16 +199,16 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
(dict): Job info from Deadline
"""
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
deadline_url = instance.data.get("deadlineUrl")
deadline_url = instance.data["deadline"]["url"]
assert deadline_url, "Requires Deadline Webservice URL"
url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
try:
response = requests_get(url)
kwargs = {}
auth = instance.data["deadline"]["auth"]
if auth:
kwargs["auth"] = auth
response = requests_get(url, **kwargs)
except requests.exceptions.ConnectionError:
self.log.error("Deadline is not accessible at "
"{}".format(deadline_url))

View file

@ -0,0 +1 @@
__version__ = "0.1.10"

View file

@ -1987,12 +1987,12 @@ class CreateContext:
"Folder '{}' was not found".format(folder_path)
)
task_name = None
if task_entity is None:
task_name = self.get_current_task_name()
task_entity = ayon_api.get_task_by_name(
project_name, folder_entity["id"], task_name
)
current_task_name = self.get_current_task_name()
if current_task_name:
task_entity = ayon_api.get_task_by_name(
project_name, folder_entity["id"], current_task_name
)
if pre_create_data is None:
pre_create_data = {}
@ -2018,7 +2018,7 @@ class CreateContext:
instance_data = {
"folderPath": folder_entity["path"],
"task": task_name,
"task": task_entity["name"] if task_entity else None,
"productType": creator.product_type,
"variant": variant
}
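Because the viewer strips the +/- markers, old and new lines are interleaved above. A condensed sketch of the resulting task resolution, with the names from the hunk pulled into a hypothetical helper:
import ayon_api
def resolve_instance_task(task_entity, current_task_name, project_name,
                          folder_entity):
    # Only query a task when the current context actually has a task name.
    if task_entity is None and current_task_name:
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], current_task_name
        )
    # The instance's "task" now falls back to None instead of failing when
    # no task entity could be resolved.
    return task_entity["name"] if task_entity else None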

View file

@ -80,6 +80,7 @@ class RenderInstance(object):
anatomyData = attr.ib(default=None)
outputDir = attr.ib(default=None)
context = attr.ib(default=None)
deadline = attr.ib(default=None)
# The source instance the data of this render instance should merge into
source_instance = attr.ib(default=None, type=pyblish.api.Instance)
@ -215,13 +216,12 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
# add additional data
data = self.add_additional_data(data)
render_instance_dict = attr.asdict(render_instance)
# Merge into source instance if provided, otherwise create instance
instance = render_instance_dict.pop("source_instance", None)
instance = render_instance.source_instance
if instance is None:
instance = context.create_instance(render_instance.name)
render_instance_dict = attr.asdict(render_instance)
instance.data.update(render_instance_dict)
instance.data.update(data)

View file

@ -0,0 +1,263 @@
import os
import time
import collections
import ayon_api
from ayon_core.lib.local_settings import get_ayon_appdirs
FileInfo = collections.namedtuple(
"FileInfo",
("path", "size", "modification_time")
)
class ThumbnailsCache:
"""Cache of thumbnails on local storage.
Thumbnails are cached to a predefined directory in appdirs. Each project
has its own subfolder of thumbnails, because each project has its own
thumbnail id validation; file names are thumbnail ids with a matching
extension. Extensions are predefined (.png and .jpeg).
The cache has a cleanup mechanism which is triggered on initialization by
default. The cleanup has 2 levels:
1. soft cleanup which removes all files that are older than 'days_alive'
2. max size cleanup which removes files until the thumbnails folder
contains less than 'max_filesize'
- this is time consuming so it's not triggered automatically
Args:
cleanup (bool): Trigger soft cleanup (Cleanup expired thumbnails).
"""
# Lifetime of thumbnails (in seconds)
# - default 3 days
days_alive = 3
# Max size of thumbnail directory (in bytes)
# - default 2 Gb
max_filesize = 2 * 1024 * 1024 * 1024
def __init__(self, cleanup=True):
self._thumbnails_dir = None
self._days_alive_secs = self.days_alive * 24 * 60 * 60
if cleanup:
self.cleanup()
def get_thumbnails_dir(self):
"""Root directory where thumbnails are stored.
Returns:
str: Path to thumbnails root.
"""
if self._thumbnails_dir is None:
self._thumbnails_dir = get_ayon_appdirs("thumbnails")
return self._thumbnails_dir
thumbnails_dir = property(get_thumbnails_dir)
def get_thumbnails_dir_file_info(self):
"""Get information about all files in thumbnails directory.
Returns:
List[FileInfo]: List of file information about all files.
"""
thumbnails_dir = self.thumbnails_dir
files_info = []
if not os.path.exists(thumbnails_dir):
return files_info
for root, _, filenames in os.walk(thumbnails_dir):
for filename in filenames:
path = os.path.join(root, filename)
files_info.append(FileInfo(
path, os.path.getsize(path), os.path.getmtime(path)
))
return files_info
def get_thumbnails_dir_size(self, files_info=None):
"""Got full size of thumbnail directory.
Args:
files_info (List[FileInfo]): Prepared file information about
files in thumbnail directory.
Returns:
int: File size of all files in thumbnail directory.
"""
if files_info is None:
files_info = self.get_thumbnails_dir_file_info()
if not files_info:
return 0
return sum(
file_info.size
for file_info in files_info
)
def cleanup(self, check_max_size=False):
"""Cleanup thumbnails directory.
Args:
check_max_size (bool): Also cleanup files to match max size of
thumbnails directory.
"""
thumbnails_dir = self.get_thumbnails_dir()
# Skip if thumbnails dir does not exist yet
if not os.path.exists(thumbnails_dir):
return
self._soft_cleanup(thumbnails_dir)
if check_max_size:
self._max_size_cleanup(thumbnails_dir)
def _soft_cleanup(self, thumbnails_dir):
current_time = time.time()
for root, _, filenames in os.walk(thumbnails_dir):
for filename in filenames:
path = os.path.join(root, filename)
modification_time = os.path.getmtime(path)
if current_time - modification_time > self._days_alive_secs:
os.remove(path)
def _max_size_cleanup(self, thumbnails_dir):
files_info = self.get_thumbnails_dir_file_info()
size = self.get_thumbnails_dir_size(files_info)
if size < self.max_filesize:
return
sorted_file_info = collections.deque(
sorted(files_info, key=lambda item: item.modification_time)
)
diff = size - self.max_filesize
while diff > 0:
if not sorted_file_info:
break
file_info = sorted_file_info.popleft()
diff -= file_info.size
os.remove(file_info.path)
def get_thumbnail_filepath(self, project_name, thumbnail_id):
"""Get thumbnail by thumbnail id.
Args:
project_name (str): Name of project.
thumbnail_id (str): Thumbnail id.
Returns:
Union[str, None]: Path to thumbnail image or None if thumbnail
is not cached yet.
"""
if not thumbnail_id:
return None
for ext in (
".png",
".jpeg",
):
filepath = os.path.join(
self.thumbnails_dir, project_name, thumbnail_id + ext
)
if os.path.exists(filepath):
return filepath
return None
def get_project_dir(self, project_name):
"""Path to root directory for specific project.
Args:
project_name (str): Name of project for which root directory path
should be returned.
Returns:
str: Path to root of project's thumbnails.
"""
return os.path.join(self.thumbnails_dir, project_name)
def make_sure_project_dir_exists(self, project_name):
project_dir = self.get_project_dir(project_name)
if not os.path.exists(project_dir):
os.makedirs(project_dir)
return project_dir
def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
"""Store thumbnail to cache folder.
Args:
project_name (str): Project where the thumbnail belongs to.
thumbnail_id (str): Thumbnail id.
content (bytes): Byte content of thumbnail file.
mime_type (str): Type of content.
Returns:
str: Path to cached thumbnail image file.
"""
if mime_type == "image/png":
ext = ".png"
elif mime_type == "image/jpeg":
ext = ".jpeg"
else:
raise ValueError(
"Unknown mime type for thumbnail \"{}\"".format(mime_type))
project_dir = self.make_sure_project_dir_exists(project_name)
thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
with open(thumbnail_path, "wb") as stream:
stream.write(content)
current_time = time.time()
os.utime(thumbnail_path, (current_time, current_time))
return thumbnail_path
class _CacheItems:
thumbnails_cache = ThumbnailsCache()
def get_thumbnail_path(project_name, thumbnail_id):
"""Get path to thumbnail image.
Args:
project_name (str): Project where thumbnail belongs to.
thumbnail_id (Union[str, None]): Thumbnail id.
Returns:
Union[str, None]: Path to thumbnail image or None if the thumbnail
id is not valid or the thumbnail could not be retrieved.
"""
if not thumbnail_id:
return None
filepath = _CacheItems.thumbnails_cache.get_thumbnail_filepath(
project_name, thumbnail_id
)
if filepath is not None:
return filepath
# 'ayon_api' had a bug, public function
# 'get_thumbnail_by_id' did not return output of
# 'ServerAPI' method.
con = ayon_api.get_server_api_connection()
result = con.get_thumbnail_by_id(project_name, thumbnail_id)
if result is not None and result.is_valid:
return _CacheItems.thumbnails_cache.store_thumbnail(
project_name,
thumbnail_id,
result.content,
result.content_type
)
return None
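A short usage sketch of the new module; the project name and thumbnail id below are placeholders:
from ayon_core.pipeline.thumbnails import get_thumbnail_path
# Resolves from the local appdirs cache first; on a miss the thumbnail is
# downloaded through ayon_api and stored in the cache before returning.
path = get_thumbnail_path("my_project", "0123456789abcdef")
if path is None:
    print("Thumbnail id was empty or could not be retrieved.")
else:
    print("Cached thumbnail at:", path)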

View file

@ -42,7 +42,7 @@ def prepare_changes(old_entity, new_entity):
Returns:
dict[str, Any]: Changes that have new entity.
"""
changes = {}
for key in set(new_entity.keys()):
@ -121,6 +121,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"setdress",
"layout",
"ass",
"assProxy",
"vdbcache",
"scene",
"vrayproxy",

Binary image file changed (not shown; after: 16 KiB).

View file

@ -1,234 +1,15 @@
import os
import time
import collections
import ayon_api
import appdirs
from ayon_core.lib import NestedCacheItem
FileInfo = collections.namedtuple(
"FileInfo",
("path", "size", "modification_time")
)
class ThumbnailsCache:
"""Cache of thumbnails on local storage.
Thumbnails are cached to appdirs to predefined directory. Each project has
own subfolder with thumbnails -> that's because each project has own
thumbnail id validation and file names are thumbnail ids with matching
extension. Extensions are predefined (.png and .jpeg).
Cache has cleanup mechanism which is triggered on initialized by default.
The cleanup has 2 levels:
1. soft cleanup which remove all files that are older then 'days_alive'
2. max size cleanup which remove all files until the thumbnails folder
contains less then 'max_filesize'
- this is time consuming so it's not triggered automatically
Args:
cleanup (bool): Trigger soft cleanup (Cleanup expired thumbnails).
"""
# Lifetime of thumbnails (in seconds)
# - default 3 days
days_alive = 3
# Max size of thumbnail directory (in bytes)
# - default 2 Gb
max_filesize = 2 * 1024 * 1024 * 1024
def __init__(self, cleanup=True):
self._thumbnails_dir = None
self._days_alive_secs = self.days_alive * 24 * 60 * 60
if cleanup:
self.cleanup()
def get_thumbnails_dir(self):
"""Root directory where thumbnails are stored.
Returns:
str: Path to thumbnails root.
"""
if self._thumbnails_dir is None:
# TODO use generic function
directory = appdirs.user_data_dir("AYON", "Ynput")
self._thumbnails_dir = os.path.join(directory, "thumbnails")
return self._thumbnails_dir
thumbnails_dir = property(get_thumbnails_dir)
def get_thumbnails_dir_file_info(self):
"""Get information about all files in thumbnails directory.
Returns:
List[FileInfo]: List of file information about all files.
"""
thumbnails_dir = self.thumbnails_dir
files_info = []
if not os.path.exists(thumbnails_dir):
return files_info
for root, _, filenames in os.walk(thumbnails_dir):
for filename in filenames:
path = os.path.join(root, filename)
files_info.append(FileInfo(
path, os.path.getsize(path), os.path.getmtime(path)
))
return files_info
def get_thumbnails_dir_size(self, files_info=None):
"""Got full size of thumbnail directory.
Args:
files_info (List[FileInfo]): Prepared file information about
files in thumbnail directory.
Returns:
int: File size of all files in thumbnail directory.
"""
if files_info is None:
files_info = self.get_thumbnails_dir_file_info()
if not files_info:
return 0
return sum(
file_info.size
for file_info in files_info
)
def cleanup(self, check_max_size=False):
"""Cleanup thumbnails directory.
Args:
check_max_size (bool): Also cleanup files to match max size of
thumbnails directory.
"""
thumbnails_dir = self.get_thumbnails_dir()
# Skip if thumbnails dir does not exist yet
if not os.path.exists(thumbnails_dir):
return
self._soft_cleanup(thumbnails_dir)
if check_max_size:
self._max_size_cleanup(thumbnails_dir)
def _soft_cleanup(self, thumbnails_dir):
current_time = time.time()
for root, _, filenames in os.walk(thumbnails_dir):
for filename in filenames:
path = os.path.join(root, filename)
modification_time = os.path.getmtime(path)
if current_time - modification_time > self._days_alive_secs:
os.remove(path)
def _max_size_cleanup(self, thumbnails_dir):
files_info = self.get_thumbnails_dir_file_info()
size = self.get_thumbnails_dir_size(files_info)
if size < self.max_filesize:
return
sorted_file_info = collections.deque(
sorted(files_info, key=lambda item: item.modification_time)
)
diff = size - self.max_filesize
while diff > 0:
if not sorted_file_info:
break
file_info = sorted_file_info.popleft()
diff -= file_info.size
os.remove(file_info.path)
def get_thumbnail_filepath(self, project_name, thumbnail_id):
"""Get thumbnail by thumbnail id.
Args:
project_name (str): Name of project.
thumbnail_id (str): Thumbnail id.
Returns:
Union[str, None]: Path to thumbnail image or None if thumbnail
is not cached yet.
"""
if not thumbnail_id:
return None
for ext in (
".png",
".jpeg",
):
filepath = os.path.join(
self.thumbnails_dir, project_name, thumbnail_id + ext
)
if os.path.exists(filepath):
return filepath
return None
def get_project_dir(self, project_name):
"""Path to root directory for specific project.
Args:
project_name (str): Name of project for which root directory path
should be returned.
Returns:
str: Path to root of project's thumbnails.
"""
return os.path.join(self.thumbnails_dir, project_name)
def make_sure_project_dir_exists(self, project_name):
project_dir = self.get_project_dir(project_name)
if not os.path.exists(project_dir):
os.makedirs(project_dir)
return project_dir
def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
"""Store thumbnail to cache folder.
Args:
project_name (str): Project where the thumbnail belong to.
thumbnail_id (str): Id of thumbnail.
content (bytes): Byte content of thumbnail file.
mime_data (str): Type of content.
Returns:
str: Path to cached thumbnail image file.
"""
if mime_type == "image/png":
ext = ".png"
elif mime_type == "image/jpeg":
ext = ".jpeg"
else:
raise ValueError(
"Unknown mime type for thumbnail \"{}\"".format(mime_type))
project_dir = self.make_sure_project_dir_exists(project_name)
thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
with open(thumbnail_path, "wb") as stream:
stream.write(content)
current_time = time.time()
os.utime(thumbnail_path, (current_time, current_time))
return thumbnail_path
from ayon_core.pipeline.thumbnails import get_thumbnail_path
class ThumbnailsModel:
entity_cache_lifetime = 240 # In seconds
def __init__(self):
self._thumbnail_cache = ThumbnailsCache()
self._paths_cache = collections.defaultdict(dict)
self._folders_cache = NestedCacheItem(
levels=2, lifetime=self.entity_cache_lifetime)
@ -283,28 +64,7 @@ class ThumbnailsModel:
if thumbnail_id in project_cache:
return project_cache[thumbnail_id]
filepath = self._thumbnail_cache.get_thumbnail_filepath(
project_name, thumbnail_id
)
if filepath is not None:
project_cache[thumbnail_id] = filepath
return filepath
# 'ayon_api' had a bug, public function
# 'get_thumbnail_by_id' did not return output of
# 'ServerAPI' method.
con = ayon_api.get_server_api_connection()
result = con.get_thumbnail_by_id(project_name, thumbnail_id)
if result is None:
pass
elif result.is_valid:
filepath = self._thumbnail_cache.store_thumbnail(
project_name,
thumbnail_id,
result.content,
result.content_type
)
filepath = get_thumbnail_path(project_name, thumbnail_id)
project_cache[thumbnail_id] = filepath
return filepath

View file

@ -281,13 +281,20 @@ def prepare_app_environments(
app.environment
]
task_entity = data.get("task_entity")
folder_entity = data.get("folder_entity")
# Add tools environments
groups_by_name = {}
tool_by_group_name = collections.defaultdict(dict)
if folder_entity:
# Make sure each tool group can be added only once
for key in folder_entity["attrib"].get("tools") or []:
tools = None
if task_entity:
tools = task_entity["attrib"].get("tools")
if tools is None and folder_entity:
tools = folder_entity["attrib"].get("tools")
if tools:
for key in tools:
tool = app.manager.tools.get(key)
if not tool or not tool.is_valid_for_app(app):
continue
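The hunk gives task-level tool overrides priority over folder-level ones. A condensed sketch of just that fallback, assuming entities are plain dicts the way AYON returns them:
def resolve_tools(task_entity, folder_entity):
    # Task "tools" win whenever they are set (not None); otherwise the
    # folder "tools" are used; default to an empty list.
    tools = None
    if task_entity:
        tools = task_entity["attrib"].get("tools")
    if tools is None and folder_entity:
        tools = folder_entity["attrib"].get("tools")
    return tools or []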

View file

@ -1,6 +1,6 @@
name = "applications"
title = "Applications"
version = "0.2.1"
version = "0.2.2"
ayon_server_version = ">=1.0.7"
ayon_launcher_version = ">=1.0.2"

View file

@ -1271,6 +1271,28 @@
}
]
},
"equalizer": {
"enabled": true,
"label": "3DEqualizer",
"icon": "{}/app_icons/3de4.png",
"host_name": "equalizer",
"environment": "{}",
"variants": [
{
"name": "7-1v2",
"label": "7.1v2",
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\3DE4_win64_r7.1v2\\bin\\3DE4.exe"
],
"darwin": [],
"linux": []
},
"environment": "{}"
}
]
},
"additional_apps": []
}
}

View file

@ -190,6 +190,8 @@ class ApplicationsSettings(BaseSettingsModel):
default_factory=AppGroupWithPython, title="OpenRV")
zbrush: AppGroup = SettingsField(
default_factory=AppGroupWithPython, title="Zbrush")
equalizer: AppGroup = SettingsField(
default_factory=AppGroupWithPython, title="3DEqualizer")
additional_apps: list[AdditionalAppGroup] = SettingsField(
default_factory=list, title="Additional Applications")

View file

@ -1,3 +1,3 @@
name = "deadline"
title = "Deadline"
version = "0.1.10"
version = "0.1.11"

View file

@ -2,11 +2,13 @@ from typing import Type
from ayon_server.addons import BaseServerAddon
from .settings import DeadlineSettings, DEFAULT_VALUES
from .settings import DeadlineSettings, DEFAULT_VALUES, DeadlineSiteSettings
class Deadline(BaseServerAddon):
settings_model: Type[DeadlineSettings] = DeadlineSettings
site_settings_model: Type[DeadlineSiteSettings] = DeadlineSiteSettings
async def get_default_settings(self):
settings_model_cls = self.get_settings_model()

View file

@ -2,9 +2,11 @@ from .main import (
DeadlineSettings,
DEFAULT_VALUES,
)
from .site_settings import DeadlineSiteSettings
__all__ = (
"DeadlineSettings",
"DeadlineSiteSettings",
"DEFAULT_VALUES",
)

View file

@ -15,12 +15,6 @@ from .publish_plugins import (
)
class ServerListSubmodel(BaseSettingsModel):
_layout = "compact"
name: str = SettingsField(title="Name")
value: str = SettingsField(title="Value")
async def defined_deadline_ws_name_enum_resolver(
addon: "BaseServerAddon",
settings_variant: str = "production",
@ -32,25 +26,40 @@ async def defined_deadline_ws_name_enum_resolver(
settings = await addon.get_studio_settings(variant=settings_variant)
ws_urls = []
ws_server_name = []
for deadline_url_item in settings.deadline_urls:
ws_urls.append(deadline_url_item.name)
ws_server_name.append(deadline_url_item.name)
return ws_urls
return ws_server_name
class ServerItemSubmodel(BaseSettingsModel):
"""Connection info about configured DL servers."""
_layout = "compact"
name: str = SettingsField(title="Name")
value: str = SettingsField(title="Url")
require_authentication: bool = SettingsField(
False,
title="Require authentication")
ssl: bool = SettingsField(False,
title="SSL")
class DeadlineSettings(BaseSettingsModel):
deadline_urls: list[ServerListSubmodel] = SettingsField(
# configured DL servers
deadline_urls: list[ServerItemSubmodel] = SettingsField(
default_factory=list,
title="System Deadline Webservice URLs",
title="System Deadline Webservice Info",
scope=["studio"],
)
# name(key) of selected server for project
deadline_server: str = SettingsField(
title="Project deadline server",
title="Project Deadline server name",
section="---",
scope=["project"],
enum_resolver=defined_deadline_ws_name_enum_resolver
)
publish: PublishPluginsModel = SettingsField(
default_factory=PublishPluginsModel,
title="Publish Plugins",
@ -62,11 +71,14 @@ class DeadlineSettings(BaseSettingsModel):
return value
DEFAULT_VALUES = {
"deadline_urls": [
{
"name": "default",
"value": "http://127.0.0.1:8082"
"value": "http://127.0.0.1:8082",
"require_authentication": False,
"ssl": False
}
],
"deadline_server": "default",

View file

@ -191,7 +191,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel):
@validator(
"limit_groups",
"env_allowed_keys",
"env_search_replace_values")
def validate_unique_names(cls, value):
ensure_unique_names(value)

View file

@ -0,0 +1,26 @@
from ayon_server.settings import (
BaseSettingsModel,
SettingsField,
)
from .main import defined_deadline_ws_name_enum_resolver
class CredentialPerServerModel(BaseSettingsModel):
"""Provide credentials for configured DL servers"""
_layout = "expanded"
server_name: str = SettingsField("",
title="DL server name",
enum_resolver=defined_deadline_ws_name_enum_resolver)
username: str = SettingsField("",
title="Username")
password: str = SettingsField("",
title="Password")
class DeadlineSiteSettings(BaseSettingsModel):
local_settings: list[CredentialPerServerModel] = SettingsField(
default_factory=list,
title="Local setting",
description="Please provide credentials for configured Deadline servers",
)
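A hypothetical example of filled-in site settings matching this model; the server name must match one of the studio-configured Deadline servers and the credentials are placeholders:
# Hypothetical site-settings data for DeadlineSiteSettings; keys mirror the
# fields declared above, values are illustrative only.
site_settings = {
    "local_settings": [
        {
            "server_name": "default",
            "username": "artist_name",
            "password": "secret",
        }
    ]
}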

View file

@ -229,7 +229,7 @@ class ValidateAttributesModel(BaseSettingsModel):
if not success:
raise BadRequestException(
"The attibutes can't be parsed as json object"
"The attributes can't be parsed as json object"
)
return value
@ -265,7 +265,7 @@ class ValidateUnrealStaticMeshNameModel(BaseSettingsModel):
enabled: bool = SettingsField(title="ValidateUnrealStaticMeshName")
optional: bool = SettingsField(title="Optional")
validate_mesh: bool = SettingsField(title="Validate mesh names")
validate_collision: bool = SettingsField(title="Validate collison names")
validate_collision: bool = SettingsField(title="Validate collision names")
class ValidateCycleErrorModel(BaseSettingsModel):
@ -288,7 +288,7 @@ class ValidatePluginPathAttributesModel(BaseSettingsModel):
and the node attribute is <b>abc_file</b>
"""
enabled: bool = True
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
active: bool = SettingsField(title="Active")
attribute: list[ValidatePluginPathAttributesAttrModel] = SettingsField(
@ -310,6 +310,9 @@ class RendererAttributesModel(BaseSettingsModel):
class ValidateRenderSettingsModel(BaseSettingsModel):
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
active: bool = SettingsField(title="Active")
arnold_render_attributes: list[RendererAttributesModel] = SettingsField(
default_factory=list, title="Arnold Render Attributes")
vray_render_attributes: list[RendererAttributesModel] = SettingsField(
@ -613,7 +616,7 @@ class ExtractGPUCacheModel(BaseSettingsModel):
title="Optimize Animations For Motion Blur"
)
writeMaterials: bool = SettingsField(title="Write Materials")
useBaseTessellation: bool = SettingsField(title="User Base Tesselation")
useBaseTessellation: bool = SettingsField(title="User Based Tessellation")
class PublishersModel(BaseSettingsModel):
@ -1171,6 +1174,9 @@ DEFAULT_PUBLISH_SETTINGS = {
]
},
"ValidateRenderSettings": {
"enabled": True,
"active": True,
"optional": False,
"arnold_render_attributes": [],
"vray_render_attributes": [],
"redshift_render_attributes": [],