Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-02 00:44:52 +01:00)

Commit 7e4abce2d0: Merge remote-tracking branch 'upstream/develop' into develop

62 changed files with 1090 additions and 394 deletions
@@ -702,6 +702,37 @@ class ClipLoader(LoaderPlugin):

     _mapping = None

+    def apply_settings(cls, project_settings, system_settings):
+
+        plugin_type_settings = (
+            project_settings
+            .get("flame", {})
+            .get("load", {})
+        )
+
+        if not plugin_type_settings:
+            return
+
+        plugin_name = cls.__name__
+
+        plugin_settings = None
+        # Look for plugin settings in host specific settings
+        if plugin_name in plugin_type_settings:
+            plugin_settings = plugin_type_settings[plugin_name]
+
+        if not plugin_settings:
+            return
+
+        print(">>> We have preset for {}".format(plugin_name))
+        for option, value in plugin_settings.items():
+            if option == "enabled" and value is False:
+                print(" - is disabled by preset")
+            elif option == "representations":
+                continue
+            else:
+                print(" - setting `{}`: `{}`".format(option, value))
+                setattr(cls, option, value)
+
     def get_colorspace(self, context):
         """Get colorspace name
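The override mechanism that this apply_settings method implements can be shown with a minimal, self-contained sketch (the settings values below are hypothetical): every option in the matching settings block except "enabled" and "representations" is written onto the plugin class via setattr.

# Hypothetical project settings, shaped like the structure queried above.
project_settings = {
    "flame": {
        "load": {
            "LoadClip": {
                "enabled": True,
                "clip_name_template": "{asset}_{subset}",
            }
        }
    }
}


class LoadClip(object):
    clip_name_template = "{asset}"


# Mimic the loop from the diff: every option except "enabled" and
# "representations" becomes a class attribute override.
plugin_settings = project_settings["flame"]["load"]["LoadClip"]
for option, value in plugin_settings.items():
    if option == "enabled" and value is False:
        print(" - is disabled by preset")
    elif option == "representations":
        continue
    else:
        setattr(LoadClip, option, value)

print(LoadClip.clip_name_template)  # -> "{asset}_{subset}"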
@@ -4,6 +4,10 @@ import flame
 from pprint import pformat
 import openpype.hosts.flame.api as opfapi
 from openpype.lib import StringTemplate
+from openpype.lib.transcoding import (
+    VIDEO_EXTENSIONS,
+    IMAGE_EXTENSIONS
+)


 class LoadClip(opfapi.ClipLoader):

@@ -14,7 +18,10 @@ class LoadClip(opfapi.ClipLoader):
     """

     families = ["render2d", "source", "plate", "render", "review"]
-    representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264"]
+    representations = ["*"]
+    extensions = set(
+        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
+    )

     label = "Load as clip"
     order = -10
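The extensions attribute above is now derived from the transcoding constants rather than hardcoded per loader. A small sketch with a representative subset of values (the real IMAGE_EXTENSIONS and VIDEO_EXTENSIONS sets in openpype.lib.transcoding are larger):

# Representative subset only; the real constants hold many more entries.
IMAGE_EXTENSIONS = {".exr", ".dpx", ".jpg"}
VIDEO_EXTENSIONS = {".mov", ".mp4"}

extensions = set(
    ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
)
print(sorted(extensions))  # -> ['dpx', 'exr', 'jpg', 'mov', 'mp4']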
@@ -4,7 +4,10 @@ import flame
 from pprint import pformat
 import openpype.hosts.flame.api as opfapi
 from openpype.lib import StringTemplate

+from openpype.lib.transcoding import (
+    VIDEO_EXTENSIONS,
+    IMAGE_EXTENSIONS
+)

 class LoadClipBatch(opfapi.ClipLoader):
     """Load a subset to timeline as clip

@@ -14,7 +17,10 @@ class LoadClipBatch(opfapi.ClipLoader):
     """

     families = ["render2d", "source", "plate", "render", "review"]
-    representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264"]
+    representations = ["*"]
+    extensions = set(
+        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
+    )

     label = "Load as clip to current batch"
     order = -10
@@ -15,6 +15,7 @@ class FusionSetFrameRangeLoader(load.LoaderPlugin):
                 "pointcache",
                 "render"]
     representations = ["*"]
+    extensions = {"*"}

     label = "Set frame range"
     order = 11
@@ -13,7 +13,8 @@ class FusionLoadAlembicMesh(load.LoaderPlugin):
     """Load Alembic mesh into Fusion"""

     families = ["pointcache", "model"]
-    representations = ["abc"]
+    representations = ["*"]
+    extensions = {"abc"}

     label = "Load alembic mesh"
     order = -10
@@ -14,7 +14,8 @@ class FusionLoadFBXMesh(load.LoaderPlugin):
    """Load FBX mesh into Fusion"""

     families = ["*"]
-    representations = ["fbx"]
+    representations = ["*"]
+    extensions = {"fbx"}

     label = "Load FBX mesh"
     order = -10
@@ -12,6 +12,10 @@ from openpype.hosts.fusion.api import (
     get_current_comp,
     comp_lock_and_undo_chunk
 )
+from openpype.lib.transcoding import (
+    IMAGE_EXTENSIONS,
+    VIDEO_EXTENSIONS
+)

 comp = get_current_comp()
@@ -129,6 +133,9 @@ class FusionLoadSequence(load.LoaderPlugin):

     families = ["imagesequence", "review", "render", "plate"]
     representations = ["*"]
+    extensions = set(
+        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
+    )

     label = "Load sequence"
     order = -10
@@ -20,8 +20,9 @@ class ImageSequenceLoader(load.LoaderPlugin):
     Stores the imported asset in a container named after the asset.
     """

-    families = ["shot", "render", "image", "plate", "reference"]
-    representations = ["jpeg", "png", "jpg"]
+    families = ["shot", "render", "image", "plate", "reference", "review"]
+    representations = ["*"]
+    extensions = {"jpeg", "png", "jpg"}

     def load(self, context, name=None, namespace=None, data=None):
         """Plugin entry point.
@@ -6,6 +6,10 @@ from openpype.pipeline import (
     legacy_io,
     get_representation_path,
 )
+from openpype.lib.transcoding import (
+    VIDEO_EXTENSIONS,
+    IMAGE_EXTENSIONS
+)
 import openpype.hosts.hiero.api as phiero
@@ -17,7 +21,10 @@ class LoadClip(phiero.SequenceLoader):
     """

     families = ["render2d", "source", "plate", "render", "review"]
-    representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264"]
+    representations = ["*"]
+    extensions = set(
+        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
+    )

     label = "Load as clip"
     order = -10
@@ -34,6 +41,38 @@ class LoadClip(phiero.SequenceLoader):

     clip_name_template = "{asset}_{subset}_{representation}"

+    def apply_settings(cls, project_settings, system_settings):
+
+        plugin_type_settings = (
+            project_settings
+            .get("hiero", {})
+            .get("load", {})
+        )
+
+        if not plugin_type_settings:
+            return
+
+        plugin_name = cls.__name__
+
+        plugin_settings = None
+        # Look for plugin settings in host specific settings
+        if plugin_name in plugin_type_settings:
+            plugin_settings = plugin_type_settings[plugin_name]
+
+        if not plugin_settings:
+            return
+
+        print(">>> We have preset for {}".format(plugin_name))
+        for option, value in plugin_settings.items():
+            if option == "enabled" and value is False:
+                print(" - is disabled by preset")
+            elif option == "representations":
+                continue
+            else:
+                print(" - setting `{}`: `{}`".format(option, value))
+                setattr(cls, option, value)
+
     def load(self, context, name, namespace, options):
         # add clip name template to options
         options.update({
@@ -19,8 +19,9 @@ from openpype.lib import Logger
 class LoadEffects(load.LoaderPlugin):
     """Loading colorspace soft effect exported from nukestudio"""

-    representations = ["effectJson"]
     families = ["effect"]
+    representations = ["*"]
+    extension = {"json"}

     label = "Load Effects"
     order = 0
@@ -4,7 +4,6 @@ import os
import sys
import platform
import uuid
import math
import re

import json
@@ -2064,13 +2063,8 @@ def set_scene_resolution(width, height, pixelAspect):
     cmds.setAttr("%s.pixelAspect" % control_node, pixelAspect)


-def reset_frame_range():
-    """Set frame range to current asset"""
-
-    fps = convert_to_maya_fps(
-        float(legacy_io.Session.get("AVALON_FPS", 25))
-    )
-    set_scene_fps(fps)
+def get_frame_range():
+    """Get the current assets frame range and handles."""

     # Set frame start/end
     project_name = legacy_io.active_project()
@@ -2097,8 +2091,26 @@ def get_frame_range():
     if handle_end is None:
         handle_end = handles

-    frame_start -= int(handle_start)
-    frame_end += int(handle_end)
+    return {
+        "frameStart": frame_start,
+        "frameEnd": frame_end,
+        "handleStart": handle_start,
+        "handleEnd": handle_end
+    }
+
+
+def reset_frame_range():
+    """Set frame range to current asset"""
+
+    fps = convert_to_maya_fps(
+        float(legacy_io.Session.get("AVALON_FPS", 25))
+    )
+    set_scene_fps(fps)
+
+    frame_range = get_frame_range()
+
+    frame_start = frame_range["frameStart"] - int(frame_range["handleStart"])
+    frame_end = frame_range["frameEnd"] + int(frame_range["handleEnd"])

     cmds.playbackOptions(minTime=frame_start)
     cmds.playbackOptions(maxTime=frame_end)
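The refactor above splits querying the asset range (get_frame_range) from applying it (reset_frame_range). A small sketch of the handle arithmetic, using hypothetical values in place of the database query:

# Hypothetical asset data, shaped like the dict returned by get_frame_range().
frame_range = {
    "frameStart": 1001,
    "frameEnd": 1100,
    "handleStart": 8,
    "handleEnd": 8,
}

# reset_frame_range() widens the playback range by the handles on each side.
frame_start = frame_range["frameStart"] - int(frame_range["handleStart"])
frame_end = frame_range["frameEnd"] + int(frame_range["handleEnd"])
print(frame_start, frame_end)  # -> 993 1108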
@@ -3562,3 +3574,34 @@ def get_color_management_output_transform():
     if preferences["output_transform_enabled"]:
         colorspace = preferences["output_transform"]
     return colorspace
+
+
+def len_flattened(components):
+    """Return the length of the list as if it was flattened.
+
+    Maya will return consecutive components as a single entry
+    when requesting with `maya.cmds.ls` without the `flatten`
+    flag. Though enabling `flatten` on a large list (e.g. millions)
+    will result in a slow result. This command will return the amount
+    of entries in a non-flattened list by parsing the result with
+    regex.
+
+    Args:
+        components (list): The non-flattened components.
+
+    Returns:
+        int: The amount of entries.
+
+    """
+    assert isinstance(components, (list, tuple))
+    n = 0
+
+    pattern = re.compile(r"\[(\d+):(\d+)\]")
+    for c in components:
+        match = pattern.search(c)
+        if match:
+            start, end = match.groups()
+            n += int(end) - int(start) + 1
+        else:
+            n += 1
+    return n
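Usage sketch for the relocated len_flattened helper; the component strings are hypothetical, and the import path is the new canonical location introduced by this commit:

from openpype.hosts.maya.api.lib import len_flattened

# A range entry counts as (end - start + 1) entries, anything else as one.
components = ["pCube1.vtx[0:7]", "pCube1.vtx[10]"]
print(len_flattened(components))  # -> 9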
@@ -25,16 +25,20 @@ class CreateReview(plugin.Creator):
         "depth peeling",
         "alpha cut"
     ]
+    useMayaTimeline = True

     def __init__(self, *args, **kwargs):
         super(CreateReview, self).__init__(*args, **kwargs)

-        # get basic animation data : start / end / handles / steps
         data = OrderedDict(**self.data)
-        animation_data = lib.collect_animation_data(fps=True)
-        for key, value in animation_data.items():
+
+        # Option for using Maya or asset frame range in settings.
+        frame_range = lib.get_frame_range()
+        if self.useMayaTimeline:
+            frame_range = lib.collect_animation_data(fps=True)
+        for key, value in frame_range.items():
             data[key] = value

+        data["fps"] = lib.collect_animation_data(fps=True)["fps"]
         data["review_width"] = self.Width
         data["review_height"] = self.Height
         data["isolate"] = self.isolate
@@ -9,6 +9,9 @@ class CreateVrayProxy(plugin.Creator):
     family = "vrayproxy"
     icon = "gears"

+    vrmesh = True
+    alembic = True
+
     def __init__(self, *args, **kwargs):
         super(CreateVrayProxy, self).__init__(*args, **kwargs)
@@ -18,3 +21,6 @@ class CreateVrayProxy(plugin.Creator):

         # Write vertex colors
         self.data["vertexColors"] = False
+
+        self.data["vrmesh"] = self.vrmesh
+        self.data["alembic"] = self.alembic
@@ -9,10 +9,16 @@ class CollectVrayProxy(pyblish.api.InstancePlugin):
     Add `pointcache` family for it.
     """
     order = pyblish.api.CollectorOrder + 0.01
-    label = 'Collect Vray Proxy'
+    label = "Collect Vray Proxy"
     families = ["vrayproxy"]

     def process(self, instance):
         """Collector entry point."""
         if not instance.data.get('families'):
             instance.data["families"] = []
+
+        if instance.data.get("vrmesh"):
+            instance.data["families"].append("vrayproxy.vrmesh")
+
+        if instance.data.get("alembic"):
+            instance.data["families"].append("vrayproxy.alembic")
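The collector now tags sub-families that the extractors below filter on. A sketch of that tagging on a plain dict standing in for a pyblish instance (hypothetical data):

instance_data = {"vrmesh": True, "alembic": False, "families": []}

if instance_data.get("vrmesh"):
    instance_data["families"].append("vrayproxy.vrmesh")
if instance_data.get("alembic"):
    instance_data["families"].append("vrayproxy.alembic")

print(instance_data["families"])  # -> ['vrayproxy.vrmesh']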
@@ -23,7 +23,7 @@ class ExtractAlembic(publish.Extractor):

     label = "Extract Pointcache (Alembic)"
     hosts = ["maya"]
-    families = ["pointcache", "model", "vrayproxy"]
+    families = ["pointcache", "model", "vrayproxy.alembic"]
     targets = ["local", "remote"]

     def process(self, instance):
@@ -16,7 +16,7 @@ class ExtractVRayProxy(publish.Extractor):

     label = "VRay Proxy (.vrmesh)"
     hosts = ["maya"]
-    families = ["vrayproxy"]
+    families = ["vrayproxy.vrmesh"]

     def process(self, instance):
@@ -57,6 +57,10 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):

         inst_start = int(instance.data.get("frameStartHandle"))
         inst_end = int(instance.data.get("frameEndHandle"))
+        inst_frame_start = int(instance.data.get("frameStart"))
+        inst_frame_end = int(instance.data.get("frameEnd"))
+        inst_handle_start = int(instance.data.get("handleStart"))
+        inst_handle_end = int(instance.data.get("handleEnd"))

         # basic sanity checks
         assert frame_start_handle <= frame_end_handle, (
@@ -69,24 +73,37 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
         if [ef for ef in self.exclude_families
                 if instance.data["family"] in ef]:
             return
-        if(inst_start != frame_start_handle):
+        if (inst_start != frame_start_handle):
             errors.append("Instance start frame [ {} ] doesn't "
-                          "match the one set on instance [ {} ]: "
+                          "match the one set on asset [ {} ]: "
                           "{}/{}/{}/{} (handle/start/end/handle)".format(
                               inst_start,
                               frame_start_handle,
                               handle_start, frame_start, frame_end, handle_end
                           ))

-        if(inst_end != frame_end_handle):
+        if (inst_end != frame_end_handle):
             errors.append("Instance end frame [ {} ] doesn't "
-                          "match the one set on instance [ {} ]: "
+                          "match the one set on asset [ {} ]: "
                           "{}/{}/{}/{} (handle/start/end/handle)".format(
                               inst_end,
                               frame_end_handle,
                               handle_start, frame_start, frame_end, handle_end
                           ))

+        checks = {
+            "frame start": (frame_start, inst_frame_start),
+            "frame end": (frame_end, inst_frame_end),
+            "handle start": (handle_start, inst_handle_start),
+            "handle end": (handle_end, inst_handle_end)
+        }
+        for label, values in checks.items():
+            if values[0] != values[1]:
+                errors.append(
+                    "{} on instance ({}) does not match with the asset "
+                    "({}).".format(label.title(), values[1], values[0])
+                )
+
         for e in errors:
             self.log.error(e)
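The table-driven comparison added above replaces four near-identical if blocks. A sketch with hypothetical values where only the end frame differs:

frame_start, frame_end = 1001, 1100
handle_start, handle_end = 8, 8
inst = {"frameStart": 1001, "frameEnd": 1101,
        "handleStart": 8, "handleEnd": 8}

errors = []
checks = {
    "frame start": (frame_start, inst["frameStart"]),
    "frame end": (frame_end, inst["frameEnd"]),
    "handle start": (handle_start, inst["handleStart"]),
    "handle end": (handle_end, inst["handleEnd"]),
}
for label, values in checks.items():
    if values[0] != values[1]:
        errors.append(
            "{} on instance ({}) does not match with the asset "
            "({}).".format(label.title(), values[1], values[0])
        )

print(errors)  # one entry, for "Frame End"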
openpype/hosts/maya/plugins/publish/validate_mesh_empty.py (new file, 54 lines)
@@ -0,0 +1,54 @@
from maya import cmds

import pyblish.api
import openpype.hosts.maya.api.action
from openpype.pipeline.publish import (
    RepairAction,
    ValidateMeshOrder
)


class ValidateMeshEmpty(pyblish.api.InstancePlugin):
    """Validate meshes have some vertices.

    It's possible to have meshes without any vertices. To replicate
    this issue, delete all faces/polygons then all edges.
    """

    order = ValidateMeshOrder
    hosts = ["maya"]
    families = ["model"]
    label = "Mesh Empty"
    actions = [
        openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction
    ]

    @classmethod
    def repair(cls, instance):
        invalid = cls.get_invalid(instance)
        for node in invalid:
            cmds.delete(node)

    @classmethod
    def get_invalid(cls, instance):
        invalid = []

        meshes = cmds.ls(instance, type="mesh", long=True)
        for mesh in meshes:
            num_vertices = cmds.polyEvaluate(mesh, vertex=True)

            if num_vertices == 0:
                cls.log.warning(
                    "\"{}\" does not have any vertices.".format(mesh)
                )
                invalid.append(mesh)

        return invalid

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Meshes found in instance without any vertices: %s" % invalid
            )
@@ -1,39 +1,9 @@
-import re
-
 from maya import cmds

 import pyblish.api
 import openpype.hosts.maya.api.action
 from openpype.pipeline.publish import ValidateMeshOrder
-
-
-def len_flattened(components):
-    """Return the length of the list as if it was flattened.
-
-    Maya will return consecutive components as a single entry
-    when requesting with `maya.cmds.ls` without the `flatten`
-    flag. Though enabling `flatten` on a large list (e.g. millions)
-    will result in a slow result. This command will return the amount
-    of entries in a non-flattened list by parsing the result with
-    regex.
-
-    Args:
-        components (list): The non-flattened components.
-
-    Returns:
-        int: The amount of entries.
-
-    """
-    assert isinstance(components, (list, tuple))
-    n = 0
-    for c in components:
-        match = re.search("\[([0-9]+):([0-9]+)\]", c)
-        if match:
-            start, end = match.groups()
-            n += int(end) - int(start) + 1
-        else:
-            n += 1
-    return n
+from openpype.hosts.maya.api.lib import len_flattened


 class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
@@ -57,6 +27,15 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
         invalid = []

         for node in cmds.ls(instance, type='mesh'):
+            num_vertices = cmds.polyEvaluate(node, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(node)
+                )
+                continue
+
             uv = cmds.polyEvaluate(node, uv=True)

             if uv == 0:
@@ -28,7 +28,10 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance):
         """Return the invalid edges.
-        Also see: http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup

+        Also see:
+
+        http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup

         """
@@ -36,8 +39,21 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
         if not meshes:
             return list()

+        valid_meshes = []
+        for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
+            valid_meshes.append(mesh)
+
         # Get all edges
-        edges = ['{0}.e[*]'.format(node) for node in meshes]
+        edges = ['{0}.e[*]'.format(node) for node in valid_meshes]

         # Filter by constraint on edge length
         invalid = lib.polyConstraint(edges,
@@ -1,5 +1,3 @@
-import re
-
 from maya import cmds

 import pyblish.api
@@ -8,37 +6,7 @@ from openpype.pipeline.publish import (
     RepairAction,
     ValidateMeshOrder,
 )
-
-
-def len_flattened(components):
-    """Return the length of the list as if it was flattened.
-
-    Maya will return consecutive components as a single entry
-    when requesting with `maya.cmds.ls` without the `flatten`
-    flag. Though enabling `flatten` on a large list (e.g. millions)
-    will result in a slow result. This command will return the amount
-    of entries in a non-flattened list by parsing the result with
-    regex.
-
-    Args:
-        components (list): The non-flattened components.
-
-    Returns:
-        int: The amount of entries.
-
-    """
-    assert isinstance(components, (list, tuple))
-    n = 0
-
-    pattern = re.compile(r"\[(\d+):(\d+)\]")
-    for c in components:
-        match = pattern.search(c)
-        if match:
-            start, end = match.groups()
-            n += int(end) - int(start) + 1
-        else:
-            n += 1
-    return n
+from openpype.hosts.maya.api.lib import len_flattened


 class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
@@ -87,6 +55,13 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
         for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
             # Vertices from all edges
             edges = "%s.e[*]" % mesh
             vertices = cmds.polyListComponentConversion(edges, toVertex=True)
@@ -1,27 +1,31 @@
 import pyblish.api

+from openpype.pipeline import KnownPublishError
+

 class ValidateVrayProxy(pyblish.api.InstancePlugin):

     order = pyblish.api.ValidatorOrder
-    label = 'VRay Proxy Settings'
-    hosts = ['maya']
-    families = ['studio.vrayproxy']
+    label = "VRay Proxy Settings"
+    hosts = ["maya"]
+    families = ["vrayproxy"]

     def process(self, instance):
-        invalid = self.get_invalid(instance)
-        if invalid:
-            raise RuntimeError("'%s' has invalid settings for VRay Proxy "
-                               "export!" % instance.name)
-
-    @classmethod
-    def get_invalid(cls, instance):
         data = instance.data

         if not data["setMembers"]:
-            cls.log.error("'%s' is empty! This is a bug" % instance.name)
+            raise KnownPublishError(
+                "'%s' is empty! This is a bug" % instance.name
+            )

         if data["animation"]:
             if data["frameEnd"] < data["frameStart"]:
-                cls.log.error("End frame is smaller than start frame")
+                raise KnownPublishError(
+                    "End frame is smaller than start frame"
+                )
+
+        if not data["vrmesh"] and not data["alembic"]:
+            raise KnownPublishError(
+                "Both vrmesh and alembic are off. Needs at least one to"
+                " publish."
+            )
@@ -17,6 +17,7 @@ class SetFrameRangeLoader(load.LoaderPlugin):
                 "yeticache",
                 "pointcache"]
     representations = ["*"]
+    extension = {"*"}

     label = "Set frame range"
     order = 11
@@ -25,8 +25,9 @@ from openpype.hosts.nuke.api import containerise, update_container
 class LoadBackdropNodes(load.LoaderPlugin):
     """Loading Published Backdrop nodes (workfile, nukenodes)"""

-    representations = ["nk"]
     families = ["workfile", "nukenodes"]
+    representations = ["*"]
+    extension = {"nk"}

     label = "Import Nuke Nodes"
     order = 0
@@ -25,7 +25,8 @@ class AlembicCameraLoader(load.LoaderPlugin):
     """

     families = ["camera"]
-    representations = ["abc"]
+    representations = ["*"]
+    extension = {"abc"}

     label = "Load Alembic Camera"
     icon = "camera"
@@ -21,6 +21,10 @@ from openpype.hosts.nuke.api import (
     viewer_update_and_undo_stop,
     colorspace_exists_on_node
 )
+from openpype.lib.transcoding import (
+    VIDEO_EXTENSIONS,
+    IMAGE_EXTENSIONS
+)
 from openpype.hosts.nuke.api import plugin
@@ -38,13 +42,10 @@ class LoadClip(plugin.NukeLoader):
         "prerender",
         "review"
     ]
-    representations = [
-        "exr",
-        "dpx",
-        "mov",
-        "review",
-        "mp4"
-    ]
+    representations = ["*"]
+    extensions = set(
+        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
+    )

     label = "Load Clip"
     order = -20
@@ -81,17 +82,17 @@ class LoadClip(plugin.NukeLoader):

     @classmethod
     def get_representations(cls):
-        return (
-            cls.representations
-            + cls._representations
-            + plugin.get_review_presets_config()
-        )
+        return cls._representations or cls.representations

     def load(self, context, name, namespace, options):
         """Load asset via database
         """
         representation = context["representation"]
-        # reste container id so it is always unique for each instance
+        # reset container id so it is always unique for each instance
         self.reset_container_id()

+        self.log.warning(self.extensions)
+
         is_sequence = len(representation["files"]) > 1

         if is_sequence:
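The simplified get_representations now lets an explicit per-project override list win over the class default. A self-contained sketch with hypothetical values:

class LoadClipSketch:
    representations = ["*"]
    _representations = []

    @classmethod
    def get_representations(cls):
        return cls._representations or cls.representations


print(LoadClipSketch.get_representations())   # -> ['*']
LoadClipSketch._representations = ["exr", "mov"]
print(LoadClipSketch.get_representations())   # -> ['exr', 'mov']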
@@ -22,8 +22,9 @@ from openpype.hosts.nuke.api import (
 class LoadEffects(load.LoaderPlugin):
     """Loading colorspace soft effect exported from nukestudio"""

-    representations = ["effectJson"]
     families = ["effect"]
+    representations = ["*"]
+    extension = {"json"}

     label = "Load Effects - nodes"
     order = 0
@@ -23,8 +23,9 @@ from openpype.hosts.nuke.api import (
 class LoadEffectsInputProcess(load.LoaderPlugin):
     """Loading colorspace soft effect exported from nukestudio"""

-    representations = ["effectJson"]
     families = ["effect"]
+    representations = ["*"]
+    extension = {"json"}

     label = "Load Effects - Input Process"
     order = 0
@@ -24,8 +24,9 @@ from openpype.hosts.nuke.api import (
 class LoadGizmo(load.LoaderPlugin):
     """Loading nuke Gizmo"""

-    representations = ["gizmo"]
     families = ["gizmo"]
+    representations = ["*"]
+    extension = {"gizmo"}

     label = "Load Gizmo"
     order = 0
@@ -26,8 +26,9 @@ from openpype.hosts.nuke.api import (
 class LoadGizmoInputProcess(load.LoaderPlugin):
     """Loading colorspace soft effect exported from nukestudio"""

-    representations = ["gizmo"]
     families = ["gizmo"]
+    representations = ["*"]
+    extension = {"gizmo"}

     label = "Load Gizmo - Input Process"
     order = 0
@@ -19,6 +19,9 @@ from openpype.hosts.nuke.api import (
     update_container,
     viewer_update_and_undo_stop
 )
+from openpype.lib.transcoding import (
+    IMAGE_EXTENSIONS
+)


 class LoadImage(load.LoaderPlugin):
@@ -33,7 +36,10 @@ class LoadImage(load.LoaderPlugin):
         "review",
         "image"
     ]
-    representations = ["exr", "dpx", "jpg", "jpeg", "png", "psd", "tiff"]
+    representations = ["*"]
+    extensions = set(
+        ext.lstrip(".") for ext in IMAGE_EXTENSIONS
+    )

     label = "Load Image"
     order = -10
@@ -58,7 +64,7 @@ class LoadImage(load.LoaderPlugin):

     @classmethod
     def get_representations(cls):
-        return cls.representations + cls._representations
+        return cls._representations or cls.representations

     def load(self, context, name, namespace, options):
         self.log.info("__ options: `{}`".format(options))
@@ -8,7 +8,9 @@ class MatchmoveLoader(load.LoaderPlugin):
     """

     families = ["matchmove"]
-    representations = ["py"]
+    representations = ["*"]
+    extension = {"py"}

     defaults = ["Camera", "Object"]

     label = "Run matchmove script"
@@ -23,7 +23,8 @@ class AlembicModelLoader(load.LoaderPlugin):
     """

     families = ["model", "pointcache", "animation"]
-    representations = ["abc"]
+    representations = ["*"]
+    extension = {"abc"}

     label = "Load Alembic"
     icon = "cube"
@@ -20,8 +20,9 @@ from openpype.hosts.nuke.api import (
 class LinkAsGroup(load.LoaderPlugin):
     """Copy the published file to be pasted at the desired location"""

-    representations = ["nk"]
     families = ["workfile", "nukenodes"]
+    representations = ["*"]
+    extension = {"nk"}

     label = "Load Precomp"
     order = 0
@@ -14,7 +14,10 @@ from openpype.hosts.resolve.api.pipeline import (
     containerise,
     update_container,
 )
+from openpype.lib.transcoding import (
+    VIDEO_EXTENSIONS,
+    IMAGE_EXTENSIONS
+)

 class LoadClip(plugin.TimelineItemLoader):
     """Load a subset to timeline as clip
@@ -24,7 +27,11 @@ class LoadClip(plugin.TimelineItemLoader):
     """

     families = ["render2d", "source", "plate", "render", "review"]
-    representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264", "mov"]
+
+    representations = ["*"]
+    extensions = set(
+        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
+    )

     label = "Load as clip"
     order = -10
@@ -0,0 +1,114 @@
// Fill out your copyright notice in the Description page of Project Settings.

#include "AssetContainer.h"
#include "AssetRegistry/AssetRegistryModule.h"
#include "Misc/PackageName.h"
#include "Engine.h"
#include "Containers/UnrealString.h"

UAssetContainer::UAssetContainer(const FObjectInitializer& ObjectInitializer)
    : UAssetUserData(ObjectInitializer)
{
    FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>("AssetRegistry");
    FString path = UAssetContainer::GetPathName();
    UE_LOG(LogTemp, Warning, TEXT("UAssetContainer %s"), *path);
    FARFilter Filter;
    Filter.PackagePaths.Add(FName(*path));

    AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UAssetContainer::OnAssetAdded);
    AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UAssetContainer::OnAssetRemoved);
    AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UAssetContainer::OnAssetRenamed);
}

void UAssetContainer::OnAssetAdded(const FAssetData& AssetData)
{
    TArray<FString> split;

    // get directory of current container
    FString selfFullPath = UAssetContainer::GetPathName();
    FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);

    // get asset path and class
    FString assetPath = AssetData.GetFullName();
    FString assetFName = AssetData.AssetClassPath.ToString();
    UE_LOG(LogTemp, Log, TEXT("asset name %s"), *assetFName);
    // split path
    assetPath.ParseIntoArray(split, TEXT(" "), true);

    FString assetDir = FPackageName::GetLongPackagePath(*split[1]);

    // take interest only in paths starting with path of current container
    if (assetDir.StartsWith(*selfDir))
    {
        // exclude self
        if (assetFName != "AssetContainer")
        {
            assets.Add(assetPath);
            assetsData.Add(AssetData);
            UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir);
        }
    }
}

void UAssetContainer::OnAssetRemoved(const FAssetData& AssetData)
{
    TArray<FString> split;

    // get directory of current container
    FString selfFullPath = UAssetContainer::GetPathName();
    FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);

    // get asset path and class
    FString assetPath = AssetData.GetFullName();
    FString assetFName = AssetData.AssetClassPath.ToString();

    // split path
    assetPath.ParseIntoArray(split, TEXT(" "), true);

    FString assetDir = FPackageName::GetLongPackagePath(*split[1]);

    // take interest only in paths starting with path of current container
    FString path = UAssetContainer::GetPathName();
    FString lpp = FPackageName::GetLongPackagePath(*path);

    if (assetDir.StartsWith(*selfDir))
    {
        // exclude self
        if (assetFName != "AssetContainer")
        {
            // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp);
            assets.Remove(assetPath);
            assetsData.Remove(AssetData);
        }
    }
}

void UAssetContainer::OnAssetRenamed(const FAssetData& AssetData, const FString& str)
{
    TArray<FString> split;

    // get directory of current container
    FString selfFullPath = UAssetContainer::GetPathName();
    FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);

    // get asset path and class
    FString assetPath = AssetData.GetFullName();
    FString assetFName = AssetData.AssetClassPath.ToString();

    // split path
    assetPath.ParseIntoArray(split, TEXT(" "), true);

    FString assetDir = FPackageName::GetLongPackagePath(*split[1]);
    if (assetDir.StartsWith(*selfDir))
    {
        // exclude self
        if (assetFName != "AssetContainer")
        {
            assets.Remove(str);
            assets.Add(assetPath);
            assetsData.Remove(AssetData);
            // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str);
        }
    }
}
@@ -0,0 +1,20 @@
#include "AssetContainerFactory.h"
#include "AssetContainer.h"

UAssetContainerFactory::UAssetContainerFactory(const FObjectInitializer& ObjectInitializer)
    : UFactory(ObjectInitializer)
{
    SupportedClass = UAssetContainer::StaticClass();
    bCreateNew = false;
    bEditorImport = true;
}

UObject* UAssetContainerFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
    UAssetContainer* AssetContainer = NewObject<UAssetContainer>(InParent, Class, Name, Flags);
    return AssetContainer;
}

bool UAssetContainerFactory::ShouldShowInNewMenu() const {
    return false;
}
@@ -0,0 +1,37 @@
// Fill out your copyright notice in the Description page of Project Settings.

#pragma once

#include "CoreMinimal.h"
#include "UObject/NoExportTypes.h"
#include "Engine/AssetUserData.h"
#include "AssetRegistry/AssetData.h"
#include "AssetContainer.generated.h"

/**
 *
 */
UCLASS(Blueprintable)
class OPENPYPE_API UAssetContainer : public UAssetUserData
{
    GENERATED_BODY()

public:

    UAssetContainer(const FObjectInitializer& ObjectInitalizer);
    // ~UAssetContainer();

    UPROPERTY(EditAnywhere, BlueprintReadOnly, Category="Assets")
    TArray<FString> assets;

    // There seems to be no reflection option to expose array of FAssetData
    /*
    UPROPERTY(Transient, BlueprintReadOnly, Category = "Python", meta=(DisplayName="Assets Data"))
    TArray<FAssetData> assetsData;
    */
private:
    TArray<FAssetData> assetsData;
    void OnAssetAdded(const FAssetData& AssetData);
    void OnAssetRemoved(const FAssetData& AssetData);
    void OnAssetRenamed(const FAssetData& AssetData, const FString& str);
};
@@ -0,0 +1,21 @@
// Fill out your copyright notice in the Description page of Project Settings.

#pragma once

#include "CoreMinimal.h"
#include "Factories/Factory.h"
#include "AssetContainerFactory.generated.h"

/**
 *
 */
UCLASS()
class OPENPYPE_API UAssetContainerFactory : public UFactory
{
    GENERATED_BODY()

public:
    UAssetContainerFactory(const FObjectInitializer& ObjectInitializer);
    virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
    virtual bool ShouldShowInNewMenu() const override;
};
@@ -419,8 +419,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
         assembly_job_info.Name += " - Tile Assembly Job"
         assembly_job_info.Frames = 1
         assembly_job_info.MachineLimit = 1
-        assembly_job_info.Priority = instance.data.get("tile_priority",
-                                                       self.tile_priority)
+        assembly_job_info.Priority = instance.data.get(
+            "tile_priority", self.tile_priority
+        )
+
+        pool = instance.context.data["project_settings"]["deadline"]
+        pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"]
+        assembly_job_info.Pool = pool or instance.data.get("primaryPool", "")

         assembly_plugin_info = {
             "CleanupTiles": 1,
@@ -284,6 +284,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             args.append("--automatic-tests")

         # Generate the payload for Deadline submission
+        secondary_pool = (
+            self.deadline_pool_secondary or instance.data.get("secondaryPool")
+        )
         payload = {
             "JobInfo": {
                 "Plugin": self.deadline_plugin,
@@ -297,8 +300,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 "Priority": priority,

                 "Group": self.deadline_group,
-                "Pool": instance.data.get("primaryPool"),
-                "SecondaryPool": instance.data.get("secondaryPool"),
+                "Pool": self.deadline_pool or instance.data.get("primaryPool"),
+                "SecondaryPool": secondary_pool,
                 # ensure the outputdirectory with correct slashes
                 "OutputDirectory0": output_dir.replace("\\", "/")
             },
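The new pool resolution gives an explicit plugin setting priority over per-instance data. A sketch with hypothetical values:

deadline_pool = ""  # plugin setting; empty string means "not set"
instance_data = {"primaryPool": "gpu_pool", "secondaryPool": "cpu_pool"}

pool = deadline_pool or instance_data.get("primaryPool")
print(pool)  # -> "gpu_pool" (falls back because deadline_pool is empty)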
@@ -588,7 +591,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         self.log.debug("instances:{}".format(instances))
         return instances

-    def _get_representations(self, instance, exp_files, additional_data):
+    def _get_representations(self, instance, exp_files):
         """Create representations for file sequences.

         This will return representations of expected files if they are not
@@ -933,20 +936,21 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

         self.log.info(data.get("expectedFiles"))

-        additional_data = {
-            "renderProducts": instance.data["renderProducts"],
-            "colorspaceConfig": instance.data["colorspaceConfig"],
-            "display": instance.data["colorspaceDisplay"],
-            "view": instance.data["colorspaceView"],
-            "colorspaceTemplate": instance.data["colorspaceConfig"].replace(
-                str(context.data["anatomy"].roots["work"]), "{root[work]}"
-            )
-        }
-
         if isinstance(data.get("expectedFiles")[0], dict):
             # we cannot attach AOVs to other subsets as we consider every
             # AOV subset of its own.

+            config = instance.data["colorspaceConfig"]
+            additional_data = {
+                "renderProducts": instance.data["renderProducts"],
+                "colorspaceConfig": instance.data["colorspaceConfig"],
+                "display": instance.data["colorspaceDisplay"],
+                "view": instance.data["colorspaceView"],
+                "colorspaceTemplate": config.replace(
+                    str(context.data["anatomy"].roots["work"]), "{root[work]}"
+                )
+            }
+
             if len(data.get("attachTo")) > 0:
                 assert len(data.get("expectedFiles")[0].keys()) == 1, (
                     "attaching multiple AOVs or renderable cameras to "
@@ -1,3 +1,4 @@
+# /usr/bin/env python3
 # -*- coding: utf-8 -*-
 import os
 import tempfile
@@ -341,7 +342,7 @@ def inject_openpype_environment(deadlinePlugin):
        "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"),
        "envgroup": "farm"
    }


    if job.GetJobEnvironmentKeyValue('IS_TEST'):
        args.append("--automatic-tests")
@@ -50,13 +50,10 @@ async def upload(module, project_name, file, representation, provider_name,
                           presets=preset)

     file_path = file.get("path", "")
-    try:
-        local_file_path, remote_file_path = resolve_paths(
-            module, file_path, project_name,
-            remote_site_name, remote_handler
-        )
-    except Exception as exp:
-        print(exp)
+    local_file_path, remote_file_path = resolve_paths(
+        module, file_path, project_name,
+        remote_site_name, remote_handler
+    )

     target_folder = os.path.dirname(remote_file_path)
     folder_id = remote_handler.create_folder(target_folder)
@@ -3,7 +3,6 @@ import sys
import time
from datetime import datetime
import threading
import platform
import copy
import signal
from collections import deque, defaultdict
@@ -25,7 +24,11 @@ from openpype.lib import Logger, get_local_site_id
 from openpype.pipeline import AvalonMongoDB, Anatomy
 from openpype.settings.lib import (
     get_default_anatomy_settings,
-    get_anatomy_settings
+    get_anatomy_settings,
+    get_local_settings,
 )
+from openpype.settings.constants import (
+    DEFAULT_PROJECT_KEY
+)

 from .providers.local_drive import LocalDriveHandler
@@ -639,6 +642,110 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             return get_local_site_id()
         return active_site

+    def get_active_site_type(self, project_name, local_settings=None):
+        """Active site which is defined by artist.
+
+        Unlike 'get_active_site', this method also checks local settings,
+        where a different active site may be set by the user. The output is
+        limited to "studio" and "local".
+
+        This method is used by Anatomy when it is decided which site should
+        be used.
+
+        Todos:
+            Check if sync server is enabled for the project.
+            - To be able to do that the sync settings MUST NOT be cached for
+                all projects at once. The sync settings preparation for all
+                projects is reasonable only in sync server loop.
+
+        Args:
+            project_name (str): Name of project where to look for active site.
+            local_settings (Optional[dict[str, Any]]): Prepared local settings.
+
+        Returns:
+            Literal["studio", "local"]: Active site.
+        """
+
+        if not self.enabled:
+            return "studio"
+
+        if local_settings is None:
+            local_settings = get_local_settings()
+
+        local_project_settings = local_settings.get("projects")
+        project_settings = get_project_settings(project_name)
+        sync_server_settings = project_settings["global"]["sync_server"]
+        if not sync_server_settings["enabled"]:
+            return "studio"
+
+        project_active_site = sync_server_settings["config"]["active_site"]
+        if not local_project_settings:
+            return project_active_site
+
+        project_locals = local_project_settings.get(project_name) or {}
+        default_locals = local_project_settings.get(DEFAULT_PROJECT_KEY) or {}
+        active_site = (
+            project_locals.get("active_site")
+            or default_locals.get("active_site")
+        )
+        if active_site:
+            return active_site
+        return project_active_site
+
+    def get_site_root_overrides(
+        self, project_name, site_name, local_settings=None
+    ):
+        """Get root overrides for project on a site.
+
+        Implemented to be used in 'Anatomy' for other than 'studio' site.
+
+        Args:
+            project_name (str): Project for which root overrides should be
+                received.
+            site_name (str): Name of site for which should be received roots.
+            local_settings (Optional[dict[str, Any]]): Prepared local settings
+                values.
+
+        Returns:
+            Union[dict[str, Any], None]: Root overrides for this machine.
+        """
+
+        # Validate that site name is valid
+        if site_name not in ("studio", "local"):
+            # Consider local site id as 'local'
+            if site_name != get_local_site_id():
+                raise ValueError((
+                    "Root overrides are available only for"
+                    " default sites not for \"{}\""
+                ).format(site_name))
+            site_name = "local"
+
+        if local_settings is None:
+            local_settings = get_local_settings()
+
+        if not local_settings:
+            return
+
+        local_project_settings = local_settings.get("projects") or {}
+
+        # Check for roots existence in local settings first
+        roots_project_locals = (
+            local_project_settings
+            .get(project_name, {})
+        )
+        roots_default_locals = (
+            local_project_settings
+            .get(DEFAULT_PROJECT_KEY, {})
+        )
+
+        # Skip rest of processing if roots are not set
+        if not roots_project_locals and not roots_default_locals:
+            return
+
+        # Combine roots from local settings
+        roots_locals = roots_default_locals.get(site_name) or {}
+        roots_locals.update(roots_project_locals.get(site_name) or {})
+        return roots_locals
+
     # remote sites
     def get_remote_sites(self, project_name):
         """
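The active-site fallback order implemented above (project-local setting, then default-project local setting, then the project's configured site) can be sketched with plain dicts and hypothetical values; "__default_project__" here stands in for DEFAULT_PROJECT_KEY:

local_project_settings = {
    "my_project": {"active_site": "local"},
    "__default_project__": {"active_site": "studio"},
}
project_active_site = "studio"  # from project settings "config"

project_locals = local_project_settings.get("my_project") or {}
default_locals = local_project_settings.get("__default_project__") or {}
active_site = (
    project_locals.get("active_site")
    or default_locals.get("active_site")
    or project_active_site
)
print(active_site)  # -> "local"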
@@ -9,7 +9,6 @@ import six
 import time

 from openpype.settings.lib import (
-    get_project_settings,
     get_local_settings,
 )
 from openpype.settings.constants import (
@@ -24,7 +23,7 @@ from openpype.lib.path_templates import (
     FormatObject,
 )
 from openpype.lib.log import Logger
-from openpype.lib import get_local_site_id
+from openpype.modules import ModulesManager

 log = Logger.get_logger(__name__)
@@ -57,20 +56,13 @@ class BaseAnatomy(object):
     root_key_regex = re.compile(r"{(root?[^}]+)}")
     root_name_regex = re.compile(r"root\[([^]]+)\]")

-    def __init__(self, project_doc, local_settings, site_name):
+    def __init__(self, project_doc, root_overrides=None):
         project_name = project_doc["name"]
         self.project_name = project_name
         self.project_code = project_doc["data"]["code"]

-        if (site_name and
-                site_name not in ["studio", "local", get_local_site_id()]):
-            raise RuntimeError("Anatomy could be created only for default "
-                               "local sites not for {}".format(site_name))
-
-        self._site_name = site_name
-
         self._data = self._prepare_anatomy_data(
-            project_doc, local_settings, site_name
+            project_doc, root_overrides
         )
         self._templates_obj = AnatomyTemplates(self)
         self._roots_obj = Roots(self)
@@ -92,28 +84,18 @@ class BaseAnatomy(object):
     def items(self):
         return copy.deepcopy(self._data).items()

-    def _prepare_anatomy_data(self, project_doc, local_settings, site_name):
+    def _prepare_anatomy_data(self, project_doc, root_overrides):
         """Prepare anatomy data for further processing.

         Method added to replace `{task}` with `{task[name]}` in templates.
         """
-        project_name = project_doc["name"]
-
         anatomy_data = self._project_doc_to_anatomy_data(project_doc)

-        templates_data = anatomy_data.get("templates")
-        if templates_data:
-            # Replace `{task}` with `{task[name]}` in templates
-            value_queue = collections.deque()
-            value_queue.append(templates_data)
-            while value_queue:
-                item = value_queue.popleft()
-                if not isinstance(item, dict):
-                    continue
-
-        self._apply_local_settings_on_anatomy_data(anatomy_data,
-                                                   local_settings,
-                                                   project_name,
-                                                   site_name)
+        self._apply_local_settings_on_anatomy_data(
+            anatomy_data,
+            root_overrides
+        )

         return anatomy_data
@@ -347,7 +329,7 @@ class BaseAnatomy(object):
         return output

     def _apply_local_settings_on_anatomy_data(
-        self, anatomy_data, local_settings, project_name, site_name
+        self, anatomy_data, root_overrides
     ):
         """Apply local settings on anatomy data.
@@ -366,13 +348,138 @@ class BaseAnatomy(object):

         Args:
             anatomy_data (dict): Data for anatomy.
-            local_settings (dict): Data of local settings.
-            project_name (str): Name of project for which anatomy data are.
+            root_overrides (dict): Data of local settings.
         """
-        if not local_settings:
+
+        # Skip processing if roots for current active site are not available
+        # in local settings
+        if not root_overrides:
             return
+
+        current_platform = platform.system().lower()
+
+        root_data = anatomy_data["roots"]
+        for root_name, path in root_overrides.items():
+            if root_name not in root_data:
+                continue
+            anatomy_data["roots"][root_name][current_platform] = (
+                path
+            )
+
+
+class CacheItem:
+    """Helper to cache data.
+
+    Helper does not handle refresh of data and does not mark data as outdated.
+    Whoever uses the object should check the outdated state on their own.
+    """
+
+    default_lifetime = 10
+
+    def __init__(self, lifetime=None):
+        self._data = None
+        self._cached = None
+        self._lifetime = lifetime or self.default_lifetime
+
+    @property
+    def data(self):
+        """Cached data/object.
+
+        Returns:
+            Any: Whatever was cached.
+        """
+
+        return self._data
+
+    @property
+    def is_outdated(self):
+        """Item has outdated cache.
+
+        Lifetime of cache item expired or was not yet set.
+
+        Returns:
+            bool: Item is outdated.
+        """
+
+        if self._cached is None:
+            return True
+        return (time.time() - self._cached) > self._lifetime
+
+    def update_data(self, data):
+        """Update cache of data.
+
+        Args:
+            data (Any): Data to cache.
+        """
+
+        self._data = data
+        self._cached = time.time()
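Usage sketch for the CacheItem helper above; callers are responsible for checking is_outdated and refreshing:

import time

cache = CacheItem(lifetime=2)
print(cache.is_outdated)   # -> True, nothing cached yet

cache.update_data({"name": "my_project"})
print(cache.is_outdated)   # -> False, freshly cached
print(cache.data)          # -> {'name': 'my_project'}

time.sleep(2.1)
print(cache.is_outdated)   # -> True again, lifetime expired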
+
+
+class Anatomy(BaseAnatomy):
+    _sync_server_addon_cache = CacheItem()
+    _project_cache = collections.defaultdict(CacheItem)
+    _default_site_id_cache = collections.defaultdict(CacheItem)
+    _root_overrides_cache = collections.defaultdict(
+        lambda: collections.defaultdict(CacheItem)
+    )
+
+    def __init__(self, project_name=None, site_name=None):
+        if not project_name:
+            project_name = os.environ.get("AVALON_PROJECT")
+
+        if not project_name:
+            raise ProjectNotSet((
+                "Implementation bug: Project name is not set. Anatomy requires"
+                " to load data for specific project."
+            ))
+
+        project_doc = self.get_project_doc_from_cache(project_name)
+        root_overrides = self._get_site_root_overrides(project_name, site_name)
+
+        super(Anatomy, self).__init__(project_doc, root_overrides)
+
+    @classmethod
+    def get_project_doc_from_cache(cls, project_name):
+        project_cache = cls._project_cache[project_name]
+        if project_cache.is_outdated:
+            project_cache.update_data(get_project(project_name))
+        return copy.deepcopy(project_cache.data)
+
+    @classmethod
+    def get_sync_server_addon(cls):
+        if cls._sync_server_addon_cache.is_outdated:
+            manager = ModulesManager()
+            cls._sync_server_addon_cache.update_data(
+                manager.get_enabled_module("sync_server")
+            )
+        return cls._sync_server_addon_cache.data
+
+    @classmethod
+    def _get_studio_roots_overrides(cls, project_name, local_settings=None):
+        """This would return 'studio' site override by local settings.
+
+        Notes:
+            This logic handles local overrides of studio site which may be
+            available even when sync server is not enabled.
+            Handling of 'studio' and 'local' site was separated as preparation
+            for AYON development where that will be received from
+            separated sources.
+
+        Args:
+            project_name (str): Name of project.
+            local_settings (Optional[dict[str, Any]]): Prepared local settings.
+
+        Returns:
+            Union[Dict[str, str], None]: Local root overrides.
+        """
+
+        if local_settings is None:
+            local_settings = get_local_settings()
+
+        local_project_settings = local_settings.get("projects") or {}
+        if not local_project_settings:
+            return None
+
+        # Check for roots existence in local settings first
+        roots_project_locals = (
@ -389,106 +496,59 @@ class BaseAnatomy(object):
|
|||
return
|
||||
|
||||
# Combine roots from local settings
|
||||
roots_locals = roots_default_locals.get(site_name) or {}
|
||||
roots_locals.update(roots_project_locals.get(site_name) or {})
|
||||
# Skip processing if roots for current active site are not available in
|
        # local settings
        if not roots_locals:
            return

        current_platform = platform.system().lower()

        root_data = anatomy_data["roots"]
        for root_name, path in roots_locals.items():
            if root_name not in root_data:
                continue
            anatomy_data["roots"][root_name][current_platform] = (
                path
            )


class Anatomy(BaseAnatomy):
    _project_cache = {}
    _site_cache = {}

    def __init__(self, project_name=None, site_name=None):
        if not project_name:
            project_name = os.environ.get("AVALON_PROJECT")

        if not project_name:
            raise ProjectNotSet((
                "Implementation bug: Project name is not set. Anatomy requires"
                " to load data for specific project."
            ))

        project_doc = self.get_project_doc_from_cache(project_name)
        local_settings = get_local_settings()
        if not site_name:
            site_name = self.get_site_name_from_cache(
                project_name, local_settings
            )

        super(Anatomy, self).__init__(
            project_doc,
            local_settings,
            site_name
        )

    @classmethod
    def get_project_doc_from_cache(cls, project_name):
        project_cache = cls._project_cache.get(project_name)
        if project_cache is not None:
            if time.time() - project_cache["start"] > 10:
                cls._project_cache.pop(project_name)
                project_cache = None

        if project_cache is None:
            project_cache = {
                "project_doc": get_project(project_name),
                "start": time.time()
            }
            cls._project_cache[project_name] = project_cache

        return copy.deepcopy(
            cls._project_cache[project_name]["project_doc"]
        )

    @classmethod
    def get_site_name_from_cache(cls, project_name, local_settings):
        site_cache = cls._site_cache.get(project_name)
        if site_cache is not None:
            if time.time() - site_cache["start"] > 10:
                cls._site_cache.pop(project_name)
                site_cache = None

        if site_cache:
            return site_cache["site_name"]

        local_project_settings = local_settings.get("projects")
        if not local_project_settings:
            return

        project_locals = local_project_settings.get(project_name) or {}
        default_locals = local_project_settings.get(DEFAULT_PROJECT_KEY) or {}
        active_site = (
            project_locals.get("active_site")
            or default_locals.get("active_site")
        )
        if not active_site:
            project_settings = get_project_settings(project_name)
            active_site = (
                project_settings
                ["global"]
                ["sync_server"]
                ["config"]
                ["active_site"]
            )

        cls._site_cache[project_name] = {
            "site_name": active_site,
            "start": time.time()
        }
        return active_site

        roots_locals = roots_default_locals.get("studio") or {}
        roots_locals.update(roots_project_locals.get("studio") or {})
        return roots_locals

    @classmethod
    def _get_site_root_overrides(cls, project_name, site_name):
        """Get root overrides for site.

        Args:
            project_name (str): Project name for which root overrides
                should be received.
            site_name (Union[str, None]): Name of site for which root
                overrides should be returned.
        """

        # Local settings may be used more than once or may not be used at all
        # - to avoid slowdowns 'get_local_settings' is not called until it's
        #   really needed
        local_settings = None

        # First check if sync server is available and enabled
        sync_server = cls.get_sync_server_addon()
        if sync_server is None or not sync_server.enabled:
            # QUESTION is ok to force 'studio' when site sync is not enabled?
            site_name = "studio"

        elif not site_name:
            # Use sync server to receive active site name
            project_cache = cls._default_site_id_cache[project_name]
            if project_cache.is_outdated:
                local_settings = get_local_settings()
                project_cache.update_data(
                    sync_server.get_active_site_type(
                        project_name, local_settings
                    )
                )
            site_name = project_cache.data

        site_cache = cls._root_overrides_cache[project_name][site_name]
        if site_cache.is_outdated:
            if site_name == "studio":
                # Handle studio root overrides without sync server
                # - studio root overrides can be done even without sync server
                roots_overrides = cls._get_studio_roots_overrides(
                    project_name, local_settings
                )
            else:
                # Ask sync server to get roots overrides
                roots_overrides = sync_server.get_site_root_overrides(
                    project_name, site_name, local_settings
                )
            site_cache.update_data(roots_overrides)
        return site_cache.data
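The constructor above resolves both the project name and the active site before delegating to `BaseAnatomy`. A minimal usage sketch, assuming `Anatomy` is re-exported from `openpype.pipeline` as in recent releases and that a project named "my_project" exists (both assumptions, not part of this changeset):

```python
import os

from openpype.pipeline import Anatomy

# Explicit project name; the site falls back to the cached active site.
anatomy = Anatomy("my_project")

# Without arguments the project is read from the AVALON_PROJECT
# environment variable; ProjectNotSet is raised when it is missing.
os.environ["AVALON_PROJECT"] = "my_project"
anatomy = Anatomy()
```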

class AnatomyTemplateUnsolved(TemplateUnsolved):
@@ -21,16 +21,15 @@ class LoaderPlugin(list):

    Arguments:
        context (dict): avalon-core:context-1.0
        name (str, optional): Use pre-defined name
        namespace (str, optional): Use pre-defined namespace

    .. versionadded:: 4.0
        This class was introduced

    """

    families = list()
    representations = list()
    families = []
    representations = []
    extensions = {"*"}
    order = 0
    is_multiple_contexts_compatible = False
    enabled = True
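With the new `extensions` attribute, concrete loaders can match any representation name and narrow compatibility by file extension instead of enumerating representation names. A minimal subclass sketch; the class name, families and extension set are illustrative, not part of this changeset:

```python
from openpype.pipeline import load


class LoadImage(load.LoaderPlugin):
    """Hypothetical loader accepting any representation name
    but only image-like extensions."""

    families = ["render", "plate"]
    # Match every representation name ...
    representations = ["*"]
    # ... and filter by file extension instead.
    extensions = {"exr", "dpx", "jpg", "jpeg", "png"}

    label = "Load image"
    order = -10

    def load(self, context, name=None, namespace=None, options=None):
        # Actual import logic would live here.
        raise NotImplementedError
```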
@@ -82,20 +81,67 @@ class LoaderPlugin(list):
            print(" - setting `{}`: `{}`".format(option, value))
            setattr(cls, option, value)

    @classmethod
    def has_valid_extension(cls, repre_doc):
        """Has representation document valid extension for loader.

        Args:
            repre_doc (dict[str, Any]): Representation document.

        Returns:
            bool: Representation has valid extension.
        """

        if "*" in cls.extensions:
            return True

        # Get representation main file extension from 'context'
        repre_context = repre_doc.get("context") or {}
        ext = repre_context.get("ext")
        if not ext:
            # Legacy way how to get extensions
            path = repre_doc.get("data", {}).get("path")
            if not path:
                cls.log.info(
                    "Representation doesn't have known source of extension"
                    " information."
                )
                return False

            cls.log.debug("Using legacy source of extension from path.")
            ext = os.path.splitext(path)[-1].lstrip(".")

        # If representation does not have extension then can't be valid
        if not ext:
            return False

        valid_extensions_low = {ext.lower() for ext in cls.extensions}
        return ext.lower() in valid_extensions_low

    @classmethod
    def is_compatible_loader(cls, context):
        """Return whether a loader is compatible with a context.

        On override make sure it is overridden as class or static method.

        This checks the version's families and the representation for the given
        Loader.
        loader plugin.

        Args:
            context (dict[str, Any]): Documents of context for which should
                be loader used.

        Returns:
            bool
            bool: Is loader compatible for context.
        """

        plugin_repre_names = cls.get_representations()
        plugin_families = cls.families
        if not plugin_repre_names or not plugin_families:
        if (
            not plugin_repre_names
            or not plugin_families
            or not cls.extensions
        ):
            return False

        repre_doc = context.get("representation")
@@ -109,17 +155,27 @@ class LoaderPlugin(list):
        ):
            return False

        maj_version, _ = schema.get_schema_version(context["subset"]["schema"])
        if maj_version < 3:
            families = context["version"]["data"].get("families", [])
        else:
            families = context["subset"]["data"]["families"]

        plugin_families = set(plugin_families)
        return (
            "*" in plugin_families
            or any(family in plugin_families for family in families)
        )

        if not cls.has_valid_extension(repre_doc):
            return False

        if "*" in plugin_families:
            return True

        subset_doc = context["subset"]
        maj_version, _ = schema.get_schema_version(subset_doc["schema"])
        if maj_version < 3:
            families = context["version"]["data"].get("families")
        else:
            families = subset_doc["data"].get("families")
            if families is None:
                family = subset_doc["data"].get("family")
                if family:
                    families = [family]

        if not families:
            return False
        return any(family in plugin_families for family in families)

    @classmethod
    def get_representations(cls):
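Taken together, compatibility is now a three-step filter: representation name, file extension, then family (with a fallback from `families` to the single `family` key). A rough sketch of the check, reusing the hypothetical `LoadImage` loader from the sketch above; the document shapes are trimmed and illustrative:

```python
# Illustrative context shape only; real documents carry many more keys.
context = {
    "subset": {"schema": "openpype:subset-3.0", "data": {"family": "render"}},
    "version": {"data": {}},
    "representation": {"name": "exr", "context": {"ext": "exr"}},
}

if LoadImage.is_compatible_loader(context):
    # The loader would be offered for this representation in the UI.
    print("compatible")
```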
@@ -129,11 +129,14 @@ class ExtractOIIOTranscode(publish.Extractor):
                        colorspace_data.get("display"))

                    # both could be already collected by DCC,
                    # but could be overwritten
                    # but could be overwritten when transcoding
                    if view:
                        new_repre["colorspaceData"]["view"] = view
                    if display:
                        new_repre["colorspaceData"]["display"] = display
                    if target_colorspace:
                        new_repre["colorspaceData"]["colorspace"] = \
                            target_colorspace

                    additional_command_args = (output_def["oiiotool_args"]
                                               ["additional_command_args"])
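For context, a rough sketch of the two pieces this loop reads: the collected colorspace data and the output definition. The shapes are trimmed to the keys used above and the values are illustrative, not taken from this changeset:

```python
# Illustrative shapes only - trimmed to the keys the loop above reads.
colorspace_data = {
    "view": "sRGB",
    "display": "ACES 1.0 SDR-video",
}
output_def = {
    "oiiotool_args": {
        "additional_command_args": ["--compression", "dwaa"],
    },
}
```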
@@ -114,17 +114,6 @@
                "render",
                "review"
            ],
            "representations": [
                "exr",
                "dpx",
                "jpg",
                "jpeg",
                "png",
                "h264",
                "mov",
                "mp4",
                "exr16fpdwaa"
            ],
            "reel_group_name": "OpenPype_Reels",
            "reel_name": "Loaded",
            "clip_name_template": "{asset}_{subset}<_{output}>",
@@ -143,17 +132,6 @@
                "render",
                "review"
            ],
            "representations": [
                "exr",
                "dpx",
                "jpg",
                "jpeg",
                "png",
                "h264",
                "mov",
                "mp4",
                "exr16fpdwaa"
            ],
            "reel_name": "OP_LoadedReel",
            "clip_name_template": "{batch}_{asset}_{subset}<_{output}>",
            "layer_rename_template": "{asset}_{subset}<_{output}>",
@@ -60,16 +60,6 @@
                "render",
                "review"
            ],
            "representations": [
                "exr",
                "dpx",
                "jpg",
                "jpeg",
                "png",
                "h264",
                "mov",
                "mp4"
            ],
            "clip_name_template": "{asset}_{subset}_{representation}"
        }
    },
@@ -179,6 +179,13 @@
                "Main"
            ]
        },
        "CreateReview": {
            "enabled": true,
            "defaults": [
                "Main"
            ],
            "useMayaTimeline": true
        },
        "CreateAss": {
            "enabled": true,
            "defaults": [
@@ -199,6 +206,14 @@
            "maskColor_manager": false,
            "maskOperator": false
        },
        "CreateVrayProxy": {
            "enabled": true,
            "vrmesh": true,
            "alembic": true,
            "defaults": [
                "Main"
            ]
        },
        "CreateMultiverseUsd": {
            "enabled": true,
            "defaults": [
@@ -247,12 +262,6 @@
                "Main"
            ]
        },
        "CreateReview": {
            "enabled": true,
            "defaults": [
                "Main"
            ]
        },
        "CreateRig": {
            "enabled": true,
            "defaults": [
@@ -268,12 +277,6 @@
                "Anim"
            ]
        },
        "CreateVrayProxy": {
            "enabled": true,
            "defaults": [
                "Main"
            ]
        },
        "CreateVRayScene": {
            "enabled": true,
            "defaults": [
@@ -676,7 +679,7 @@
            "families": [
                "pointcache",
                "model",
                "vrayproxy"
                "vrayproxy.alembic"
            ]
        },
        "ExtractObj": {
@@ -494,12 +494,6 @@
                "label": "Families",
                "object_type": "text"
            },
            {
                "type": "list",
                "key": "representations",
                "label": "Representations",
                "object_type": "text"
            },
            {
                "type": "separator"
            },
@@ -552,12 +546,6 @@
                "label": "Families",
                "object_type": "text"
            },
            {
                "type": "list",
                "key": "representations",
                "label": "Representations",
                "object_type": "text"
            },
            {
                "type": "separator"
            },
@@ -266,12 +266,6 @@
                "label": "Families",
                "object_type": "text"
            },
            {
                "type": "list",
                "key": "representations",
                "label": "Representations",
                "object_type": "text"
            },
            {
                "type": "text",
                "key": "clip_name_template",
@@ -334,4 +328,4 @@
            "name": "schema_scriptsmenu"
        }
    ]
}
}
@@ -240,6 +240,31 @@
            }
        ]
    },
    {
        "type": "dict",
        "collapsible": true,
        "key": "CreateReview",
        "label": "Create Review",
        "checkbox_key": "enabled",
        "children": [
            {
                "type": "boolean",
                "key": "enabled",
                "label": "Enabled"
            },
            {
                "type": "list",
                "key": "defaults",
                "label": "Default Subsets",
                "object_type": "text"
            },
            {
                "type": "boolean",
                "key": "useMayaTimeline",
                "label": "Use Maya Timeline for Frame Range."
            }
        ]
    },
    {
        "type": "dict",
        "collapsible": true,
@@ -332,6 +357,36 @@
            }
        ]
    },
    {
        "type": "dict",
        "collapsible": true,
        "key": "CreateVrayProxy",
        "label": "Create VRay Proxy",
        "checkbox_key": "enabled",
        "children": [
            {
                "type": "boolean",
                "key": "enabled",
                "label": "Enabled"
            },
            {
                "type": "boolean",
                "key": "vrmesh",
                "label": "VrMesh"
            },
            {
                "type": "boolean",
                "key": "alembic",
                "label": "Alembic"
            },
            {
                "type": "list",
                "key": "defaults",
                "label": "Default Subsets",
                "object_type": "text"
            }
        ]
    },
    {
        "type": "schema_template",
        "name": "template_create_plugin",
@@ -368,10 +423,6 @@
        "key": "CreateRenderSetup",
        "label": "Create Render Setup"
    },
    {
        "key": "CreateReview",
        "label": "Create Review"
    },
    {
        "key": "CreateRig",
        "label": "Create Rig"
@@ -380,10 +431,6 @@
        "key": "CreateSetDress",
        "label": "Create Set Dress"
    },
    {
        "key": "CreateVrayProxy",
        "label": "Create VRay Proxy"
    },
    {
        "key": "CreateVRayScene",
        "label": "Create VRay Scene"
@@ -339,7 +339,7 @@ class SubsetWidget(QtWidgets.QWidget):
        repre_docs = get_representations(
            project_name,
            version_ids=version_ids,
            fields=["name", "parent"]
            fields=["name", "parent", "data", "context"]
        )

        repre_docs_by_version_id = {
@@ -1264,7 +1264,7 @@ class RepresentationWidget(QtWidgets.QWidget):
        repre_docs = list(get_representations(
            project_name,
            representation_ids=repre_ids,
            fields=["name", "parent"]
            fields=["name", "parent", "data", "context"]
        ))

        version_ids = [
@@ -18,6 +18,7 @@ from openpype.client import (
)
from openpype.lib.events import EventSystem
from openpype.lib.attribute_definitions import (
    UIDef,
    serialize_attr_defs,
    deserialize_attr_defs,
)
@@ -1938,6 +1939,8 @@ class PublisherController(BasePublisherController):
            plugin_values = all_plugin_values[plugin_name]

            for attr_def in attr_defs:
                if isinstance(attr_def, UIDef):
                    continue
                if attr_def.key not in plugin_values:
                    plugin_values[attr_def.key] = []
                attr_values = plugin_values[attr_def.key]
@@ -146,6 +146,7 @@ class OverviewWidget(QtWidgets.QFrame):
        self._subset_list_view = subset_list_view
        self._subset_views_layout = subset_views_layout

        self._create_btn = create_btn
        self._delete_btn = delete_btn

        self._subset_attributes_widget = subset_attributes_widget
@@ -388,11 +389,13 @@ class OverviewWidget(QtWidgets.QFrame):
    def _on_publish_start(self):
        """Publish started."""

        self._create_btn.setEnabled(False)
        self._subset_attributes_wrap.setEnabled(False)

    def _on_publish_reset(self):
        """Context in controller has been refreshed."""

        self._create_btn.setEnabled(True)
        self._subset_attributes_wrap.setEnabled(True)
        self._subset_content_widget.setEnabled(self._controller.host_is_valid)
@@ -9,7 +9,7 @@ import collections
from qtpy import QtWidgets, QtCore, QtGui
import qtawesome

from openpype.lib.attribute_definitions import UnknownDef
from openpype.lib.attribute_definitions import UnknownDef, UIDef
from openpype.tools.attribute_defs import create_widget_for_attr_def
from openpype.tools import resources
from openpype.tools.flickcharm import FlickCharm
@@ -1442,7 +1442,16 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget):
        )

        content_widget = QtWidgets.QWidget(self._scroll_area)
        content_layout = QtWidgets.QFormLayout(content_widget)
        attr_def_widget = QtWidgets.QWidget(content_widget)
        attr_def_layout = QtWidgets.QGridLayout(attr_def_widget)
        attr_def_layout.setColumnStretch(0, 0)
        attr_def_layout.setColumnStretch(1, 1)

        content_layout = QtWidgets.QVBoxLayout(content_widget)
        content_layout.addWidget(attr_def_widget, 0)
        content_layout.addStretch(1)

        row = 0
        for plugin_name, attr_defs, all_plugin_values in result:
            plugin_values = all_plugin_values[plugin_name]
@@ -1459,8 +1468,29 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget):
                    hidden_widget = True

                if not hidden_widget:
                    expand_cols = 2
                    if attr_def.is_value_def and attr_def.is_label_horizontal:
                        expand_cols = 1

                    col_num = 2 - expand_cols
                    label = attr_def.label or attr_def.key
                    content_layout.addRow(label, widget)
                    if label:
                        label_widget = QtWidgets.QLabel(label, content_widget)
                        tooltip = attr_def.tooltip
                        if tooltip:
                            label_widget.setToolTip(tooltip)
                        attr_def_layout.addWidget(
                            label_widget, row, 0, 1, expand_cols
                        )
                        if not attr_def.is_label_horizontal:
                            row += 1
                    attr_def_layout.addWidget(
                        widget, row, col_num, 1, expand_cols
                    )
                    row += 1

                if isinstance(attr_def, UIDef):
                    continue

                widget.value_changed.connect(self._input_value_changed)
@@ -561,7 +561,8 @@ class PublisherWindow(QtWidgets.QDialog):
        return self._tabs_widget.is_current_tab(identifier)

    def _go_to_create_tab(self):
        self._set_current_tab("create")
        if self._create_tab.isEnabled():
            self._set_current_tab("create")

    def _go_to_publish_tab(self):
        self._set_current_tab("publish")
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.2-nightly.2"
__version__ = "3.15.2-nightly.3"
@@ -68,7 +68,7 @@ function Install-Poetry() {
    }

    $env:POETRY_HOME="$openpype_root\.poetry"
    # $env:POETRY_VERSION="1.1.15"
    $env:POETRY_VERSION="1.3.2"
    (Invoke-WebRequest -Uri https://install.python-poetry.org/ -UseBasicParsing).Content | & $($python) -
}
@@ -109,7 +109,7 @@ detect_python () {
install_poetry () {
  echo -e "${BIGreen}>>>${RST} Installing Poetry ..."
  export POETRY_HOME="$openpype_root/.poetry"
  # export POETRY_VERSION="1.1.15"
  export POETRY_VERSION="1.3.2"
  command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; }
  curl -sSL https://install.python-poetry.org/ | python -
}
@@ -28,16 +28,16 @@ For [AWS Thinkbox Deadline](https://www.awsthinkbox.com/deadline) support you ne
OpenPype integration for Deadline consists of two parts:

- The `OpenPype` Deadline Plug-in
- A `GlobalJobPreLoad` Deadline Script (this gets triggered for each Deadline job)

The `GlobalJobPreLoad` handles populating render and publish jobs with proper environment variables using settings from the `OpenPype` Deadline Plug-in.

The `OpenPype` Deadline Plug-in must be configured to point to a valid OpenPype executable location. The executable needs to be installed to
destinations accessible by the Deadline process. Check permissions (it must be executable and accessible by the Deadline process).

- Enable `Tools > Super User Mode` in Deadline Monitor

- Go to `Tools > Configure Plugins...`, find `OpenPype` in the list on the left side and set the location of the OpenPype
executable. It is recommended to use the `openpype_console` executable as it provides a bit more logging.

- In case of multi OS farms, provide multiple locations; each Deadline Worker goes through the list and tries to find the first accessible
@@ -45,12 +45,22 @@ executable. It is recommended to use the `openpype_console` executable as it pro

### Pools

The main pools can be configured at `project_settings/deadline/publish/CollectDeadlinePools/primary_pool`, which is applied to the rendering jobs.

The dependent publishing job's pool uses `project_settings/deadline/publish/ProcessSubmittedJobOnFarm/deadline_pool`. If nothing is specified the pool will fall back to the primary pool above.

:::note maya tile rendering
The logic for publishing job pool assignment applies to tiling jobs.
:::
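A sketch of how those two settings might look in a project settings override; the key layout follows the paths above, while the pool names are illustrative:

```json
{
    "deadline": {
        "publish": {
            "CollectDeadlinePools": {
                "primary_pool": "render_pool"
            },
            "ProcessSubmittedJobOnFarm": {
                "deadline_pool": "publish_pool"
            }
        }
    }
}
```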
## Troubleshooting

#### Publishing jobs fail directly in DCCs

- Double check that all previously described steps were finished
- Check that `deadlinewebservice` is running on the DL server
- Check that the user's machine has access to the Deadline server on the configured port

#### Jobs are failing on DL side
@@ -61,40 +71,40 @@ Each publishing from OpenPype consists of 2 jobs, first one is rendering, second

- Jobs are failing with `OpenPype executable was not found` error

  Check if OpenPype is installed on the Worker handling this job and ensure the `OpenPype` Deadline Plug-in is properly [configured](#configuration)

- Publishing job is failing with `ffmpeg not installed` error

  The OpenPype executable has to have access to the `ffmpeg` executable, check OpenPype `Settings > General`

- Both jobs finished successfully, but there is no review on Ftrack

  Make sure that you correctly set the published family to be sent to Ftrack.

  Example: I want to send a review of rendered images from Harmony to Ftrack:
  - `Host names`: "harmony"
  - `Families`: "render"
  - `Add Ftrack Family` to "Enabled"

  Make sure that you actually configured review creation for the published subset in `project_settings/ftrack/publish/CollectFtrackFamily`

  Example: I want to create a review for all reviewable subsets in Harmony:
  - Add "harmony" as a new key and ".*" as a value, as in the sketch below.
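A sketch of what such a `CollectFtrackFamily` profile could look like in project settings. The exact profile schema is simplified here and the key names are assumptions, not taken from this changeset:

```json
{
    "ftrack": {
        "publish": {
            "CollectFtrackFamily": {
                "profiles": [
                    {
                        "host_names": ["harmony"],
                        "families": ["render"],
                        "add_ftrack_family": true,
                        "advanced_filtering": [
                            {
                                "name": "harmony",
                                "value": ".*"
                            }
                        ]
                    }
                ]
            }
        }
    }
}
```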
- Rendering jobs are stuck in 'Queued' state or failing

  Make sure that your Deadline is not limiting specific jobs to run only on specific machines (e.g. only some machines have a particular application installed).

  Check `project_settings/deadline`

  Example: I have separated machines with "Harmony" installed into a "harmony" group on Deadline. I want rendering jobs published from Harmony to run only on those machines.