Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-25 05:14:40 +01:00

[Automated] Merged develop into main

This commit is contained in: commit 54123c0ae1

39 changed files with 985 additions and 480 deletions
@@ -120,13 +120,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
             track = sitem.parentTrack().name()
             # node serialization
             node = sitem.node()
-            node_serialized = self.node_serialisation(node)
+            node_serialized = self.node_serialization(node)
             node_name = sitem.name()
-
-            if "_" in node_name:
-                node_class = re.sub(r"(?:_)[_0-9]+", "", node_name)  # more numbers
-            else:
-                node_class = re.sub(r"\d+", "", node_name)  # one number
+            node_class = node.Class()

             # collect timelineIn/Out
             effect_t_in = int(sitem.timelineIn())

@@ -148,7 +144,7 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
                 "node": node_serialized
             }}

-    def node_serialisation(self, node):
+    def node_serialization(self, node):
         node_serialized = {}

         # adding ignoring knob keys
@@ -4,7 +4,6 @@ import os
 import sys
-import platform
 import uuid
 import math
 import re

 import json

@@ -2064,13 +2063,8 @@ def set_scene_resolution(width, height, pixelAspect):
     cmds.setAttr("%s.pixelAspect" % control_node, pixelAspect)


-def reset_frame_range():
-    """Set frame range to current asset"""
-
-    fps = convert_to_maya_fps(
-        float(legacy_io.Session.get("AVALON_FPS", 25))
-    )
-    set_scene_fps(fps)
+def get_frame_range():
+    """Get the current assets frame range and handles."""

     # Set frame start/end
     project_name = legacy_io.active_project()

@@ -2097,8 +2091,26 @@ def reset_frame_range():
     if handle_end is None:
         handle_end = handles

-    frame_start -= int(handle_start)
-    frame_end += int(handle_end)
+    return {
+        "frameStart": frame_start,
+        "frameEnd": frame_end,
+        "handleStart": handle_start,
+        "handleEnd": handle_end
+    }
+
+
+def reset_frame_range():
+    """Set frame range to current asset"""
+
+    fps = convert_to_maya_fps(
+        float(legacy_io.Session.get("AVALON_FPS", 25))
+    )
+    set_scene_fps(fps)
+
+    frame_range = get_frame_range()
+
+    frame_start = frame_range["frameStart"] - int(frame_range["handleStart"])
+    frame_end = frame_range["frameEnd"] + int(frame_range["handleEnd"])

     cmds.playbackOptions(minTime=frame_start)
     cmds.playbackOptions(maxTime=frame_end)
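For context, this refactor splits the old reset_frame_range in two: get_frame_range reads the asset's frame range and handles, while reset_frame_range widens the playback range by those handles. A minimal sketch of the arithmetic outside Maya, assuming the dict shape returned above (the numbers are made up):

    frame_range = {
        "frameStart": 1001,
        "frameEnd": 1100,
        "handleStart": 10,
        "handleEnd": 10,
    }

    # Playback range is the content range padded by handles on both sides.
    frame_start = frame_range["frameStart"] - int(frame_range["handleStart"])
    frame_end = frame_range["frameEnd"] + int(frame_range["handleEnd"])
    print(frame_start, frame_end)  # 991 1110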
@@ -3562,3 +3574,34 @@ def get_color_management_output_transform():
     if preferences["output_transform_enabled"]:
         colorspace = preferences["output_transform"]
     return colorspace
+
+
+def len_flattened(components):
+    """Return the length of the list as if it was flattened.
+
+    Maya will return consecutive components as a single entry
+    when requesting with `maya.cmds.ls` without the `flatten`
+    flag. Though enabling `flatten` on a large list (e.g. millions)
+    will result in a slow result. This command will return the amount
+    of entries in a non-flattened list by parsing the result with
+    regex.
+
+    Args:
+        components (list): The non-flattened components.
+
+    Returns:
+        int: The amount of entries.
+
+    """
+    assert isinstance(components, (list, tuple))
+    n = 0
+
+    pattern = re.compile(r"\[(\d+):(\d+)\]")
+    for c in components:
+        match = pattern.search(c)
+        if match:
+            start, end = match.groups()
+            n += int(end) - int(start) + 1
+        else:
+            n += 1
+    return n
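To illustrate len_flattened: maya.cmds.ls compacts consecutive components into range strings, so the count can be derived from those ranges without flattening. A small standalone example with made-up component names:

    import re

    # Component strings as maya.cmds.ls would return them (hypothetical names)
    components = ["pCube1.vtx[0:7]", "pCube1.vtx[10]", "pSphere1.vtx[3:5]"]

    pattern = re.compile(r"\[(\d+):(\d+)\]")
    n = 0
    for c in components:
        match = pattern.search(c)
        if match:
            start, end = match.groups()
            n += int(end) - int(start) + 1  # inclusive range
        else:
            n += 1  # single component entry
    print(n)  # 12 == 8 + 1 + 3 flattened entries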
@@ -25,16 +25,20 @@ class CreateReview(plugin.Creator):
         "depth peeling",
         "alpha cut"
     ]
+    useMayaTimeline = True

     def __init__(self, *args, **kwargs):
         super(CreateReview, self).__init__(*args, **kwargs)

-        # get basic animation data : start / end / handles / steps
         data = OrderedDict(**self.data)
-        animation_data = lib.collect_animation_data(fps=True)
-        for key, value in animation_data.items():
+        # Option for using Maya or asset frame range in settings.
+        frame_range = lib.get_frame_range()
+        if self.useMayaTimeline:
+            frame_range = lib.collect_animation_data(fps=True)
+        for key, value in frame_range.items():
             data[key] = value

+        data["fps"] = lib.collect_animation_data(fps=True)["fps"]
         data["review_width"] = self.Width
         data["review_height"] = self.Height
         data["isolate"] = self.isolate
@@ -9,6 +9,9 @@ class CreateVrayProxy(plugin.Creator):
     family = "vrayproxy"
     icon = "gears"

+    vrmesh = True
+    alembic = True
+
     def __init__(self, *args, **kwargs):
         super(CreateVrayProxy, self).__init__(*args, **kwargs)

@@ -18,3 +21,6 @@ class CreateVrayProxy(plugin.Creator):

         # Write vertex colors
         self.data["vertexColors"] = False
+
+        self.data["vrmesh"] = self.vrmesh
+        self.data["alembic"] = self.alembic
@@ -137,6 +137,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
             # Create the instance
             instance = context.create_instance(objset)
             instance[:] = members_hierarchy
+            instance.data["objset"] = objset

             # Store the exact members of the object set
             instance.data["setMembers"] = members
@@ -9,10 +9,16 @@ class CollectVrayProxy(pyblish.api.InstancePlugin):
     Add `pointcache` family for it.
     """
     order = pyblish.api.CollectorOrder + 0.01
-    label = 'Collect Vray Proxy'
+    label = "Collect Vray Proxy"
     families = ["vrayproxy"]

     def process(self, instance):
         """Collector entry point."""
         if not instance.data.get('families'):
             instance.data["families"] = []
+
+        if instance.data.get("vrmesh"):
+            instance.data["families"].append("vrayproxy.vrmesh")
+
+        if instance.data.get("alembic"):
+            instance.data["families"].append("vrayproxy.alembic")
@@ -23,7 +23,7 @@ class ExtractAlembic(publish.Extractor):

     label = "Extract Pointcache (Alembic)"
     hosts = ["maya"]
-    families = ["pointcache", "model", "vrayproxy"]
+    families = ["pointcache", "model", "vrayproxy.alembic"]
     targets = ["local", "remote"]

     def process(self, instance):
@@ -16,7 +16,7 @@ class ExtractVRayProxy(publish.Extractor):

     label = "VRay Proxy (.vrmesh)"
     hosts = ["maya"]
-    families = ["vrayproxy"]
+    families = ["vrayproxy.vrmesh"]

     def process(self, instance):
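The three hunks above split the single vrayproxy family into vrayproxy.vrmesh and vrayproxy.alembic subsets, so each extractor only runs for the formats the creator enabled. A rough sketch of how such family lists gate plugins (plain Python, not pyblish's actual matching logic):

    # Hypothetical instance data as the collector above would leave it
    instance_families = ["vrayproxy", "vrayproxy.alembic"]

    def plugin_matches(plugin_families, families):
        # Simplified: a plugin runs if any of its families is on the instance.
        return any(family in families for family in plugin_families)

    print(plugin_matches(["vrayproxy.vrmesh"], instance_families))   # False
    print(plugin_matches(["vrayproxy.alembic"], instance_families))  # True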
@@ -57,6 +57,10 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):

         inst_start = int(instance.data.get("frameStartHandle"))
         inst_end = int(instance.data.get("frameEndHandle"))
+        inst_frame_start = int(instance.data.get("frameStart"))
+        inst_frame_end = int(instance.data.get("frameEnd"))
+        inst_handle_start = int(instance.data.get("handleStart"))
+        inst_handle_end = int(instance.data.get("handleEnd"))

         # basic sanity checks
         assert frame_start_handle <= frame_end_handle, (

@@ -69,24 +73,37 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
         if [ef for ef in self.exclude_families
                 if instance.data["family"] in ef]:
             return
-        if(inst_start != frame_start_handle):
+        if (inst_start != frame_start_handle):
             errors.append("Instance start frame [ {} ] doesn't "
-                          "match the one set on instance [ {} ]: "
+                          "match the one set on asset [ {} ]: "
                           "{}/{}/{}/{} (handle/start/end/handle)".format(
                               inst_start,
                               frame_start_handle,
                               handle_start, frame_start, frame_end, handle_end
                           ))

-        if(inst_end != frame_end_handle):
+        if (inst_end != frame_end_handle):
             errors.append("Instance end frame [ {} ] doesn't "
-                          "match the one set on instance [ {} ]: "
+                          "match the one set on asset [ {} ]: "
                           "{}/{}/{}/{} (handle/start/end/handle)".format(
                               inst_end,
                               frame_end_handle,
                               handle_start, frame_start, frame_end, handle_end
                           ))

+        checks = {
+            "frame start": (frame_start, inst_frame_start),
+            "frame end": (frame_end, inst_frame_end),
+            "handle start": (handle_start, inst_handle_start),
+            "handle end": (handle_end, inst_handle_end)
+        }
+        for label, values in checks.items():
+            if values[0] != values[1]:
+                errors.append(
+                    "{} on instance ({}) does not match with the asset "
+                    "({}).".format(label.title(), values[1], values[0])
+                )
+
         for e in errors:
             self.log.error(e)
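The checks mapping added above makes the asset/instance comparison table-driven. The same pattern in isolation (the values are invented):

    # (asset value, instance value) pairs, as in the validator above
    checks = {
        "frame start": (1001, 1001),
        "frame end": (1100, 1150),
    }
    errors = []
    for label, values in checks.items():
        if values[0] != values[1]:
            errors.append(
                "{} on instance ({}) does not match with the asset "
                "({}).".format(label.title(), values[1], values[0])
            )
    print(errors)  # only the "frame end" mismatch is reported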
@@ -0,0 +1,60 @@
+from maya import cmds
+
+import pyblish.api
+from openpype.pipeline.publish import (
+    ValidateContentsOrder, PublishValidationError, RepairAction
+)
+from openpype.pipeline import discover_legacy_creator_plugins
+from openpype.hosts.maya.api.lib import imprint
+
+
+class ValidateInstanceAttributes(pyblish.api.InstancePlugin):
+    """Validate Instance Attributes.
+
+    New attributes can be introduced as new features come in. Old instances
+    will need to be updated with these attributes for the documentation to
+    make sense, and users do not have to recreate the instances.
+    """
+
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["*"]
+    label = "Instance Attributes"
+    plugins_by_family = {
+        p.family: p for p in discover_legacy_creator_plugins()
+    }
+    actions = [RepairAction]
+
+    @classmethod
+    def get_missing_attributes(self, instance):
+        plugin = self.plugins_by_family[instance.data["family"]]
+        subset = instance.data["subset"]
+        asset = instance.data["asset"]
+        objset = instance.data["objset"]
+
+        missing_attributes = {}
+        for key, value in plugin(subset, asset).data.items():
+            if not cmds.objExists("{}.{}".format(objset, key)):
+                missing_attributes[key] = value
+
+        return missing_attributes
+
+    def process(self, instance):
+        objset = instance.data.get("objset")
+        if objset is None:
+            self.log.debug(
+                "Skipping {} because no objectset found.".format(instance)
+            )
+            return
+
+        missing_attributes = self.get_missing_attributes(instance)
+        if missing_attributes:
+            raise PublishValidationError(
+                "Missing attributes on {}:\n{}".format(
+                    objset, missing_attributes
+                )
+            )
+
+    @classmethod
+    def repair(cls, instance):
+        imprint(instance.data["objset"], cls.get_missing_attributes(instance))
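The repair path above relies on imprint from openpype.hosts.maya.api.lib, which writes a dict of key/value pairs as attributes onto a node (here the instance's object set). A hedged sketch of the call, with a hypothetical object set name and attribute:

    # Inside a Maya session with OpenPype available; names are hypothetical.
    from openpype.hosts.maya.api.lib import imprint

    # Adds the missing attribute(s) to the instance object set so old
    # instances pick up newly introduced creator attributes.
    imprint("modelMain_SET", {"vertexColors": False})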
54 openpype/hosts/maya/plugins/publish/validate_mesh_empty.py Normal file
@@ -0,0 +1,54 @@
+from maya import cmds
+
+import pyblish.api
+import openpype.hosts.maya.api.action
+from openpype.pipeline.publish import (
+    RepairAction,
+    ValidateMeshOrder
+)
+
+
+class ValidateMeshEmpty(pyblish.api.InstancePlugin):
+    """Validate meshes have some vertices.
+
+    Its possible to have meshes without any vertices. To replicate
+    this issue, delete all faces/polygons then all edges.
+    """
+
+    order = ValidateMeshOrder
+    hosts = ["maya"]
+    families = ["model"]
+    label = "Mesh Empty"
+    actions = [
+        openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction
+    ]
+
+    @classmethod
+    def repair(cls, instance):
+        invalid = cls.get_invalid(instance)
+        for node in invalid:
+            cmds.delete(node)
+
+    @classmethod
+    def get_invalid(cls, instance):
+        invalid = []
+
+        meshes = cmds.ls(instance, type="mesh", long=True)
+        for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "\"{}\" does not have any vertices.".format(mesh)
+                )
+                invalid.append(mesh)
+
+        return invalid
+
+    def process(self, instance):
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError(
+                "Meshes found in instance without any vertices: %s" % invalid
+            )
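The docstring above notes that an "empty" mesh can be produced by deleting all faces and then all edges. A hedged way to reproduce and detect one inside a Maya session, following that recipe (node names are whatever Maya assigns):

    # Run inside Maya. Creates a cube, strips it down per the docstring's
    # recipe, then counts vertices per mesh the same way the validator does.
    from maya import cmds

    transform = cmds.polyCube()[0]
    cmds.delete(transform + ".f[*]")  # delete all faces
    cmds.delete(transform + ".e[*]")  # then all edges

    for mesh in cmds.ls(type="mesh", long=True):
        num_vertices = cmds.polyEvaluate(mesh, vertex=True)
        print(mesh, num_vertices)  # the stripped shape should report 0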
@@ -1,39 +1,9 @@
-import re
-
 from maya import cmds

 import pyblish.api
 import openpype.hosts.maya.api.action
 from openpype.pipeline.publish import ValidateMeshOrder
-
-
-def len_flattened(components):
-    """Return the length of the list as if it was flattened.
-
-    Maya will return consecutive components as a single entry
-    when requesting with `maya.cmds.ls` without the `flatten`
-    flag. Though enabling `flatten` on a large list (e.g. millions)
-    will result in a slow result. This command will return the amount
-    of entries in a non-flattened list by parsing the result with
-    regex.
-
-    Args:
-        components (list): The non-flattened components.
-
-    Returns:
-        int: The amount of entries.
-
-    """
-    assert isinstance(components, (list, tuple))
-    n = 0
-    for c in components:
-        match = re.search("\[([0-9]+):([0-9]+)\]", c)
-        if match:
-            start, end = match.groups()
-            n += int(end) - int(start) + 1
-        else:
-            n += 1
-    return n
+from openpype.hosts.maya.api.lib import len_flattened


 class ValidateMeshHasUVs(pyblish.api.InstancePlugin):

@@ -57,6 +27,15 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
         invalid = []

         for node in cmds.ls(instance, type='mesh'):
+            num_vertices = cmds.polyEvaluate(node, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(node)
+                )
+                continue
+
             uv = cmds.polyEvaluate(node, uv=True)

             if uv == 0:
@@ -28,7 +28,10 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance):
         """Return the invalid edges.
-        Also see: http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup
+
+        Also see:
+
+            http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup

         """

@@ -36,8 +39,21 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
         if not meshes:
             return list()

+        valid_meshes = []
+        for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
+            valid_meshes.append(mesh)
+
         # Get all edges
-        edges = ['{0}.e[*]'.format(node) for node in meshes]
+        edges = ['{0}.e[*]'.format(node) for node in valid_meshes]

         # Filter by constraint on edge length
         invalid = lib.polyConstraint(edges,
@@ -1,5 +1,3 @@
-import re
-
 from maya import cmds

 import pyblish.api

@@ -8,37 +6,7 @@ from openpype.pipeline.publish import (
     RepairAction,
     ValidateMeshOrder,
 )
-
-
-def len_flattened(components):
-    """Return the length of the list as if it was flattened.
-
-    Maya will return consecutive components as a single entry
-    when requesting with `maya.cmds.ls` without the `flatten`
-    flag. Though enabling `flatten` on a large list (e.g. millions)
-    will result in a slow result. This command will return the amount
-    of entries in a non-flattened list by parsing the result with
-    regex.
-
-    Args:
-        components (list): The non-flattened components.
-
-    Returns:
-        int: The amount of entries.
-
-    """
-    assert isinstance(components, (list, tuple))
-    n = 0
-
-    pattern = re.compile(r"\[(\d+):(\d+)\]")
-    for c in components:
-        match = pattern.search(c)
-        if match:
-            start, end = match.groups()
-            n += int(end) - int(start) + 1
-        else:
-            n += 1
-    return n
+from openpype.hosts.maya.api.lib import len_flattened


 class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):

@@ -87,6 +55,13 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
         for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
             # Vertices from all edges
             edges = "%s.e[*]" % mesh
             vertices = cmds.polyListComponentConversion(edges, toVertex=True)
@@ -2,9 +2,11 @@
 """Validate model nodes names."""
 import os
 import re
-from maya import cmds
-import pyblish.api
+import platform
+
+from maya import cmds
+
+import pyblish.api
 from openpype.pipeline import legacy_io
 from openpype.pipeline.publish import ValidateContentsOrder
 import openpype.hosts.maya.api.action

@@ -44,7 +46,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
                 if not cmds.ls(child, transforms=True):
                     return False
                 return True
-            except:
+            except Exception:
                 return False

         invalid = []

@@ -94,9 +96,10 @@ class ValidateModelName(pyblish.api.InstancePlugin):
         # load shader list file as utf-8
         shaders = []
         if not use_db:
-            if cls.material_file:
-                if os.path.isfile(cls.material_file):
-                    shader_file = open(cls.material_file, "r")
+            material_file = cls.material_file[platform.system().lower()]
+            if material_file:
+                if os.path.isfile(material_file):
+                    shader_file = open(material_file, "r")
                     shaders = shader_file.readlines()
                     shader_file.close()
             else:

@@ -113,7 +116,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
                 shader_file.close()

         # strip line endings from list
-        shaders = map(lambda s: s.rstrip(), shaders)
+        shaders = [s.rstrip() for s in shaders if s.rstrip()]

         # compile regex for testing names
         regex = cls.regex
@@ -1,27 +1,31 @@
 import pyblish.api

+from openpype.pipeline import KnownPublishError
+

 class ValidateVrayProxy(pyblish.api.InstancePlugin):

     order = pyblish.api.ValidatorOrder
-    label = 'VRay Proxy Settings'
-    hosts = ['maya']
-    families = ['studio.vrayproxy']
+    label = "VRay Proxy Settings"
+    hosts = ["maya"]
+    families = ["vrayproxy"]

     def process(self, instance):
-
-        invalid = self.get_invalid(instance)
-        if invalid:
-            raise RuntimeError("'%s' has invalid settings for VRay Proxy "
-                               "export!" % instance.name)
-
-    @classmethod
-    def get_invalid(cls, instance):
         data = instance.data

         if not data["setMembers"]:
-            cls.log.error("'%s' is empty! This is a bug" % instance.name)
+            raise KnownPublishError(
+                "'%s' is empty! This is a bug" % instance.name
+            )

         if data["animation"]:
             if data["frameEnd"] < data["frameStart"]:
-                cls.log.error("End frame is smaller than start frame")
+                raise KnownPublishError(
+                    "End frame is smaller than start frame"
+                )
+
+        if not data["vrmesh"] and not data["alembic"]:
+            raise KnownPublishError(
+                "Both vrmesh and alembic are off. Needs at least one to"
+                " publish."
+            )
@@ -1264,7 +1264,7 @@ def convert_to_valid_instaces():
             creator_attr["farm_chunk"] = (
                 node["deadlineChunkSize"].value())
         if "deadlineConcurrentTasks" in node.knobs():
-            creator_attr["farm_concurency"] = (
+            creator_attr["farm_concurrency"] = (
                 node["deadlineConcurrentTasks"].value())

         _remove_old_knobs(node)
@@ -6,10 +6,7 @@ from openpype.pipeline import (
     CreatedInstance
 )
 from openpype.lib import (
-    BoolDef,
-    NumberDef,
-    UISeparatorDef,
-    UILabelDef
+    BoolDef
 )
 from openpype.hosts.nuke import api as napi

@@ -49,33 +46,6 @@ class CreateWritePrerender(napi.NukeWriteCreator):
             self._get_render_target_enum(),
             self._get_reviewable_bool()
         ]
-        if "farm_rendering" in self.instance_attributes:
-            attr_defs.extend([
-                UISeparatorDef(),
-                UILabelDef("Farm rendering attributes"),
-                BoolDef("suspended_publish", label="Suspended publishing"),
-                NumberDef(
-                    "farm_priority",
-                    label="Priority",
-                    minimum=1,
-                    maximum=99,
-                    default=50
-                ),
-                NumberDef(
-                    "farm_chunk",
-                    label="Chunk size",
-                    minimum=1,
-                    maximum=99,
-                    default=10
-                ),
-                NumberDef(
-                    "farm_concurency",
-                    label="Concurent tasks",
-                    minimum=1,
-                    maximum=10,
-                    default=1
-                )
-            ])
         return attr_defs

     def create_instance_node(self, subset_name, instance_data):
@@ -6,10 +6,7 @@ from openpype.pipeline import (
     CreatedInstance
 )
 from openpype.lib import (
-    BoolDef,
-    NumberDef,
-    UISeparatorDef,
-    UILabelDef
+    BoolDef
 )
 from openpype.hosts.nuke import api as napi

@@ -46,33 +43,6 @@ class CreateWriteRender(napi.NukeWriteCreator):
             self._get_render_target_enum(),
             self._get_reviewable_bool()
         ]
-        if "farm_rendering" in self.instance_attributes:
-            attr_defs.extend([
-                UISeparatorDef(),
-                UILabelDef("Farm rendering attributes"),
-                BoolDef("suspended_publish", label="Suspended publishing"),
-                NumberDef(
-                    "farm_priority",
-                    label="Priority",
-                    minimum=1,
-                    maximum=99,
-                    default=50
-                ),
-                NumberDef(
-                    "farm_chunk",
-                    label="Chunk size",
-                    minimum=1,
-                    maximum=99,
-                    default=10
-                ),
-                NumberDef(
-                    "farm_concurency",
-                    label="Concurent tasks",
-                    minimum=1,
-                    maximum=10,
-                    default=1
-                )
-            ])
         return attr_defs

     def create_instance_node(self, subset_name, instance_data):
@@ -132,14 +132,14 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             self.log.info("Publishing rendered frames ...")

         elif render_target == "farm":
-            farm_priority = creator_attributes.get("farm_priority")
-            farm_chunk = creator_attributes.get("farm_chunk")
-            farm_concurency = creator_attributes.get("farm_concurency")
-            instance.data.update({
-                "deadlineChunkSize": farm_chunk or 1,
-                "deadlinePriority": farm_priority or 50,
-                "deadlineConcurrentTasks": farm_concurency or 0
-            })
+            farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"]
+            for key in farm_keys:
+                # Skip if key is not in creator attributes
+                if key not in creator_attributes:
+                    continue
+                # Add farm attributes to instance
+                instance.data[key] = creator_attributes[key]

             # Farm rendering
             instance.data["transfer"] = False
             instance.data["farm"] = True
@@ -419,8 +419,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
         assembly_job_info.Name += " - Tile Assembly Job"
         assembly_job_info.Frames = 1
         assembly_job_info.MachineLimit = 1
-        assembly_job_info.Priority = instance.data.get("tile_priority",
-                                                       self.tile_priority)
+        assembly_job_info.Priority = instance.data.get(
+            "tile_priority", self.tile_priority
+        )
+
+        pool = instance.context.data["project_settings"]["deadline"]
+        pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"]
+        assembly_job_info.Pool = pool or instance.data.get("primaryPool", "")

         assembly_plugin_info = {
             "CleanupTiles": 1,
@@ -9,11 +9,19 @@ import pyblish.api

 import nuke
 from openpype.pipeline import legacy_io
+from openpype.pipeline.publish import (
+    OpenPypePyblishPluginMixin
+)
 from openpype.tests.lib import is_in_tests
-from openpype.lib import is_running_from_build
+from openpype.lib import (
+    is_running_from_build,
+    BoolDef,
+    NumberDef
+)


-class NukeSubmitDeadline(pyblish.api.InstancePlugin):
+class NukeSubmitDeadline(pyblish.api.InstancePlugin,
+                         OpenPypePyblishPluginMixin):
     """Submit write to Deadline

     Renders are submitted to a Deadline Web Service as

@@ -21,10 +29,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

     """

-    label = "Submit to Deadline"
+    label = "Submit Nuke to Deadline"
     order = pyblish.api.IntegratorOrder + 0.1
-    hosts = ["nuke", "nukestudio"]
-    families = ["render.farm", "prerender.farm"]
+    hosts = ["nuke"]
+    families = ["render", "prerender.farm"]
     optional = True
     targets = ["local"]

@@ -39,7 +47,42 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
     env_allowed_keys = []
     env_search_replace_values = {}

+    @classmethod
+    def get_attribute_defs(cls):
+        return [
+            NumberDef(
+                "priority",
+                label="Priority",
+                default=cls.priority,
+                decimals=0
+            ),
+            NumberDef(
+                "chunk",
+                label="Frames Per Task",
+                default=cls.chunk_size,
+                decimals=0,
+                minimum=1,
+                maximum=1000
+            ),
+            NumberDef(
+                "concurrency",
+                label="Concurency",
+                default=cls.concurrent_tasks,
+                decimals=0,
+                minimum=1,
+                maximum=10
+            ),
+            BoolDef(
+                "use_gpu",
+                default=cls.use_gpu,
+                label="Use GPU"
+            )
+        ]
+
     def process(self, instance):
+        instance.data["attributeValues"] = self.get_attr_values_from_data(
+            instance.data)
+
         instance.data["toBeRenderedOn"] = "deadline"
         families = instance.data["families"]
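For context, get_attr_values_from_data (from OpenPypePyblishPluginMixin) resolves the values artists set for the definitions above, and the submission code later falls back to the plugin's defaults when a value is absent. The lookup pattern in isolation (the data shapes here are assumptions, stand-ins for the real plugin):

    # Plain-Python sketch of the override-with-fallback lookups used below.
    class _Defaults:  # stand-in for the plugin's settings-driven attributes
        priority = 50
        chunk_size = 10
        concurrent_tasks = 1

    instance_data = {"attributeValues": {"priority": 80}}  # artist override

    attribute_values = instance_data.get("attributeValues", {})
    job_info = {
        "Priority": attribute_values.get("priority", _Defaults.priority),
        "ChunkSize": attribute_values.get("chunk", _Defaults.chunk_size),
        "ConcurrentTasks": attribute_values.get(
            "concurrency", _Defaults.concurrent_tasks),
    }
    print(job_info)  # {'Priority': 80, 'ChunkSize': 10, 'ConcurrentTasks': 1}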
@@ -141,7 +184,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         exe_node_name,
         start_frame,
         end_frame,
-        responce_data=None
+        response_data=None
     ):
         render_dir = os.path.normpath(os.path.dirname(render_path))
         batch_name = os.path.basename(script_path)

@@ -152,8 +195,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

         output_filename_0 = self.preview_fname(render_path)

-        if not responce_data:
-            responce_data = {}
+        if not response_data:
+            response_data = {}

         try:
             # Ensure render folder exists

@@ -161,20 +204,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         except OSError:
             pass

-        # define chunk and priority
-        chunk_size = instance.data["deadlineChunkSize"]
-        if chunk_size == 0 and self.chunk_size:
-            chunk_size = self.chunk_size
-
-        # define chunk and priority
-        concurrent_tasks = instance.data["deadlineConcurrentTasks"]
-        if concurrent_tasks == 0 and self.concurrent_tasks:
-            concurrent_tasks = self.concurrent_tasks
-
-        priority = instance.data["deadlinePriority"]
-        if not priority:
-            priority = self.priority
-
         # resolve any limit groups
         limit_groups = self.get_limit_groups()
         self.log.info("Limit groups: `{}`".format(limit_groups))

@@ -193,9 +222,14 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             # Arbitrary username, for visualisation in Monitor
             "UserName": self._deadline_user,

-            "Priority": priority,
-            "ChunkSize": chunk_size,
-            "ConcurrentTasks": concurrent_tasks,
+            "Priority": instance.data["attributeValues"].get(
+                "priority", self.priority),
+            "ChunkSize": instance.data["attributeValues"].get(
+                "chunk", self.chunk_size),
+            "ConcurrentTasks": instance.data["attributeValues"].get(
+                "concurrency",
+                self.concurrent_tasks
+            ),

             "Department": self.department,

@@ -234,7 +268,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             "AWSAssetFile0": render_path,

             # using GPU by default
-            "UseGpu": self.use_gpu,
+            "UseGpu": instance.data["attributeValues"].get(
+                "use_gpu", self.use_gpu),

             # Only the specific write node is rendered.
             "WriteNode": exe_node_name

@@ -244,11 +279,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             "AuxFiles": []
         }

-        if responce_data.get("_id"):
+        if response_data.get("_id"):
             payload["JobInfo"].update({
                 "JobType": "Normal",
-                "BatchName": responce_data["Props"]["Batch"],
-                "JobDependency0": responce_data["_id"],
+                "BatchName": response_data["Props"]["Batch"],
+                "JobDependency0": response_data["_id"],
                 "ChunkSize": 99999999
             })
@@ -284,6 +284,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             args.append("--automatic-tests")

         # Generate the payload for Deadline submission
+        secondary_pool = (
+            self.deadline_pool_secondary or instance.data.get("secondaryPool")
+        )
         payload = {
             "JobInfo": {
                 "Plugin": self.deadline_plugin,

@@ -297,8 +300,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 "Priority": priority,

                 "Group": self.deadline_group,
-                "Pool": instance.data.get("primaryPool"),
-                "SecondaryPool": instance.data.get("secondaryPool"),
+                "Pool": self.deadline_pool or instance.data.get("primaryPool"),
+                "SecondaryPool": secondary_pool,
                 # ensure the outputdirectory with correct slashes
                 "OutputDirectory0": output_dir.replace("\\", "/")
             },

@@ -588,7 +591,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         self.log.debug("instances:{}".format(instances))
         return instances

-    def _get_representations(self, instance, exp_files, additional_data):
+    def _get_representations(self, instance, exp_files):
         """Create representations for file sequences.

         This will return representations of expected files if they are not

@@ -933,20 +936,21 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

         self.log.info(data.get("expectedFiles"))

-        additional_data = {
-            "renderProducts": instance.data["renderProducts"],
-            "colorspaceConfig": instance.data["colorspaceConfig"],
-            "display": instance.data["colorspaceDisplay"],
-            "view": instance.data["colorspaceView"],
-            "colorspaceTemplate": instance.data["colorspaceConfig"].replace(
-                str(context.data["anatomy"].roots["work"]), "{root[work]}"
-            )
-        }
-
         if isinstance(data.get("expectedFiles")[0], dict):
             # we cannot attach AOVs to other subsets as we consider every
             # AOV subset of its own.

+            config = instance.data["colorspaceConfig"]
+            additional_data = {
+                "renderProducts": instance.data["renderProducts"],
+                "colorspaceConfig": instance.data["colorspaceConfig"],
+                "display": instance.data["colorspaceDisplay"],
+                "view": instance.data["colorspaceView"],
+                "colorspaceTemplate": config.replace(
+                    str(context.data["anatomy"].roots["work"]), "{root[work]}"
+                )
+            }
+
             if len(data.get("attachTo")) > 0:
                 assert len(data.get("expectedFiles")[0].keys()) == 1, (
                     "attaching multiple AOVs or renderable cameras to
@@ -23,36 +23,37 @@ class ShowInKitsu(LauncherAction):
         return True

     def process(self, session, **kwargs):

         # Context inputs
         project_name = session["AVALON_PROJECT"]
         asset_name = session.get("AVALON_ASSET", None)
         task_name = session.get("AVALON_TASK", None)

-        project = get_project(project_name=project_name,
-                              fields=["data.zou_id"])
+        project = get_project(
+            project_name=project_name, fields=["data.zou_id"]
+        )
         if not project:
-            raise RuntimeError(f"Project {project_name} not found.")
+            raise RuntimeError("Project {} not found.".format(project_name))

         project_zou_id = project["data"].get("zou_id")
         if not project_zou_id:
-            raise RuntimeError(f"Project {project_name} has no "
-                               f"connected kitsu id.")
+            raise RuntimeError(
+                "Project {} has no connected kitsu id.".format(project_name)
+            )

         asset_zou_name = None
         asset_zou_id = None
-        asset_zou_type = 'Assets'
+        asset_zou_type = "Assets"
         task_zou_id = None
-        zou_sub_type = ['AssetType', 'Sequence']
+        zou_sub_type = ["AssetType", "Sequence"]
         if asset_name:
             asset_zou_name = asset_name
             asset_fields = ["data.zou.id", "data.zou.type"]
             if task_name:
-                asset_fields.append(f"data.tasks.{task_name}.zou.id")
+                asset_fields.append("data.tasks.{}.zou.id".format(task_name))

-            asset = get_asset_by_name(project_name,
-                                      asset_name=asset_name,
-                                      fields=asset_fields)
+            asset = get_asset_by_name(
+                project_name, asset_name=asset_name, fields=asset_fields
+            )

             asset_zou_data = asset["data"].get("zou")

@@ -67,40 +68,47 @@ class ShowInKitsu(LauncherAction):
                 task_data = asset["data"]["tasks"][task_name]
                 task_zou_data = task_data.get("zou", {})
                 if not task_zou_data:
-                    self.log.debug(f"No zou task data for task: {task_name}")
+                    self.log.debug(
+                        "No zou task data for task: {}".format(task_name)
+                    )
                 task_zou_id = task_zou_data["id"]

         # Define URL
-        url = self.get_url(project_id=project_zou_id,
-                           asset_name=asset_zou_name,
-                           asset_id=asset_zou_id,
-                           asset_type=asset_zou_type,
-                           task_id=task_zou_id)
+        url = self.get_url(
+            project_id=project_zou_id,
+            asset_name=asset_zou_name,
+            asset_id=asset_zou_id,
+            asset_type=asset_zou_type,
+            task_id=task_zou_id,
+        )

         # Open URL in webbrowser
-        self.log.info(f"Opening URL: {url}")
-        webbrowser.open(url,
-                        # Try in new tab
-                        new=2)
+        self.log.info("Opening URL: {}".format(url))
+        webbrowser.open(
+            url,
+            # Try in new tab
+            new=2,
+        )

-    def get_url(self,
-                project_id,
-                asset_name=None,
-                asset_id=None,
-                asset_type=None,
-                task_id=None):
-
-        shots_url = {'Shots', 'Sequence', 'Shot'}
-        sub_type = {'AssetType', 'Sequence'}
+    def get_url(
+        self,
+        project_id,
+        asset_name=None,
+        asset_id=None,
+        asset_type=None,
+        task_id=None,
+    ):
+        shots_url = {"Shots", "Sequence", "Shot"}
+        sub_type = {"AssetType", "Sequence"}
         kitsu_module = self.get_kitsu_module()

         # Get kitsu url with /api stripped
         kitsu_url = kitsu_module.server_url
         if kitsu_url.endswith("/api"):
-            kitsu_url = kitsu_url[:-len("/api")]
+            kitsu_url = kitsu_url[: -len("/api")]

         sub_url = f"/productions/{project_id}"
-        asset_type_url = "Shots" if asset_type in shots_url else "Assets"
+        asset_type_url = "shots" if asset_type in shots_url else "assets"

         if task_id:
             # Go to task page

@@ -120,6 +128,6 @@ class ShowInKitsu(LauncherAction):
         # Add search method if is a sub_type
         sub_url += f"/{asset_type_url}"
         if asset_type in sub_type:
-            sub_url += f'?search={asset_name}'
+            sub_url += f"?search={asset_name}"

         return f"{kitsu_url}{sub_url}"
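The get_url rewrite above lowercases the section segment and builds the Kitsu route piece by piece. Roughly, the resulting URLs look like this (server address and IDs are invented):

    kitsu_url = "https://kitsu.example.com"  # server_url with /api stripped
    project_id = "prj-123"
    asset_type_url = "shots"  # or "assets"
    asset_name = "sq010"

    sub_url = f"/productions/{project_id}"
    sub_url += f"/{asset_type_url}"
    sub_url += f"?search={asset_name}"
    print(f"{kitsu_url}{sub_url}")
    # https://kitsu.example.com/productions/prj-123/shots?search=sq010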
@@ -13,6 +13,5 @@ class CollectKitsuSession(pyblish.api.ContextPlugin):  # rename log in
     # families = ["kitsu"]

     def process(self, context):
-
         gazu.client.set_host(os.environ["KITSU_SERVER"])
         gazu.log_in(os.environ["KITSU_LOGIN"], os.environ["KITSU_PWD"])
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-import os

 import gazu
 import pyblish.api

@@ -12,62 +10,69 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin):
     label = "Kitsu entities"

     def process(self, context):

-        asset_data = context.data["assetEntity"]["data"]
-        zou_asset_data = asset_data.get("zou")
-        if not zou_asset_data:
-            raise AssertionError("Zou asset data not found in OpenPype!")
-        self.log.debug("Collected zou asset data: {}".format(zou_asset_data))
-
-        zou_task_data = asset_data["tasks"][os.environ["AVALON_TASK"]].get(
-            "zou"
+        kitsu_project = gazu.project.get_project_by_name(
+            context.data["projectName"]
         )
-        if not zou_task_data:
-            self.log.warning("Zou task data not found in OpenPype!")
-        self.log.debug("Collected zou task data: {}".format(zou_task_data))
-
-        kitsu_project = gazu.project.get_project(zou_asset_data["project_id"])
         if not kitsu_project:
-            raise AssertionError("Project not found in kitsu!")
+            raise ValueError("Project not found in kitsu!")
         context.data["kitsu_project"] = kitsu_project
         self.log.debug("Collect kitsu project: {}".format(kitsu_project))

-        entity_type = zou_asset_data["type"]
-        if entity_type == "Shot":
-            kitsu_entity = gazu.shot.get_shot(zou_asset_data["id"])
-        else:
-            kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"])
+        kitsu_entities_by_id = {}
+        for instance in context:
+            asset_doc = instance.data.get("assetEntity")
+            if not asset_doc:
+                continue

-        if not kitsu_entity:
-            raise AssertionError("{} not found in kitsu!".format(entity_type))
+            zou_asset_data = asset_doc["data"].get("zou")
+            if not zou_asset_data:
+                raise ValueError("Zou asset data not found in OpenPype!")

-        context.data["kitsu_entity"] = kitsu_entity
-        self.log.debug(
-            "Collect kitsu {}: {}".format(entity_type, kitsu_entity)
-        )
+            task_name = instance.data.get("task")
+            if not task_name:
+                continue

-        if zou_task_data:
-            kitsu_task = gazu.task.get_task(zou_task_data["id"])
-            if not kitsu_task:
-                raise AssertionError("Task not found in kitsu!")
-            context.data["kitsu_task"] = kitsu_task
-            self.log.debug("Collect kitsu task: {}".format(kitsu_task))
-
-        else:
-            kitsu_task_type = gazu.task.get_task_type_by_name(
-                os.environ["AVALON_TASK"]
+            zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou")
+            self.log.debug(
+                "Collected zou task data: {}".format(zou_task_data)
             )
-            if not kitsu_task_type:
-                raise AssertionError(
-                    "Task type {} not found in Kitsu!".format(
-                        os.environ["AVALON_TASK"]
+
+            entity_id = zou_asset_data["id"]
+            entity = kitsu_entities_by_id.get(entity_id)
+            if not entity:
+                entity = gazu.entity.get_entity(entity_id)
+                if not entity:
+                    raise ValueError(
+                        "{} was not found in kitsu!".format(
+                            zou_asset_data["name"]
                         )
                     )
+            kitsu_entities_by_id[entity_id] = entity
+            instance.data["entity"] = entity
+            self.log.debug(
+                "Collect kitsu {}: {}".format(zou_asset_data["type"], entity)
+            )

+            if zou_task_data:
+                kitsu_task_id = zou_task_data["id"]
+                kitsu_task = kitsu_entities_by_id.get(kitsu_task_id)
+                if not kitsu_task:
+                    kitsu_task = gazu.task.get_task(zou_task_data["id"])
+                    kitsu_entities_by_id[kitsu_task_id] = kitsu_task
+            else:
+                kitsu_task_type = gazu.task.get_task_type_by_name(task_name)
+                if not kitsu_task_type:
+                    raise ValueError(
+                        "Task type {} not found in Kitsu!".format(task_name)
+                    )

-            kitsu_task = gazu.task.get_task_by_name(
-                kitsu_entity, kitsu_task_type
-            )
+                kitsu_task = gazu.task.get_task_by_name(
+                    entity, kitsu_task_type
+                )
             if not kitsu_task:
-                raise AssertionError("Task not found in kitsu!")
-            context.data["kitsu_task"] = kitsu_task
+                raise ValueError("Task not found in kitsu!")
+            instance.data["kitsu_task"] = kitsu_task
             self.log.debug("Collect kitsu task: {}".format(kitsu_task))
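The per-id dict above avoids repeated gazu lookups when several instances share the same entity or task. A generic sketch of that memoization pattern (fetch stands in for gazu.entity.get_entity / gazu.task.get_task):

    # Generic memoization-by-id sketch; fetch() simulates a network call.
    entities_by_id = {}
    calls = []

    def fetch(entity_id):
        calls.append(entity_id)  # count the "network" hits
        return {"id": entity_id}

    def get_cached(entity_id):
        entity = entities_by_id.get(entity_id)
        if not entity:
            entity = fetch(entity_id)
            entities_by_id[entity_id] = entity
        return entity

    get_cached("a1")
    get_cached("a1")
    print(len(calls))  # 1 -> the second lookup hit the cache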
@@ -8,12 +8,11 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):

     order = pyblish.api.IntegratorOrder
     label = "Kitsu Note and Status"
-    # families = ["kitsu"]
+    families = ["render", "kitsu"]
     set_status_note = False
     note_status_shortname = "wfa"

     def process(self, context):
-
         # Get comment text body
         publish_comment = context.data.get("comment")
         if not publish_comment:

@@ -21,30 +20,33 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):

         self.log.debug("Comment is `{}`".format(publish_comment))

-        # Get note status, by default uses the task status for the note
-        # if it is not specified in the configuration
-        note_status = context.data["kitsu_task"]["task_status_id"]
-        if self.set_status_note:
-            kitsu_status = gazu.task.get_task_status_by_short_name(
-                self.note_status_shortname
-            )
-            if kitsu_status:
-                note_status = kitsu_status
-                self.log.info("Note Kitsu status: {}".format(note_status))
-            else:
-                self.log.info(
-                    "Cannot find {} status. The status will not be "
-                    "changed!".format(self.note_status_shortname)
-                )
+        for instance in context:
+            kitsu_task = instance.data.get("kitsu_task")
+            if kitsu_task is None:
+                continue

-        # Add comment to kitsu task
-        self.log.debug(
-            "Add new note in taks id {}".format(
-                context.data["kitsu_task"]["id"]
-            )
-        )
-        kitsu_comment = gazu.task.add_comment(
-            context.data["kitsu_task"], note_status, comment=publish_comment
-        )
+            # Get note status, by default uses the task status for the note
+            # if it is not specified in the configuration
+            note_status = kitsu_task["task_status"]["id"]

-        context.data["kitsu_comment"] = kitsu_comment
+            if self.set_status_note:
+                kitsu_status = gazu.task.get_task_status_by_short_name(
+                    self.note_status_shortname
+                )
+                if kitsu_status:
+                    note_status = kitsu_status
+                    self.log.info("Note Kitsu status: {}".format(note_status))
+                else:
+                    self.log.info(
+                        "Cannot find {} status. The status will not be "
+                        "changed!".format(self.note_status_shortname)
+                    )
+
+            # Add comment to kitsu task
+            task_id = kitsu_task["id"]
+            self.log.debug("Add new note in taks id {}".format(task_id))
+            kitsu_comment = gazu.task.add_comment(
+                task_id, note_status, comment=publish_comment
+            )
+
+            instance.data["kitsu_comment"] = kitsu_comment
@@ -8,14 +8,12 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):

     order = pyblish.api.IntegratorOrder + 0.01
     label = "Kitsu Review"
-    # families = ["kitsu"]
+    families = ["render", "kitsu"]
     optional = True

     def process(self, instance):
-
-        context = instance.context
-        task = context.data["kitsu_task"]
-        comment = context.data.get("kitsu_comment")
+        task = instance.data["kitsu_task"]["id"]
+        comment = instance.data["kitsu_comment"]["id"]

         # Check comment has been created
         if not comment:

@@ -27,9 +25,8 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
         # Add review representations as preview of comment
         for representation in instance.data.get("representations", []):
             # Skip if not tagged as review
-            if "review" not in representation.get("tags", []):
+            if "kitsureview" not in representation.get("tags", []):
                 continue

             review_path = representation.get("published_path")
             self.log.debug("Found review at: {}".format(review_path))
@@ -54,7 +54,8 @@ def validate_host(kitsu_url: str) -> bool:
     if gazu.client.host_is_valid():
         return True
     else:
-        raise gazu.exception.HostException(f"Host '{kitsu_url}' is invalid.")
+        raise gazu.exception.HostException(
+            "Host '{}' is invalid.".format(kitsu_url))


 def clear_credentials():
@@ -1,3 +1,15 @@
+"""
+Bugs:
+    * Error when adding task type to anything that isn't Shot or Assets
+    * Assets don't get added under an episode if TV show
+    * Assets added under Main Pack throws error. No Main Pack name in dict
+
+Features ToDo:
+    * Select in settings what types you wish to sync
+    * Print what's updated on entity-update
+    * Add listener for Edits
+"""
+
 import os
 import threading

@@ -5,6 +17,7 @@ import gazu

 from openpype.client import get_project, get_assets, get_asset_by_name
 from openpype.pipeline import AvalonMongoDB
+from openpype.lib import Logger
 from .credentials import validate_credentials
 from .update_op_with_zou import (
     create_op_asset,

@@ -14,6 +27,8 @@ from .update_op_with_zou import (
     update_op_assets,
 )

+log = Logger.get_logger(__name__)
+

 class Listener:
     """Host Kitsu listener."""

@@ -38,7 +53,7 @@ class Listener:
         # Authenticate
         if not validate_credentials(login, password):
             raise gazu.exception.AuthFailedException(
-                f"Kitsu authentication failed for login: '{login}'..."
+                'Kitsu authentication failed for login: "{}"...'.format(login)
             )

         gazu.set_event_host(

@@ -86,7 +101,9 @@ class Listener:
             self.event_client, "sequence:delete", self._delete_sequence
         )

-        gazu.events.add_listener(self.event_client, "shot:new", self._new_shot)
+        gazu.events.add_listener(
+            self.event_client, "shot:new", self._new_shot
+        )
         gazu.events.add_listener(
             self.event_client, "shot:update", self._update_shot
         )

@@ -94,7 +111,9 @@ class Listener:
             self.event_client, "shot:delete", self._delete_shot
         )

-        gazu.events.add_listener(self.event_client, "task:new", self._new_task)
+        gazu.events.add_listener(
+            self.event_client, "task:new", self._new_task
+        )
         gazu.events.add_listener(
             self.event_client, "task:update", self._update_task
         )
@@ -103,44 +122,62 @@ class Listener:
         )

     def start(self):
         """Start listening for events."""
+        log.info("Listening to Kitsu events...")
         gazu.events.run_client(self.event_client)

+    def get_ep_dict(self, ep_id):
+        if ep_id and ep_id != "":
+            return gazu.entity.get_entity(ep_id)
+        return
+
     # == Project ==
     def _new_project(self, data):
         """Create new project into OP DB."""

         # Use update process to avoid duplicating code
-        self._update_project(data)
+        self._update_project(data, new_project=True)

-    def _update_project(self, data):
+    def _update_project(self, data, new_project=False):
         """Update project into OP DB."""
         # Get project entity
         project = gazu.project.get_project(data["project_id"])
-        project_name = project["name"]

         update_project = write_project_to_op(project, self.dbcon)

         # Write into DB
         if update_project:
-            self.dbcon.Session["AVALON_PROJECT"] = project_name
+            self.dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name(
+                data["project_id"]
+            )
             self.dbcon.bulk_write([update_project])

+        if new_project:
+            log.info("Project created: {}".format(project["name"]))
+
     def _delete_project(self, data):
         """Delete project."""

-        collections = self.dbcon.database.list_collection_names()
-        for collection in collections:
-            project = self.dbcon.database[collection].find_one(
-                {"data.zou_id": data["project_id"]}
-            )
-            if project:
-                # Delete project collection
-                self.dbcon.database[project["name"]].drop()
+        project_name = get_kitsu_project_name(data["project_id"])
+
+        # Delete project collection
+        self.dbcon.database[project_name].drop()
+        # Print message
+        log.info("Project deleted: {}".format(project_name))
+        return

     # == Asset ==

     def _new_asset(self, data):
         """Create new asset into OP DB."""
         # Get project entity
         set_op_project(self.dbcon, data["project_id"])

-        # Get gazu entity
+        # Get asset entity
         asset = gazu.asset.get_asset(data["asset_id"])

         # Insert doc in DB

@@ -149,6 +186,21 @@ class Listener:
         # Update
         self._update_asset(data)

+        # Print message
+        ep_id = asset.get("episode_id")
+        ep = self.get_ep_dict(ep_id)
+
+        msg = (
+            "Asset created: {proj_name} - {ep_name}"
+            "{asset_type_name} - {asset_name}".format(
+                proj_name=asset["project_name"],
+                ep_name=ep["name"] + " - " if ep is not None else "",
+                asset_type_name=asset["asset_type_name"],
+                asset_name=asset["name"],
+            )
+        )
+        log.info(msg)
+
     def _update_asset(self, data):
         """Update asset into OP DB."""
         set_op_project(self.dbcon, data["project_id"])
@@ -166,10 +218,15 @@ class Listener:
             if asset_doc["data"].get("zou", {}).get("id")
         }
         zou_ids_and_asset_docs[asset["project_id"]] = project_doc
+        gazu_project = gazu.project.get_project(asset["project_id"])

         # Update
         update_op_result = update_op_assets(
-            self.dbcon, project_doc, [asset], zou_ids_and_asset_docs
+            self.dbcon,
+            gazu_project,
+            project_doc,
+            [asset],
+            zou_ids_and_asset_docs,
         )
         if update_op_result:
             asset_doc_id, asset_update = update_op_result[0]

@@ -179,10 +236,27 @@ class Listener:
         """Delete asset of OP DB."""
         set_op_project(self.dbcon, data["project_id"])

-        # Delete
-        self.dbcon.delete_one(
-            {"type": "asset", "data.zou.id": data["asset_id"]}
-        )
+        asset = self.dbcon.find_one({"data.zou.id": data["asset_id"]})
+        if asset:
+            # Delete
+            self.dbcon.delete_one(
+                {"type": "asset", "data.zou.id": data["asset_id"]}
+            )
+
+            # Print message
+            ep_id = asset["data"]["zou"].get("episode_id")
+            ep = self.get_ep_dict(ep_id)
+
+            msg = (
+                "Asset deleted: {proj_name} - {ep_name}"
+                "{type_name} - {asset_name}".format(
+                    proj_name=asset["data"]["zou"]["project_name"],
+                    ep_name=ep["name"] + " - " if ep is not None else "",
+                    type_name=asset["data"]["zou"]["asset_type_name"],
+                    asset_name=asset["name"],
+                )
+            )
+            log.info(msg)

     # == Episode ==
     def _new_episode(self, data):

@@ -191,14 +265,20 @@ class Listener:
         set_op_project(self.dbcon, data["project_id"])

         # Get gazu entity
-        episode = gazu.shot.get_episode(data["episode_id"])
+        ep = gazu.shot.get_episode(data["episode_id"])

         # Insert doc in DB
-        self.dbcon.insert_one(create_op_asset(episode))
+        self.dbcon.insert_one(create_op_asset(ep))

         # Update
         self._update_episode(data)

+        # Print message
+        msg = "Episode created: {proj_name} - {ep_name}".format(
+            proj_name=ep["project_name"], ep_name=ep["name"]
+        )
+        log.info(msg)
+
     def _update_episode(self, data):
         """Update episode into OP DB."""
         set_op_project(self.dbcon, data["project_id"])

@@ -206,7 +286,7 @@ class Listener:
         project_doc = get_project(project_name)

         # Get gazu entity
-        episode = gazu.shot.get_episode(data["episode_id"])
+        ep = gazu.shot.get_episode(data["episode_id"])

         # Find asset doc
         # Query all assets of the local project

@@ -215,11 +295,16 @@ class Listener:
             for asset_doc in get_assets(project_name)
             if asset_doc["data"].get("zou", {}).get("id")
         }
-        zou_ids_and_asset_docs[episode["project_id"]] = project_doc
+        zou_ids_and_asset_docs[ep["project_id"]] = project_doc
+        gazu_project = gazu.project.get_project(ep["project_id"])

         # Update
         update_op_result = update_op_assets(
-            self.dbcon, project_doc, [episode], zou_ids_and_asset_docs
+            self.dbcon,
+            gazu_project,
+            project_doc,
+            [ep],
+            zou_ids_and_asset_docs,
         )
         if update_op_result:
             asset_doc_id, asset_update = update_op_result[0]

@@ -228,12 +313,23 @@ class Listener:
     def _delete_episode(self, data):
         """Delete shot of OP DB."""
         set_op_project(self.dbcon, data["project_id"])
-        print("delete episode")  # TODO check bugfix

-        # Delete
-        self.dbcon.delete_one(
-            {"type": "asset", "data.zou.id": data["episode_id"]}
-        )
+        ep = self.dbcon.find_one({"data.zou.id": data["episode_id"]})
+        if ep:
+            # Delete
+            self.dbcon.delete_one(
+                {"type": "asset", "data.zou.id": data["episode_id"]}
+            )
+
+            # Print message
+            project = gazu.project.get_project(
+                ep["data"]["zou"]["project_id"]
+            )
+
+            msg = "Episode deleted: {proj_name} - {ep_name}".format(
+                proj_name=project["name"], ep_name=ep["name"]
+            )
+            log.info(msg)

     # == Sequence ==
     def _new_sequence(self, data):
@ -250,6 +346,20 @@ class Listener:
|
|||
# Update
|
||||
self._update_sequence(data)
|
||||
|
||||
# Print message
|
||||
ep_id = sequence.get("episode_id")
|
||||
ep = self.get_ep_dict(ep_id)
|
||||
|
||||
msg = (
|
||||
"Sequence created: {proj_name} - {ep_name}"
|
||||
"{sequence_name}".format(
|
||||
proj_name=sequence["project_name"],
|
||||
ep_name=ep["name"] + " - " if ep is not None else "",
|
||||
sequence_name=sequence["name"],
|
||||
)
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
def _update_sequence(self, data):
|
||||
"""Update sequence into OP DB."""
|
||||
set_op_project(self.dbcon, data["project_id"])
|
||||
|
|
@ -267,10 +377,15 @@ class Listener:
|
|||
if asset_doc["data"].get("zou", {}).get("id")
|
||||
}
|
||||
zou_ids_and_asset_docs[sequence["project_id"]] = project_doc
|
||||
gazu_project = gazu.project.get_project(sequence["project_id"])
|
||||
|
||||
# Update
|
||||
update_op_result = update_op_assets(
|
||||
self.dbcon, project_doc, [sequence], zou_ids_and_asset_docs
|
||||
self.dbcon,
|
||||
gazu_project,
|
||||
project_doc,
|
||||
[sequence],
|
||||
zou_ids_and_asset_docs,
|
||||
)
|
||||
if update_op_result:
|
||||
asset_doc_id, asset_update = update_op_result[0]
|
||||
|
|

@ -279,12 +394,30 @@ class Listener:
def _delete_sequence(self, data):
"""Delete sequence of OP DB."""
set_op_project(self.dbcon, data["project_id"])
print("delete sequence") # TODO check bugfix
sequence = self.dbcon.find_one({"data.zou.id": data["sequence_id"]})
if sequence:
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["sequence_id"]}
)

# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["sequence_id"]}
)
# Print message
ep_id = sequence["data"]["zou"].get("episode_id")
ep = self.get_ep_dict(ep_id)

gazu_project = gazu.project.get_project(
sequence["data"]["zou"]["project_id"]
)

msg = (
"Sequence deleted: {proj_name} - {ep_name}"
"{sequence_name}".format(
proj_name=gazu_project["name"],
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=sequence["name"],
)
)
log.info(msg)

# == Shot ==
def _new_shot(self, data):

@ -301,6 +434,21 @@ class Listener:
# Update
self._update_shot(data)

# Print message
ep_id = shot["episode_id"]
ep = self.get_ep_dict(ep_id)

msg = (
"Shot created: {proj_name} - {ep_name}"
"{sequence_name} - {shot_name}".format(
proj_name=shot["project_name"],
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=shot["sequence_name"],
shot_name=shot["name"],
)
)
log.info(msg)

def _update_shot(self, data):
"""Update shot into OP DB."""
set_op_project(self.dbcon, data["project_id"])

@ -318,11 +466,17 @@ class Listener:
if asset_doc["data"].get("zou", {}).get("id")
}
zou_ids_and_asset_docs[shot["project_id"]] = project_doc
gazu_project = gazu.project.get_project(shot["project_id"])

# Update
update_op_result = update_op_assets(
self.dbcon, project_doc, [shot], zou_ids_and_asset_docs
self.dbcon,
gazu_project,
project_doc,
[shot],
zou_ids_and_asset_docs,
)

if update_op_result:
asset_doc_id, asset_update = update_op_result[0]
self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

@ -330,11 +484,28 @@ class Listener:
def _delete_shot(self, data):
"""Delete shot of OP DB."""
set_op_project(self.dbcon, data["project_id"])
shot = self.dbcon.find_one({"data.zou.id": data["shot_id"]})

# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["shot_id"]}
)
if shot:
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["shot_id"]}
)

# Print message
ep_id = shot["data"]["zou"].get("episode_id")
ep = self.get_ep_dict(ep_id)

msg = (
"Shot deleted: {proj_name} - {ep_name}"
"{sequence_name} - {shot_name}".format(
proj_name=shot["data"]["zou"]["project_name"],
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=shot["data"]["zou"]["sequence_name"],
shot_name=shot["name"],
)
)
log.info(msg)

# == Task ==
def _new_task(self, data):

@ -346,23 +517,59 @@ class Listener:
# Get gazu entity
task = gazu.task.get_task(data["task_id"])

# Find asset doc
parent_name = task["entity"]["name"]
# Print message
ep_id = task.get("episode_id")
ep = self.get_ep_dict(ep_id)

asset_doc = get_asset_by_name(project_name, parent_name)
parent_name = None
asset_name = None
ent_type = None

if task["task_type"]["for_entity"] == "Asset":
parent_name = task["entity"]["name"]
asset_name = task["entity"]["name"]
ent_type = task["entity_type"]["name"]
elif task["task_type"]["for_entity"] == "Shot":
parent_name = "{ep_name}{sequence_name} - {shot_name}".format(
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=task["sequence"]["name"],
shot_name=task["entity"]["name"],
)
asset_name = "{ep_name}{sequence_name}_{shot_name}".format(
ep_name=ep["name"] + "_" if ep is not None else "",
sequence_name=task["sequence"]["name"],
shot_name=task["entity"]["name"],
)

# Update asset tasks with new one
asset_tasks = asset_doc["data"].get("tasks")
task_type_name = task["task_type"]["name"]
asset_tasks[task_type_name] = {"type": task_type_name, "zou": task}
self.dbcon.update_one(
{"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}}
)
asset_doc = get_asset_by_name(project_name, asset_name)
if asset_doc:
asset_tasks = asset_doc["data"].get("tasks")
task_type_name = task["task_type"]["name"]
asset_tasks[task_type_name] = {
"type": task_type_name,
"zou": task,
}
self.dbcon.update_one(
{"_id": asset_doc["_id"]},
{"$set": {"data.tasks": asset_tasks}},
)

# Print message
msg = (
"Task created: {proj} - {ent_type}{parent}"
" - {task}".format(
proj=task["project"]["name"],
ent_type=ent_type + " - " if ent_type is not None else "",
parent=parent_name,
task=task["task_type"]["name"],
)
)
log.info(msg)

def _update_task(self, data):
"""Update task into OP DB."""
# TODO is it necessary?
pass

def _delete_task(self, data):
"""Delete task of OP DB."""

@ -384,6 +591,31 @@ class Listener:
{"_id": doc["_id"]},
{"$set": {"data.tasks": asset_tasks}},
)

# Print message
entity = gazu.entity.get_entity(task["zou"]["entity_id"])
ep = self.get_ep_dict(entity["source_id"])

if entity["type"] == "Asset":
parent_name = "{ep}{entity_type} - {entity}".format(
ep=ep["name"] + " - " if ep is not None else "",
entity_type=task["zou"]["entity_type"]["name"],
entity=task["zou"]["entity"]["name"],
)
elif entity["type"] == "Shot":
parent_name = "{ep}{sequence} - {shot}".format(
ep=ep["name"] + " - " if ep is not None else "",
sequence=task["zou"]["sequence"]["name"],
shot=task["zou"]["entity"]["name"],
)

msg = "Task deleted: {proj} - {parent} - {task}".format(
proj=task["zou"]["project"]["name"],
parent=parent_name,
task=name,
)
log.info(msg)

return

@ -394,9 +626,10 @@ def start_listeners(login: str, password: str):
login (str): Kitsu user login
password (str): Kitsu user password
"""

# Refresh token every week
def refresh_token_every_week():
print("Refreshing token...")
log.info("Refreshing token...")
gazu.refresh_token()
threading.Timer(7 * 3600 * 24, refresh_token_every_week).start()
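The listener keeps its Kitsu session alive by re-arming a `threading.Timer` at the end of each run (7 * 3600 * 24 seconds is one week). A minimal standalone sketch of the same pattern, with a generic `refresh` callable standing in for `gazu.refresh_token`; the daemon flag is an assumption added here so the timer does not block interpreter shutdown:

```python
import threading


def schedule_weekly(refresh, interval=7 * 3600 * 24):
    """Run `refresh` now, then re-arm a timer to run it again after `interval` seconds."""
    def _tick():
        refresh()
        timer = threading.Timer(interval, _tick)
        timer.daemon = True  # assumption: don't keep the process alive just for the timer
        timer.start()

    _tick()


# Hypothetical usage: schedule_weekly(gazu.refresh_token)
```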

@ -5,10 +5,6 @@ from typing import Dict, List

from pymongo import DeleteOne, UpdateOne
import gazu
from gazu.task import (
all_tasks_for_asset,
all_tasks_for_shot,
)

from openpype.client import (
get_project,

@ -18,7 +14,6 @@ from openpype.client import (
create_project,
)
from openpype.pipeline import AvalonMongoDB
from openpype.settings import get_project_settings
from openpype.modules.kitsu.utils.credentials import validate_credentials

from openpype.lib import Logger

@ -69,6 +64,7 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str):

def update_op_assets(
dbcon: AvalonMongoDB,
gazu_project: dict,
project_doc: dict,
entities_list: List[dict],
asset_doc_ids: Dict[str, dict],

@ -78,14 +74,18 @@ def update_op_assets(

Args:
dbcon (AvalonMongoDB): Connection to DB
gazu_project (dict): Gazu project dict
project_doc (dict): Dict of project
entities_list (List[dict]): List of zou entities to update
asset_doc_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...]

Returns:
List[Dict[str, dict]]: List of (doc_id, update_dict) tuples
"""
if not project_doc:
    return

project_name = project_doc["name"]
project_module_settings = get_project_settings(project_name)["kitsu"]

assets_with_update = []
for item in entities_list:
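For orientation, this is how the extended `update_op_assets` signature is consumed elsewhere in this commit. A sketch assembled from the listener hunks above, not an authoritative snippet; `dbcon`, `shot`, `project_doc` and `zou_ids_and_asset_docs` are assumed to be in scope:

```python
# Sketch assembled from the listener hunks above.
gazu_project = gazu.project.get_project(shot["project_id"])
update_op_result = update_op_assets(
    dbcon,
    gazu_project,
    project_doc,
    [shot],
    zou_ids_and_asset_docs,
)
if update_op_result:
    asset_doc_id, asset_update = update_op_result[0]
    dbcon.update_one({"_id": asset_doc_id}, asset_update)
```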

@ -94,7 +94,8 @@ def update_op_assets(
if not item_doc:  # Create asset
op_asset = create_op_asset(item)
insert_result = dbcon.insert_one(op_asset)
item_doc = get_asset_by_id(project_name, insert_result.inserted_id)
item_doc = get_asset_by_id(
project_name, insert_result.inserted_id)

# Update asset
item_data = deepcopy(item_doc["data"])

@ -113,38 +114,73 @@ def update_op_assets(
except (TypeError, ValueError):
frame_in = 1001
item_data["frameStart"] = frame_in
# Frames duration, fallback on 0
# Frames duration, fallback on 1
try:
# NOTE nb_frames is stored directly in item
# because of zou's legacy design
frames_duration = int(item.get("nb_frames", 0))
frames_duration = int(item.get("nb_frames", 1))
except (TypeError, ValueError):
frames_duration = 0
frames_duration = None
# Frame out, fallback on frame_in + duration or project's value or 1001
frame_out = item_data.pop("frame_out", None)
if not frame_out:
frame_out = frame_in + frames_duration
try:
frame_out = int(frame_out)
except (TypeError, ValueError):
frame_out = 1001
if frames_duration:
frame_out = frame_in + frames_duration - 1
else:
frame_out = project_doc["data"].get("frameEnd", frame_in)
item_data["frameEnd"] = frame_out
# Fps, fallback to project's value or default value (25.0)
try:
fps = float(item_data.get("fps", project_doc["data"].get("fps")))
fps = float(item_data.get("fps"))
except (TypeError, ValueError):
fps = 25.0
fps = float(gazu_project.get(
"fps", project_doc["data"].get("fps", 25)))
item_data["fps"] = fps
# Resolution, fall back to project default
match_res = re.match(
r"(\d+)x(\d+)",
item_data.get("resolution", gazu_project.get("resolution"))
)
if match_res:
item_data["resolutionWidth"] = int(match_res.group(1))
item_data["resolutionHeight"] = int(match_res.group(2))
else:
item_data["resolutionWidth"] = project_doc["data"].get(
"resolutionWidth")
item_data["resolutionHeight"] = project_doc["data"].get(
"resolutionHeight")
# Properties that don't fully exist in Kitsu.
# Guessing those property names below:
# Pixel Aspect Ratio
item_data["pixelAspect"] = item_data.get(
"pixel_aspect", project_doc["data"].get("pixelAspect"))
# Handle Start
item_data["handleStart"] = item_data.get(
"handle_start", project_doc["data"].get("handleStart"))
# Handle End
item_data["handleEnd"] = item_data.get(
"handle_end", project_doc["data"].get("handleEnd"))
# Clip In
item_data["clipIn"] = item_data.get(
"clip_in", project_doc["data"].get("clipIn"))
# Clip Out
item_data["clipOut"] = item_data.get(
"clip_out", project_doc["data"].get("clipOut"))

# Tasks
tasks_list = []
item_type = item["type"]
if item_type == "Asset":
tasks_list = all_tasks_for_asset(item)
tasks_list = gazu.task.all_tasks_for_asset(item)
elif item_type == "Shot":
tasks_list = all_tasks_for_shot(item)
tasks_list = gazu.task.all_tasks_for_shot(item)
item_data["tasks"] = {
t["task_type_name"]: {"type": t["task_type_name"], "zou": t}
item_data["tasks"] = {
t["task_type_name"]: {
"type": t["task_type_name"],
"zou": gazu.task.get_task(t["id"]),
}
}
for t in tasks_list
}
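The inclusive frame arithmetic above is the crux of this hunk: `nb_frames` counts both endpoints, so the last frame is `frame_in + frames_duration - 1`, and the resolution fallback parses a "WxH" string with a regex. A small self-contained check of both, with hypothetical values (1001, 100 and "1920x1080" are illustrative, not from the commit):

```python
import re

frame_in = 1001    # hypothetical start frame
nb_frames = 100    # hypothetical Kitsu nb_frames value

# Inclusive range: frames 1001..1100 contain exactly 100 frames.
frame_out = frame_in + nb_frames - 1
assert frame_out == 1100

# Same "WxH" pattern as the sync code, applied to an assumed resolution string.
match_res = re.match(r"(\d+)x(\d+)", "1920x1080")
assert match_res is not None
width, height = int(match_res.group(1)), int(match_res.group(2))
assert (width, height) == (1920, 1080)
```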

@ -176,9 +212,14 @@ def update_op_assets(
entity_root_asset_name = "Shots"

# Root parent folder if it exists
visual_parent_doc_id = (
asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None
)
visual_parent_doc_id = None
if parent_zou_id is not None:
parent_zou_id_dict = asset_doc_ids.get(parent_zou_id)
if parent_zou_id_dict is not None:
visual_parent_doc_id = (
parent_zou_id_dict.get("_id")
if parent_zou_id_dict else None)

if visual_parent_doc_id is None:
# Find root folder doc ("Assets" or "Shots")
root_folder_doc = get_asset_by_name(

@ -197,12 +238,15 @@ def update_op_assets(
item_data["parents"] = []
ancestor_id = parent_zou_id
while ancestor_id is not None:
parent_doc = asset_doc_ids[ancestor_id]
item_data["parents"].insert(0, parent_doc["name"])
parent_doc = asset_doc_ids.get(ancestor_id)
if parent_doc is not None:
item_data["parents"].insert(0, parent_doc["name"])

# Get parent entity
parent_entity = parent_doc["data"]["zou"]
ancestor_id = parent_entity.get("parent_id")
# Get parent entity
parent_entity = parent_doc["data"]["zou"]
ancestor_id = parent_entity.get("parent_id")
else:
ancestor_id = None

# Build OpenPype compatible name
if item_type in ["Shot", "Sequence"] and parent_zou_id is not None:

@ -250,13 +294,12 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
UpdateOne: Update instance for the project
"""
project_name = project["name"]
project_doc = get_project(project_name)
if not project_doc:
log.info(f"Creating project '{project_name}'")
project_doc = create_project(project_name, project_name)
project_dict = get_project(project_name)
if not project_dict:
project_dict = create_project(project_name, project_name)

# Project data and tasks
project_data = project_doc["data"] or {}
project_data = project_dict["data"] or {}

# Build project code and update Kitsu
project_code = project.get("code")

@ -287,7 +330,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
)

return UpdateOne(
{"_id": project_doc["_id"]},
{"_id": project_dict["_id"]},
{
"$set": {
"config.tasks": {

@ -301,7 +344,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
)


def sync_all_projects(login: str, password: str, ignore_projects: list = None):
def sync_all_projects(
login: str, password: str, ignore_projects: list = None):
"""Update all OP projects in DB with Zou data.

Args:

@ -346,7 +390,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
if not project:
project = gazu.project.get_project_by_name(project["name"])

log.info(f"Synchronizing {project['name']}...")
log.info("Synchronizing {}...".format(project['name']))

# Get all assets from zou
all_assets = gazu.asset.all_assets_for_project(project)

@ -365,12 +409,16 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
]

# Sync project. Create if doesn't exist
project_name = project["name"]
project_dict = get_project(project_name)
if not project_dict:
log.info("Project created: {}".format(project_name))
bulk_writes.append(write_project_to_op(project, dbcon))

# Try to find project document
project_name = project["name"]
if not project_dict:
project_dict = get_project(project_name)
dbcon.Session["AVALON_PROJECT"] = project_name
project_doc = get_project(project_name)

# Query all assets of the local project
zou_ids_and_asset_docs = {

@ -378,7 +426,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
for asset_doc in get_assets(project_name)
if asset_doc["data"].get("zou", {}).get("id")
}
zou_ids_and_asset_docs[project["id"]] = project_doc
zou_ids_and_asset_docs[project["id"]] = project_dict

# Create entities root folders
to_insert = [

@ -389,6 +437,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
"data": {
"root_of": r,
"tasks": {},
"visualParent": None,
"parents": [],
},
}
for r in ["Assets", "Shots"]

@ -423,7 +473,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
[
UpdateOne({"_id": id}, update)
for id, update in update_op_assets(
dbcon, project_doc, all_entities, zou_ids_and_asset_docs
dbcon, project, project_dict,
all_entities, zou_ids_and_asset_docs
)
]
)

@ -61,7 +61,7 @@ def sync_zou_from_op_project(
project_doc = get_project(project_name)

# Get all entities from zou
print(f"Synchronizing {project_name}...")
print("Synchronizing {}...".format(project_name))
zou_project = gazu.project.get_project_by_name(project_name)

# Create project

@ -82,7 +82,9 @@ def sync_zou_from_op_project(
f"x{project_doc['data']['resolutionHeight']}",
}
)
gazu.project.update_project_data(zou_project, data=project_doc["data"])
gazu.project.update_project_data(
zou_project, data=project_doc["data"]
)
gazu.project.update_project(zou_project)

asset_types = gazu.asset.all_asset_types()

@ -98,8 +100,7 @@ def sync_zou_from_op_project(
project_module_settings = get_project_settings(project_name)["kitsu"]
dbcon.Session["AVALON_PROJECT"] = project_name
asset_docs = {
asset_doc["_id"]: asset_doc
for asset_doc in get_assets(project_name)
asset_doc["_id"]: asset_doc for asset_doc in get_assets(project_name)
}

# Create new assets

@ -174,7 +175,9 @@ def sync_zou_from_op_project(
doc["name"],
frame_in=doc["data"]["frameStart"],
frame_out=doc["data"]["frameEnd"],
nb_frames=doc["data"]["frameEnd"] - doc["data"]["frameStart"],
nb_frames=(
doc["data"]["frameEnd"] - doc["data"]["frameStart"] + 1
),
)

elif match.group(2):  # Sequence
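This `+ 1` is the reverse of the `frame_out = frame_in + frames_duration - 1` fix earlier in the commit; both directions must count the range inclusively or each sync drifts by one frame. A round-trip check with assumed values:

```python
frame_start, frame_end = 1001, 1100      # hypothetical OpenPype range
nb_frames = frame_end - frame_start + 1  # value pushed to Kitsu: 100

# Deriving the range back from nb_frames must land on the same end frame.
assert frame_start + nb_frames - 1 == frame_end
```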

@ -229,7 +232,7 @@ def sync_zou_from_op_project(
"frame_in": frame_in,
"frame_out": frame_out,
},
"nb_frames": frame_out - frame_in,
"nb_frames": frame_out - frame_in + 1,
}
)
entity = gazu.raw.update("entities", zou_id, entity_data)

@ -258,7 +261,7 @@ def sync_zou_from_op_project(
for asset_doc in asset_docs.values()
}
for entity_id in deleted_entities:
gazu.raw.delete(f"data/entities/{entity_id}")
gazu.raw.delete("data/entities/{}".format(entity_id))

# Write into DB
if bulk_writes:

@ -52,7 +52,6 @@
"enabled": true,
"optional": false,
"active": true,
"use_published": true,
"priority": 50,
"chunk_size": 10,
"concurrent_tasks": 1,

@ -139,7 +139,8 @@
"ext": "mp4",
"tags": [
"burnin",
"ftrackreview"
"ftrackreview",
"kitsureview"
],
"burnins": [],
"ffmpeg_args": {

@ -179,6 +179,13 @@
"Main"
]
},
"CreateReview": {
"enabled": true,
"defaults": [
"Main"
],
"useMayaTimeline": true
},
"CreateAss": {
"enabled": true,
"defaults": [

@ -199,6 +206,14 @@
"maskColor_manager": false,
"maskOperator": false
},
"CreateVrayProxy": {
"enabled": true,
"vrmesh": true,
"alembic": true,
"defaults": [
"Main"
]
},
"CreateMultiverseUsd": {
"enabled": true,
"defaults": [

@ -247,12 +262,6 @@
"Main"
]
},
"CreateReview": {
"enabled": true,
"defaults": [
"Main"
]
},
"CreateRig": {
"enabled": true,
"defaults": [

@ -268,12 +277,6 @@
"Anim"
]
},
"CreateVrayProxy": {
"enabled": true,
"defaults": [
"Main"
]
},
"CreateVRayScene": {
"enabled": true,
"defaults": [

@ -676,7 +679,7 @@
"families": [
"pointcache",
"model",
"vrayproxy"
"vrayproxy.alembic"
]
},
"ExtractObj": {

@ -285,11 +285,6 @@
"key": "active",
"label": "Active"
},
{
"type": "boolean",
"key": "use_published",
"label": "Use Published scene"
},
{
"type": "splitter"
},

@ -240,6 +240,31 @@
}
]
},
{
"type": "dict",
"collapsible": true,
"key": "CreateReview",
"label": "Create Review",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "list",
"key": "defaults",
"label": "Default Subsets",
"object_type": "text"
},
{
"type": "boolean",
"key": "useMayaTimeline",
"label": "Use Maya Timeline for Frame Range."
}
]
},
{
"type": "dict",
"collapsible": true,

@ -332,6 +357,36 @@
}
]
},
{
"type": "dict",
"collapsible": true,
"key": "CreateVrayProxy",
"label": "Create VRay Proxy",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "boolean",
"key": "vrmesh",
"label": "VrMesh"
},
{
"type": "boolean",
"key": "alembic",
"label": "Alembic"
},
{
"type": "list",
"key": "defaults",
"label": "Default Subsets",
"object_type": "text"
}
]
},
{
"type": "schema_template",
"name": "template_create_plugin",

@ -368,10 +423,6 @@
"key": "CreateRenderSetup",
"label": "Create Render Setup"
},
{
"key": "CreateReview",
"label": "Create Review"
},
{
"key": "CreateRig",
"label": "Create Rig"

@ -380,10 +431,6 @@
"key": "CreateSetDress",
"label": "Create Set Dress"
},
{
"key": "CreateVrayProxy",
"label": "Create VRay Proxy"
},
{
"key": "CreateVRayScene",
"label": "Create VRay Scene"

@ -16,6 +16,9 @@
{
"shotgridreview": "Add review to Shotgrid"
},
{
"kitsureview": "Add review to Kitsu"
},
{
"delete": "Delete output"
},

@ -28,16 +28,16 @@ For [AWS Thinkbox Deadline](https://www.awsthinkbox.com/deadline) support you ne
OpenPype integration for Deadline consists of two parts:

- The `OpenPype` Deadline Plug-in
- A `GlobalJobPreLoad` Deadline Script (this gets triggered for each deadline job)

The `GlobalJobPreLoad` handles populating render and publish jobs with proper environment variables using settings from the `OpenPype` Deadline Plug-in.

The `OpenPype` Deadline Plug-in must be configured to point to a valid OpenPype executable location. The executable needs to be installed to
destinations accessible by the DL process. Check permissions (must be executable and accessible by the Deadline process).

- Enable `Tools > Super User Mode` in Deadline Monitor

- Go to `Tools > Configure Plugins...`, find `OpenPype` in the list on the left side, find location of OpenPype
executable. It is recommended to use the `openpype_console` executable as it provides a bit more logging.

- In case of multi-OS farms, provide multiple locations; each Deadline Worker goes through the list and tries to find the first accessible

@ -45,12 +45,22 @@ executable. It is recommended to use the `openpype_console` executable as it pro


### Pools

The main pools can be configured at `project_settings/deadline/publish/CollectDeadlinePools/primary_pool`, which is applied to the rendering jobs.

The dependent publishing job's pool uses `project_settings/deadline/publish/ProcessSubmittedJobOnFarm/deadline_pool`. If nothing is specified, the pool falls back to the primary pool above; a sketch of that fallback follows the note below.

:::note maya tile rendering
The logic for publishing job pool assignment applies to tiling jobs as well.
:::
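A minimal sketch of that fallback, assuming a dict shaped like the `project_settings/deadline/publish` section named above (the key names come from the paths in this doc; the helper function itself is hypothetical):

```python
def resolve_publish_pool(publish_settings: dict) -> str:
    """Pick the publish job's pool, falling back to the primary render pool."""
    publish_pool = publish_settings["ProcessSubmittedJobOnFarm"].get("deadline_pool")
    primary_pool = publish_settings["CollectDeadlinePools"].get("primary_pool", "")
    return publish_pool or primary_pool


# Hypothetical usage:
# pool = resolve_publish_pool(project_settings["deadline"]["publish"])
```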

## Troubleshooting

#### Publishing jobs fail directly in DCCs

- Double check that all previously described steps were finished
- Check that `deadlinewebservice` is running on the DL server
- Check that the user's machine has access to the Deadline server on the configured port

#### Jobs are failing on DL side

@ -61,40 +71,40 @@ Each publishing from OpenPype consists of 2 jobs, first one is rendering, second

- Jobs are failing with `OpenPype executable was not found` error

  Check if OpenPype is installed on the Worker handling this job and ensure the `OpenPype` Deadline Plug-in is properly [configured](#configuration)

- Publishing job is failing with `ffmpeg not installed` error

  The OpenPype executable has to have access to the `ffmpeg` executable; check OpenPype `Settings > General`

- Both jobs finished successfully, but there is no review on Ftrack

  Make sure that you correctly set the published family to be sent to Ftrack.

  Example: I want to send a review of rendered images from Harmony to Ftrack:
  - `Host names`: "harmony"
  - `Families`: "render"
  - `Add Ftrack Family` to "Enabled"

  Make sure that you actually configured review creation for the published subset in `project_settings/ftrack/publish/CollectFtrackFamily`

  Example: I want to create a review for all reviewable subsets in Harmony:
  - Add "harmony" as a new key and ".*" as a value.

- Rendering jobs are stuck in 'Queued' state or failing

  Make sure that your Deadline is not limiting specific jobs to run only on specific machines. (E.g. only some machines have a particular application installed.)

  Check `project_settings/deadline`

  Example: I have separated machines with "Harmony" installed into a "harmony" group on Deadline. I want rendering jobs published from Harmony to run only on those machines.