mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'develop' into feature/OP-4778_Nuke-create-first-workfile-template-switch-to-preset

commit 1dc35f2af3
36 changed files with 954 additions and 492 deletions
@@ -1,11 +1,9 @@
 import os
 import contextlib

-from openpype.client import get_version_by_id
-from openpype.pipeline import (
-    load,
-    legacy_io,
-    get_representation_path,
-)
+import openpype.pipeline.load as load
+from openpype.pipeline.load import (
+    get_representation_context,
+    get_representation_path_from_context
+)
 from openpype.hosts.fusion.api import (
     imprint_container,

@@ -148,7 +146,7 @@ class FusionLoadSequence(load.LoaderPlugin):
         namespace = context['asset']['name']

         # Use the first file for now
-        path = self._get_first_image(os.path.dirname(self.fname))
+        path = get_representation_path_from_context(context)

         # Create the Loader with the filename path set
         comp = get_current_comp()

@@ -217,13 +215,11 @@ class FusionLoadSequence(load.LoaderPlugin):
         assert tool.ID == "Loader", "Must be Loader"
         comp = tool.Comp()

-        root = os.path.dirname(get_representation_path(representation))
-        path = self._get_first_image(root)
+        context = get_representation_context(representation)
+        path = get_representation_path_from_context(context)

         # Get start frame from version data
-        project_name = legacy_io.active_project()
-        version = get_version_by_id(project_name, representation["parent"])
-        start = self._get_start(version, tool)
+        start = self._get_start(context["version"], tool)

         with comp_lock_and_undo_chunk(comp, "Update Loader"):

@@ -256,11 +252,6 @@ class FusionLoadSequence(load.LoaderPlugin):
         with comp_lock_and_undo_chunk(comp, "Remove Loader"):
             tool.Delete()

-    def _get_first_image(self, root):
-        """Get first file in representation root"""
-        files = sorted(os.listdir(root))
-        return os.path.join(root, files[0])
-
     def _get_start(self, version_doc, tool):
         """Return real start frame of published files (incl. handles)"""
         data = version_doc["data"]

@@ -120,13 +120,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
             track = sitem.parentTrack().name()
             # node serialization
             node = sitem.node()
-            node_serialized = self.node_serialisation(node)
+            node_serialized = self.node_serialization(node)
             node_name = sitem.name()

-            if "_" in node_name:
-                node_class = re.sub(r"(?:_)[_0-9]+", "", node_name)  # more numbers
-            else:
-                node_class = re.sub(r"\d+", "", node_name)  # one number
+            node_class = node.Class()

             # collect timelineIn/Out
             effect_t_in = int(sitem.timelineIn())

@@ -148,7 +144,7 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
                 "node": node_serialized
             }}

-    def node_serialisation(self, node):
+    def node_serialization(self, node):
         node_serialized = {}

         # adding ignoring knob keys

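The name-based class guessing removed above is worth seeing in isolation: it only works while a node's display name mirrors its class, which is why the diff switches to node.Class(). A standalone sketch of the old heuristic (the node names are made up):

import re

def class_from_name(node_name):
    # Heuristic removed by the diff: strip copy-numbers from the display name.
    if "_" in node_name:
        return re.sub(r"(?:_)[_0-9]+", "", node_name)  # e.g. "Crop_2" -> "Crop"
    return re.sub(r"\d+", "", node_name)  # e.g. "Crop2" -> "Crop"

print(class_from_name("Crop_2"))           # Crop
print(class_from_name("Crop2"))            # Crop
print(class_from_name("Text2_10"))         # Text2 -- digits in the class survive only by luck
print(class_from_name("my_renamed_node"))  # my_renamed_node -- not a class at all
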
@@ -3574,3 +3574,34 @@ def get_color_management_output_transform():
     if preferences["output_transform_enabled"]:
         colorspace = preferences["output_transform"]
     return colorspace
+
+
+def len_flattened(components):
+    """Return the length of the list as if it was flattened.
+
+    Maya will return consecutive components as a single entry
+    when requesting with `maya.cmds.ls` without the `flatten`
+    flag. Though enabling `flatten` on a large list (e.g. millions)
+    will result in a slow result. This command will return the amount
+    of entries in a non-flattened list by parsing the result with
+    regex.
+
+    Args:
+        components (list): The non-flattened components.
+
+    Returns:
+        int: The amount of entries.
+
+    """
+    assert isinstance(components, (list, tuple))
+    n = 0
+
+    pattern = re.compile(r"\[(\d+):(\d+)\]")
+    for c in components:
+        match = pattern.search(c)
+        if match:
+            start, end = match.groups()
+            n += int(end) - int(start) + 1
+        else:
+            n += 1
+    return n

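Since len_flattened is pure string parsing, it can be exercised outside Maya; a standalone sketch, where the component strings are illustrative of what cmds.ls returns un-flattened:

import re

def len_flattened(components):
    """Count entries in a non-flattened Maya component list."""
    pattern = re.compile(r"\[(\d+):(\d+)\]")
    n = 0
    for c in components:
        match = pattern.search(c)
        if match:
            start, end = match.groups()
            n += int(end) - int(start) + 1
        else:
            n += 1
    return n

# "pCube1.vtx[0:7]" is how Maya collapses vertices 0..7 into one entry.
print(len_flattened(["pCube1.vtx[0:7]"]))                   # 8
print(len_flattened(["pCube1.vtx[3]", "pCube1.vtx[5:6]"]))  # 3
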
@@ -137,6 +137,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
             # Create the instance
             instance = context.create_instance(objset)
             instance[:] = members_hierarchy
+            instance.data["objset"] = objset

             # Store the exact members of the object set
             instance.data["setMembers"] = members

@@ -0,0 +1,60 @@
+from maya import cmds
+
+import pyblish.api
+from openpype.pipeline.publish import (
+    ValidateContentsOrder, PublishValidationError, RepairAction
+)
+from openpype.pipeline import discover_legacy_creator_plugins
+from openpype.hosts.maya.api.lib import imprint
+
+
+class ValidateInstanceAttributes(pyblish.api.InstancePlugin):
+    """Validate Instance Attributes.
+
+    New attributes can be introduced as new features come in. Old instances
+    will need to be updated with these attributes for the documentation to make
+    sense, and users do not have to recreate the instances.
+    """
+
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["*"]
+    label = "Instance Attributes"
+    plugins_by_family = {
+        p.family: p for p in discover_legacy_creator_plugins()
+    }
+    actions = [RepairAction]
+
+    @classmethod
+    def get_missing_attributes(self, instance):
+        plugin = self.plugins_by_family[instance.data["family"]]
+        subset = instance.data["subset"]
+        asset = instance.data["asset"]
+        objset = instance.data["objset"]
+
+        missing_attributes = {}
+        for key, value in plugin(subset, asset).data.items():
+            if not cmds.objExists("{}.{}".format(objset, key)):
+                missing_attributes[key] = value
+
+        return missing_attributes
+
+    def process(self, instance):
+        objset = instance.data.get("objset")
+        if objset is None:
+            self.log.debug(
+                "Skipping {} because no objectset found.".format(instance)
+            )
+            return
+
+        missing_attributes = self.get_missing_attributes(instance)
+        if missing_attributes:
+            raise PublishValidationError(
+                "Missing attributes on {}:\n{}".format(
+                    objset, missing_attributes
+                )
+            )
+
+    @classmethod
+    def repair(cls, instance):
+        imprint(instance.data["objset"], cls.get_missing_attributes(instance))

openpype/hosts/maya/plugins/publish/validate_mesh_empty.py (new file, 54 lines)

@@ -0,0 +1,54 @@
+from maya import cmds
+
+import pyblish.api
+import openpype.hosts.maya.api.action
+from openpype.pipeline.publish import (
+    RepairAction,
+    ValidateMeshOrder
+)
+
+
+class ValidateMeshEmpty(pyblish.api.InstancePlugin):
+    """Validate meshes have some vertices.
+
+    Its possible to have meshes without any vertices. To replicate
+    this issue, delete all faces/polygons then all edges.
+    """
+
+    order = ValidateMeshOrder
+    hosts = ["maya"]
+    families = ["model"]
+    label = "Mesh Empty"
+    actions = [
+        openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction
+    ]
+
+    @classmethod
+    def repair(cls, instance):
+        invalid = cls.get_invalid(instance)
+        for node in invalid:
+            cmds.delete(node)
+
+    @classmethod
+    def get_invalid(cls, instance):
+        invalid = []
+
+        meshes = cmds.ls(instance, type="mesh", long=True)
+        for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "\"{}\" does not have any vertices.".format(mesh)
+                )
+                invalid.append(mesh)
+
+        return invalid
+
+    def process(self, instance):
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError(
+                "Meshes found in instance without any vertices: %s" % invalid
+            )

@@ -1,39 +1,9 @@
-import re
-
 from maya import cmds

 import pyblish.api
 import openpype.hosts.maya.api.action
 from openpype.pipeline.publish import ValidateMeshOrder
-
-
-def len_flattened(components):
-    """Return the length of the list as if it was flattened.
-
-    Maya will return consecutive components as a single entry
-    when requesting with `maya.cmds.ls` without the `flatten`
-    flag. Though enabling `flatten` on a large list (e.g. millions)
-    will result in a slow result. This command will return the amount
-    of entries in a non-flattened list by parsing the result with
-    regex.
-
-    Args:
-        components (list): The non-flattened components.
-
-    Returns:
-        int: The amount of entries.
-
-    """
-    assert isinstance(components, (list, tuple))
-    n = 0
-    for c in components:
-        match = re.search("\[([0-9]+):([0-9]+)\]", c)
-        if match:
-            start, end = match.groups()
-            n += int(end) - int(start) + 1
-        else:
-            n += 1
-    return n
+from openpype.hosts.maya.api.lib import len_flattened


 class ValidateMeshHasUVs(pyblish.api.InstancePlugin):

@@ -57,6 +27,15 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
         invalid = []

         for node in cmds.ls(instance, type='mesh'):
+            num_vertices = cmds.polyEvaluate(node, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(node)
+                )
+                continue
+
             uv = cmds.polyEvaluate(node, uv=True)

             if uv == 0:

@@ -28,7 +28,10 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance):
         """Return the invalid edges.
-        Also see: http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup
+
+        Also see:
+
+        http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup
+
         """

@@ -36,8 +39,21 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
         if not meshes:
             return list()

+        valid_meshes = []
+        for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
+            valid_meshes.append(mesh)
+
         # Get all edges
-        edges = ['{0}.e[*]'.format(node) for node in meshes]
+        edges = ['{0}.e[*]'.format(node) for node in valid_meshes]

         # Filter by constraint on edge length
         invalid = lib.polyConstraint(edges,

@@ -1,5 +1,3 @@
-import re
-
 from maya import cmds

 import pyblish.api

@@ -8,37 +6,7 @@ from openpype.pipeline.publish import (
     RepairAction,
     ValidateMeshOrder,
 )
-
-
-def len_flattened(components):
-    """Return the length of the list as if it was flattened.
-
-    Maya will return consecutive components as a single entry
-    when requesting with `maya.cmds.ls` without the `flatten`
-    flag. Though enabling `flatten` on a large list (e.g. millions)
-    will result in a slow result. This command will return the amount
-    of entries in a non-flattened list by parsing the result with
-    regex.
-
-    Args:
-        components (list): The non-flattened components.
-
-    Returns:
-        int: The amount of entries.
-
-    """
-    assert isinstance(components, (list, tuple))
-    n = 0
-
-    pattern = re.compile(r"\[(\d+):(\d+)\]")
-    for c in components:
-        match = pattern.search(c)
-        if match:
-            start, end = match.groups()
-            n += int(end) - int(start) + 1
-        else:
-            n += 1
-    return n
+from openpype.hosts.maya.api.lib import len_flattened


 class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):

@@ -87,6 +55,13 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
         for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", cause it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
             # Vertices from all edges
             edges = "%s.e[*]" % mesh
             vertices = cmds.polyListComponentConversion(edges, toVertex=True)

@@ -2,9 +2,11 @@
 """Validate model nodes names."""
 import os
 import re
-from maya import cmds
-import pyblish.api
+import platform
+
+from maya import cmds
+
+import pyblish.api
 from openpype.pipeline import legacy_io
 from openpype.pipeline.publish import ValidateContentsOrder
 import openpype.hosts.maya.api.action

@@ -44,7 +46,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
                 if not cmds.ls(child, transforms=True):
                     return False
             return True
-        except:
+        except Exception:
             return False

         invalid = []

@@ -94,9 +96,10 @@ class ValidateModelName(pyblish.api.InstancePlugin):
         # load shader list file as utf-8
         shaders = []
         if not use_db:
-            if cls.material_file:
-                if os.path.isfile(cls.material_file):
-                    shader_file = open(cls.material_file, "r")
+            material_file = cls.material_file[platform.system().lower()]
+            if material_file:
+                if os.path.isfile(material_file):
+                    shader_file = open(material_file, "r")
                     shaders = shader_file.readlines()
                     shader_file.close()
             else:

@@ -113,7 +116,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
             shader_file.close()

         # strip line endings from list
-        shaders = map(lambda s: s.rstrip(), shaders)
+        shaders = [s.rstrip() for s in shaders if s.rstrip()]

         # compile regex for testing names
         regex = cls.regex

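With the change above, material_file becomes a per-platform mapping indexed by platform.system().lower() rather than a single path. A minimal standalone sketch, with hypothetical paths:

import platform

# Hypothetical per-platform setting, mirroring the lookup in the diff above.
material_file = {
    "windows": "P:/studio/shaders.txt",
    "linux": "/mnt/studio/shaders.txt",
    "darwin": "/Volumes/studio/shaders.txt",
}

# platform.system() returns "Windows", "Linux" or "Darwin".
print(material_file[platform.system().lower()])
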
@@ -1268,7 +1268,7 @@ def convert_to_valid_instaces():
             creator_attr["farm_chunk"] = (
                 node["deadlineChunkSize"].value())
         if "deadlineConcurrentTasks" in node.knobs():
-            creator_attr["farm_concurency"] = (
+            creator_attr["farm_concurrency"] = (
                 node["deadlineConcurrentTasks"].value())

     _remove_old_knobs(node)

@@ -6,10 +6,7 @@ from openpype.pipeline import (
     CreatedInstance
 )
 from openpype.lib import (
-    BoolDef,
-    NumberDef,
-    UISeparatorDef,
-    UILabelDef
+    BoolDef
 )
 from openpype.hosts.nuke import api as napi

@@ -49,33 +46,6 @@ class CreateWritePrerender(napi.NukeWriteCreator):
             self._get_render_target_enum(),
             self._get_reviewable_bool()
         ]
-        if "farm_rendering" in self.instance_attributes:
-            attr_defs.extend([
-                UISeparatorDef(),
-                UILabelDef("Farm rendering attributes"),
-                BoolDef("suspended_publish", label="Suspended publishing"),
-                NumberDef(
-                    "farm_priority",
-                    label="Priority",
-                    minimum=1,
-                    maximum=99,
-                    default=50
-                ),
-                NumberDef(
-                    "farm_chunk",
-                    label="Chunk size",
-                    minimum=1,
-                    maximum=99,
-                    default=10
-                ),
-                NumberDef(
-                    "farm_concurency",
-                    label="Concurent tasks",
-                    minimum=1,
-                    maximum=10,
-                    default=1
-                )
-            ])
         return attr_defs

     def create_instance_node(self, subset_name, instance_data):

@@ -6,10 +6,7 @@ from openpype.pipeline import (
     CreatedInstance
 )
 from openpype.lib import (
-    BoolDef,
-    NumberDef,
-    UISeparatorDef,
-    UILabelDef
+    BoolDef
 )
 from openpype.hosts.nuke import api as napi

@@ -46,33 +43,6 @@ class CreateWriteRender(napi.NukeWriteCreator):
             self._get_render_target_enum(),
             self._get_reviewable_bool()
         ]
-        if "farm_rendering" in self.instance_attributes:
-            attr_defs.extend([
-                UISeparatorDef(),
-                UILabelDef("Farm rendering attributes"),
-                BoolDef("suspended_publish", label="Suspended publishing"),
-                NumberDef(
-                    "farm_priority",
-                    label="Priority",
-                    minimum=1,
-                    maximum=99,
-                    default=50
-                ),
-                NumberDef(
-                    "farm_chunk",
-                    label="Chunk size",
-                    minimum=1,
-                    maximum=99,
-                    default=10
-                ),
-                NumberDef(
-                    "farm_concurency",
-                    label="Concurent tasks",
-                    minimum=1,
-                    maximum=10,
-                    default=1
-                )
-            ])
         return attr_defs

     def create_instance_node(self, subset_name, instance_data):

@@ -3,12 +3,14 @@ from pprint import pformat
 import nuke
 import pyblish.api
 from openpype.hosts.nuke import api as napi
+from openpype.pipeline import publish


-class CollectNukeWrites(pyblish.api.InstancePlugin):
+class CollectNukeWrites(pyblish.api.InstancePlugin,
+                        publish.ColormanagedPyblishPluginMixin):
     """Collect all write nodes."""

-    order = pyblish.api.CollectorOrder - 0.48
+    order = pyblish.api.CollectorOrder + 0.0021
     label = "Collect Writes"
     hosts = ["nuke", "nukeassist"]
     families = ["render", "prerender", "image"]

@@ -66,6 +68,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
         write_file_path = nuke.filename(write_node)
         output_dir = os.path.dirname(write_file_path)

+        # get colorspace and add to version data
+        colorspace = napi.get_colorspace_from_node(write_node)
+
         self.log.debug('output dir: {}'.format(output_dir))

         if render_target == "frames":

@@ -128,25 +133,30 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             else:
                 representation['files'] = collected_frames

+            # inject colorspace data
+            self.set_representation_colorspace(
+                representation, instance.context,
+                colorspace=colorspace
+            )
+
             instance.data["representations"].append(representation)
             self.log.info("Publishing rendered frames ...")

         elif render_target == "farm":
-            farm_priority = creator_attributes.get("farm_priority")
-            farm_chunk = creator_attributes.get("farm_chunk")
-            farm_concurency = creator_attributes.get("farm_concurency")
-            instance.data.update({
-                "deadlineChunkSize": farm_chunk or 1,
-                "deadlinePriority": farm_priority or 50,
-                "deadlineConcurrentTasks": farm_concurency or 0
-            })
+            farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"]
+            for key in farm_keys:
+                # Skip if key is not in creator attributes
+                if key not in creator_attributes:
+                    continue
+                # Add farm attributes to instance
+                instance.data[key] = creator_attributes[key]

             # Farm rendering
             instance.data["transfer"] = False
             instance.data["farm"] = True
             self.log.info("Farm rendering ON ...")

-        # get colorspace and add to version data
-        colorspace = napi.get_colorspace_from_node(write_node)
+        # TODO: remove this when we have proper colorspace support
         version_data = {
             "colorspace": colorspace
         }

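The replacement loop above forwards only the farm attributes that actually exist on the creator, instead of writing Deadline defaults unconditionally. A standalone sketch, with plain dicts standing in for the pyblish instance and creator attributes:

creator_attributes = {"farm_chunk": 10, "farm_priority": 50}
instance_data = {}

farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"]
for key in farm_keys:
    # Keys absent from the creator attributes are skipped, so
    # "farm_concurrency" is simply not forwarded here.
    if key not in creator_attributes:
        continue
    instance_data[key] = creator_attributes[key]

print(instance_data)  # {'farm_chunk': 10, 'farm_priority': 50}
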
@@ -4,12 +4,13 @@ import shutil
 import pyblish.api
 import clique
 import nuke

+from openpype.hosts.nuke import api as napi
 from openpype.pipeline import publish
 from openpype.lib import collect_frames


-class NukeRenderLocal(publish.ExtractorColormanaged):
+class NukeRenderLocal(publish.Extractor,
+                      publish.ColormanagedPyblishPluginMixin):
     """Render the current Nuke composition locally.

     Extract the result of savers by starting a comp render

@@ -85,7 +86,7 @@ class NukeRenderLocal(publish.ExtractorColormanaged):
         )

         ext = node["file_type"].value()
-        colorspace = node["colorspace"].value()
+        colorspace = napi.get_colorspace_from_node(node)

         if "representations" not in instance.data:
             instance.data["representations"] = []

@@ -10,10 +10,20 @@ from wsrpc_aiohttp import (

 from qtpy import QtCore

-from openpype.lib import Logger
-from openpype.pipeline import legacy_io
+from openpype.lib import Logger, StringTemplate
+from openpype.pipeline import (
+    registered_host,
+    Anatomy,
+)
+from openpype.pipeline.workfile import (
+    get_workfile_template_key_from_context,
+    get_last_workfile,
+)
+from openpype.pipeline.template_data import get_template_data_with_names
 from openpype.tools.utils import host_tools
 from openpype.tools.adobe_webserver.app import WebServerTool
+from openpype.pipeline.context_tools import change_current_context
+from openpype.client import get_asset_by_name

 from .ws_stub import PhotoshopServerStub

@@ -310,23 +320,28 @@ class PhotoshopRoute(WebSocketRoute):
     # client functions
     async def set_context(self, project, asset, task):
         """
-        Sets 'project' and 'asset' to envs, eg. setting context
+        Sets 'project' and 'asset' to envs, eg. setting context.

-        Args:
-            project (str)
-            asset (str)
+        Opens last workile from that context if exists.
+
+        Args:
+            project (str)
+            asset (str)
+            task (str
         """
-        log.info("Setting context change")
-        log.info("project {} asset {} ".format(project, asset))
-        if project:
-            legacy_io.Session["AVALON_PROJECT"] = project
-            os.environ["AVALON_PROJECT"] = project
-        if asset:
-            legacy_io.Session["AVALON_ASSET"] = asset
-            os.environ["AVALON_ASSET"] = asset
-        if task:
-            legacy_io.Session["AVALON_TASK"] = task
-            os.environ["AVALON_TASK"] = task
+        log.info(f"project {project} asset {asset} task {task}")
+
+        asset_doc = get_asset_by_name(project, asset)
+        change_current_context(asset_doc, task)
+
+        last_workfile_path = self._get_last_workfile_path(project,
+                                                          asset,
+                                                          task)
+        if last_workfile_path and os.path.exists(last_workfile_path):
+            ProcessLauncher.execute_in_main_thread(
+                lambda: stub().open(last_workfile_path))

     async def read(self):
         log.debug("photoshop.read client calls server server calls "

@@ -356,3 +371,35 @@ class PhotoshopRoute(WebSocketRoute):

         # Required return statement.
         return "nothing"
+
+    def _get_last_workfile_path(self, project_name, asset_name, task_name):
+        """Returns last workfile path if exists"""
+        host = registered_host()
+        host_name = "photoshop"
+        template_key = get_workfile_template_key_from_context(
+            asset_name,
+            task_name,
+            host_name,
+            project_name=project_name
+        )
+        anatomy = Anatomy(project_name)
+
+        data = get_template_data_with_names(
+            project_name, asset_name, task_name, host_name
+        )
+        data["root"] = anatomy.roots
+
+        file_template = anatomy.templates[template_key]["file"]
+
+        # Define saving file extension
+        extensions = host.get_workfile_extensions()
+
+        folder_template = anatomy.templates[template_key]["folder"]
+        work_root = StringTemplate.format_strict_template(
+            folder_template, data
+        )
+        last_workfile_path = get_last_workfile(
+            work_root, file_template, data, extensions, True
+        )
+
+        return last_workfile_path

@@ -9,11 +9,19 @@ import pyblish.api

 import nuke
 from openpype.pipeline import legacy_io
+from openpype.pipeline.publish import (
+    OpenPypePyblishPluginMixin
+)
 from openpype.tests.lib import is_in_tests
-from openpype.lib import is_running_from_build
+from openpype.lib import (
+    is_running_from_build,
+    BoolDef,
+    NumberDef
+)


-class NukeSubmitDeadline(pyblish.api.InstancePlugin):
+class NukeSubmitDeadline(pyblish.api.InstancePlugin,
+                         OpenPypePyblishPluginMixin):
     """Submit write to Deadline

     Renders are submitted to a Deadline Web Service as

@@ -21,10 +29,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

     """

-    label = "Submit to Deadline"
+    label = "Submit Nuke to Deadline"
     order = pyblish.api.IntegratorOrder + 0.1
-    hosts = ["nuke", "nukestudio"]
-    families = ["render.farm", "prerender.farm"]
+    hosts = ["nuke"]
+    families = ["render", "prerender.farm"]
     optional = True
     targets = ["local"]

@@ -39,7 +47,42 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
     env_allowed_keys = []
     env_search_replace_values = {}

+    @classmethod
+    def get_attribute_defs(cls):
+        return [
+            NumberDef(
+                "priority",
+                label="Priority",
+                default=cls.priority,
+                decimals=0
+            ),
+            NumberDef(
+                "chunk",
+                label="Frames Per Task",
+                default=cls.chunk_size,
+                decimals=0,
+                minimum=1,
+                maximum=1000
+            ),
+            NumberDef(
+                "concurrency",
+                label="Concurency",
+                default=cls.concurrent_tasks,
+                decimals=0,
+                minimum=1,
+                maximum=10
+            ),
+            BoolDef(
+                "use_gpu",
+                default=cls.use_gpu,
+                label="Use GPU"
+            )
+        ]
+
     def process(self, instance):
+        instance.data["attributeValues"] = self.get_attr_values_from_data(
+            instance.data)
+
         instance.data["toBeRenderedOn"] = "deadline"
         families = instance.data["families"]

@@ -141,7 +184,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         exe_node_name,
         start_frame,
         end_frame,
-        responce_data=None
+        response_data=None
     ):
         render_dir = os.path.normpath(os.path.dirname(render_path))
         batch_name = os.path.basename(script_path)

@@ -152,8 +195,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

         output_filename_0 = self.preview_fname(render_path)

-        if not responce_data:
-            responce_data = {}
+        if not response_data:
+            response_data = {}

         try:
             # Ensure render folder exists

@@ -161,20 +204,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         except OSError:
             pass

-        # define chunk and priority
-        chunk_size = instance.data["deadlineChunkSize"]
-        if chunk_size == 0 and self.chunk_size:
-            chunk_size = self.chunk_size
-
-        # define chunk and priority
-        concurrent_tasks = instance.data["deadlineConcurrentTasks"]
-        if concurrent_tasks == 0 and self.concurrent_tasks:
-            concurrent_tasks = self.concurrent_tasks
-
-        priority = instance.data["deadlinePriority"]
-        if not priority:
-            priority = self.priority
-
         # resolve any limit groups
         limit_groups = self.get_limit_groups()
         self.log.info("Limit groups: `{}`".format(limit_groups))

@@ -193,9 +222,14 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             # Arbitrary username, for visualisation in Monitor
             "UserName": self._deadline_user,

-            "Priority": priority,
-            "ChunkSize": chunk_size,
-            "ConcurrentTasks": concurrent_tasks,
+            "Priority": instance.data["attributeValues"].get(
+                "priority", self.priority),
+            "ChunkSize": instance.data["attributeValues"].get(
+                "chunk", self.chunk_size),
+            "ConcurrentTasks": instance.data["attributeValues"].get(
+                "concurrency",
+                self.concurrent_tasks
+            ),

             "Department": self.department,

@@ -234,7 +268,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             "AWSAssetFile0": render_path,

             # using GPU by default
-            "UseGpu": self.use_gpu,
+            "UseGpu": instance.data["attributeValues"].get(
+                "use_gpu", self.use_gpu),

             # Only the specific write node is rendered.
             "WriteNode": exe_node_name

@@ -244,11 +279,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             "AuxFiles": []
         }

-        if responce_data.get("_id"):
+        if response_data.get("_id"):
             payload["JobInfo"].update({
                 "JobType": "Normal",
-                "BatchName": responce_data["Props"]["Batch"],
-                "JobDependency0": responce_data["_id"],
+                "BatchName": response_data["Props"]["Batch"],
+                "JobDependency0": response_data["_id"],
                 "ChunkSize": 99999999
             })

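The submission payload above reads every tunable from instance.data["attributeValues"] with the plugin setting as fallback. The pattern in isolation, with plain dicts as stand-ins for the instance and the plugin class:

plugin_defaults = {"priority": 50, "chunk_size": 1, "use_gpu": True}
attribute_values = {"priority": 80}  # the artist only overrode the priority

job_info = {
    "Priority": attribute_values.get("priority", plugin_defaults["priority"]),
    "ChunkSize": attribute_values.get("chunk", plugin_defaults["chunk_size"]),
    "UseGpu": attribute_values.get("use_gpu", plugin_defaults["use_gpu"]),
}
print(job_info)  # {'Priority': 80, 'ChunkSize': 1, 'UseGpu': True}
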
@@ -23,36 +23,37 @@ class ShowInKitsu(LauncherAction):
         return True

     def process(self, session, **kwargs):
         # Context inputs
         project_name = session["AVALON_PROJECT"]
         asset_name = session.get("AVALON_ASSET", None)
         task_name = session.get("AVALON_TASK", None)

-        project = get_project(project_name=project_name,
-                              fields=["data.zou_id"])
+        project = get_project(
+            project_name=project_name, fields=["data.zou_id"]
+        )
         if not project:
-            raise RuntimeError(f"Project {project_name} not found.")
+            raise RuntimeError("Project {} not found.".format(project_name))

         project_zou_id = project["data"].get("zou_id")
         if not project_zou_id:
-            raise RuntimeError(f"Project {project_name} has no "
-                               f"connected kitsu id.")
+            raise RuntimeError(
+                "Project {} has no connected kitsu id.".format(project_name)
+            )

         asset_zou_name = None
         asset_zou_id = None
-        asset_zou_type = 'Assets'
+        asset_zou_type = "Assets"
         task_zou_id = None
-        zou_sub_type = ['AssetType', 'Sequence']
+        zou_sub_type = ["AssetType", "Sequence"]
         if asset_name:
             asset_zou_name = asset_name
             asset_fields = ["data.zou.id", "data.zou.type"]
             if task_name:
-                asset_fields.append(f"data.tasks.{task_name}.zou.id")
+                asset_fields.append("data.tasks.{}.zou.id".format(task_name))

-            asset = get_asset_by_name(project_name,
-                                      asset_name=asset_name,
-                                      fields=asset_fields)
+            asset = get_asset_by_name(
+                project_name, asset_name=asset_name, fields=asset_fields
+            )

             asset_zou_data = asset["data"].get("zou")

@@ -67,40 +68,47 @@ class ShowInKitsu(LauncherAction):
                 task_data = asset["data"]["tasks"][task_name]
                 task_zou_data = task_data.get("zou", {})
                 if not task_zou_data:
-                    self.log.debug(f"No zou task data for task: {task_name}")
+                    self.log.debug(
+                        "No zou task data for task: {}".format(task_name)
+                    )
                 task_zou_id = task_zou_data["id"]

         # Define URL
-        url = self.get_url(project_id=project_zou_id,
-                           asset_name=asset_zou_name,
-                           asset_id=asset_zou_id,
-                           asset_type=asset_zou_type,
-                           task_id=task_zou_id)
+        url = self.get_url(
+            project_id=project_zou_id,
+            asset_name=asset_zou_name,
+            asset_id=asset_zou_id,
+            asset_type=asset_zou_type,
+            task_id=task_zou_id,
+        )

         # Open URL in webbrowser
-        self.log.info(f"Opening URL: {url}")
-        webbrowser.open(url,
-                        # Try in new tab
-                        new=2)
+        self.log.info("Opening URL: {}".format(url))
+        webbrowser.open(
+            url,
+            # Try in new tab
+            new=2,
+        )

-    def get_url(self,
-                project_id,
-                asset_name=None,
-                asset_id=None,
-                asset_type=None,
-                task_id=None):
-
-        shots_url = {'Shots', 'Sequence', 'Shot'}
-        sub_type = {'AssetType', 'Sequence'}
+    def get_url(
+        self,
+        project_id,
+        asset_name=None,
+        asset_id=None,
+        asset_type=None,
+        task_id=None,
+    ):
+        shots_url = {"Shots", "Sequence", "Shot"}
+        sub_type = {"AssetType", "Sequence"}
         kitsu_module = self.get_kitsu_module()

         # Get kitsu url with /api stripped
         kitsu_url = kitsu_module.server_url
         if kitsu_url.endswith("/api"):
-            kitsu_url = kitsu_url[:-len("/api")]
+            kitsu_url = kitsu_url[: -len("/api")]

         sub_url = f"/productions/{project_id}"
-        asset_type_url = "Shots" if asset_type in shots_url else "Assets"
+        asset_type_url = "shots" if asset_type in shots_url else "assets"

         if task_id:
             # Go to task page

@@ -120,6 +128,6 @@ class ShowInKitsu(LauncherAction):
         # Add search method if is a sub_type
         sub_url += f"/{asset_type_url}"
         if asset_type in sub_type:
-            sub_url += f'?search={asset_name}'
+            sub_url += f"?search={asset_name}"

         return f"{kitsu_url}{sub_url}"

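The URL assembly above reduces to simple string slicing and formatting; a standalone sketch with a hypothetical server address:

kitsu_url = "https://kitsu.example.com/api"  # hypothetical server address

# Strip the trailing "/api" to get the web UI root.
if kitsu_url.endswith("/api"):
    kitsu_url = kitsu_url[: -len("/api")]

project_id = "prj-123"
asset_type_url = "shots"  # lower-case, as fixed in the diff above
asset_name = "sq01"

sub_url = f"/productions/{project_id}"
sub_url += f"/{asset_type_url}"
sub_url += f"?search={asset_name}"

print(f"{kitsu_url}{sub_url}")
# https://kitsu.example.com/productions/prj-123/shots?search=sq01
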
@@ -13,6 +13,5 @@ class CollectKitsuSession(pyblish.api.ContextPlugin):  # rename log in
     # families = ["kitsu"]

     def process(self, context):
-
         gazu.client.set_host(os.environ["KITSU_SERVER"])
         gazu.log_in(os.environ["KITSU_LOGIN"], os.environ["KITSU_PWD"])

@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
-import os
-
 import gazu
 import pyblish.api

@@ -12,62 +10,69 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin):
     label = "Kitsu entities"

     def process(self, context):
-
-        asset_data = context.data["assetEntity"]["data"]
-        zou_asset_data = asset_data.get("zou")
-        if not zou_asset_data:
-            raise AssertionError("Zou asset data not found in OpenPype!")
-        self.log.debug("Collected zou asset data: {}".format(zou_asset_data))
-
-        zou_task_data = asset_data["tasks"][os.environ["AVALON_TASK"]].get(
-            "zou"
-        )
-        if not zou_task_data:
-            self.log.warning("Zou task data not found in OpenPype!")
-        self.log.debug("Collected zou task data: {}".format(zou_task_data))
-
-        kitsu_project = gazu.project.get_project(zou_asset_data["project_id"])
+        kitsu_project = gazu.project.get_project_by_name(
+            context.data["projectName"]
+        )
         if not kitsu_project:
-            raise AssertionError("Project not found in kitsu!")
+            raise ValueError("Project not found in kitsu!")

         context.data["kitsu_project"] = kitsu_project
         self.log.debug("Collect kitsu project: {}".format(kitsu_project))

-        entity_type = zou_asset_data["type"]
-        if entity_type == "Shot":
-            kitsu_entity = gazu.shot.get_shot(zou_asset_data["id"])
-        else:
-            kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"])
-
-        if not kitsu_entity:
-            raise AssertionError("{} not found in kitsu!".format(entity_type))
-
-        context.data["kitsu_entity"] = kitsu_entity
-        self.log.debug(
-            "Collect kitsu {}: {}".format(entity_type, kitsu_entity)
-        )
-
-        if zou_task_data:
-            kitsu_task = gazu.task.get_task(zou_task_data["id"])
-            if not kitsu_task:
-                raise AssertionError("Task not found in kitsu!")
-            context.data["kitsu_task"] = kitsu_task
-            self.log.debug("Collect kitsu task: {}".format(kitsu_task))
-
-        else:
-            kitsu_task_type = gazu.task.get_task_type_by_name(
-                os.environ["AVALON_TASK"]
-            )
-            if not kitsu_task_type:
-                raise AssertionError(
-                    "Task type {} not found in Kitsu!".format(
-                        os.environ["AVALON_TASK"]
-                    )
-                )
-
-            kitsu_task = gazu.task.get_task_by_name(
-                kitsu_entity, kitsu_task_type
-            )
-            if not kitsu_task:
-                raise AssertionError("Task not found in kitsu!")
-            context.data["kitsu_task"] = kitsu_task
-            self.log.debug("Collect kitsu task: {}".format(kitsu_task))
+        kitsu_entities_by_id = {}
+        for instance in context:
+            asset_doc = instance.data.get("assetEntity")
+            if not asset_doc:
+                continue
+
+            zou_asset_data = asset_doc["data"].get("zou")
+            if not zou_asset_data:
+                raise ValueError("Zou asset data not found in OpenPype!")
+
+            task_name = instance.data.get("task")
+            if not task_name:
+                continue
+
+            zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou")
+            self.log.debug(
+                "Collected zou task data: {}".format(zou_task_data)
+            )
+
+            entity_id = zou_asset_data["id"]
+            entity = kitsu_entities_by_id.get(entity_id)
+            if not entity:
+                entity = gazu.entity.get_entity(entity_id)
+                if not entity:
+                    raise ValueError(
+                        "{} was not found in kitsu!".format(
+                            zou_asset_data["name"]
+                        )
+                    )
+
+            kitsu_entities_by_id[entity_id] = entity
+            instance.data["entity"] = entity
+            self.log.debug(
+                "Collect kitsu {}: {}".format(zou_asset_data["type"], entity)
+            )
+
+            if zou_task_data:
+                kitsu_task_id = zou_task_data["id"]
+                kitsu_task = kitsu_entities_by_id.get(kitsu_task_id)
+                if not kitsu_task:
+                    kitsu_task = gazu.task.get_task(zou_task_data["id"])
+                    kitsu_entities_by_id[kitsu_task_id] = kitsu_task
+            else:
+                kitsu_task_type = gazu.task.get_task_type_by_name(task_name)
+                if not kitsu_task_type:
+                    raise ValueError(
+                        "Task type {} not found in Kitsu!".format(task_name)
+                    )
+
+                kitsu_task = gazu.task.get_task_by_name(
+                    entity, kitsu_task_type
+                )
+
+            if not kitsu_task:
+                raise ValueError("Task not found in kitsu!")
+            instance.data["kitsu_task"] = kitsu_task
+            self.log.debug("Collect kitsu task: {}".format(kitsu_task))

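The collector above builds a per-id cache so repeated instances on the same Kitsu entity trigger only one server round-trip. A standalone sketch of that caching pattern, where fetch_entity stands in for gazu.entity.get_entity:

calls = []

def fetch_entity(entity_id):
    # Stand-in for the gazu server call; records each round-trip.
    calls.append(entity_id)
    return {"id": entity_id, "type": "Shot"}

kitsu_entities_by_id = {}
for entity_id in ["ent-1", "ent-2", "ent-1", "ent-1"]:
    entity = kitsu_entities_by_id.get(entity_id)
    if not entity:
        entity = fetch_entity(entity_id)
        kitsu_entities_by_id[entity_id] = entity

print(calls)  # ['ent-1', 'ent-2'] -- each entity fetched once
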
@@ -8,12 +8,11 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):

     order = pyblish.api.IntegratorOrder
     label = "Kitsu Note and Status"
-    # families = ["kitsu"]
+    families = ["render", "kitsu"]
     set_status_note = False
     note_status_shortname = "wfa"

     def process(self, context):
-
         # Get comment text body
         publish_comment = context.data.get("comment")
         if not publish_comment:

@@ -21,30 +20,33 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):

         self.log.debug("Comment is `{}`".format(publish_comment))

-        # Get note status, by default uses the task status for the note
-        # if it is not specified in the configuration
-        note_status = context.data["kitsu_task"]["task_status_id"]
-        if self.set_status_note:
-            kitsu_status = gazu.task.get_task_status_by_short_name(
-                self.note_status_shortname
-            )
-            if kitsu_status:
-                note_status = kitsu_status
-                self.log.info("Note Kitsu status: {}".format(note_status))
-            else:
-                self.log.info(
-                    "Cannot find {} status. The status will not be "
-                    "changed!".format(self.note_status_shortname)
-                )
-
-        # Add comment to kitsu task
-        self.log.debug(
-            "Add new note in taks id {}".format(
-                context.data["kitsu_task"]["id"]
-            )
-        )
-        kitsu_comment = gazu.task.add_comment(
-            context.data["kitsu_task"], note_status, comment=publish_comment
-        )
-
-        context.data["kitsu_comment"] = kitsu_comment
+        for instance in context:
+            kitsu_task = instance.data.get("kitsu_task")
+            if kitsu_task is None:
+                continue
+
+            # Get note status, by default uses the task status for the note
+            # if it is not specified in the configuration
+            note_status = kitsu_task["task_status"]["id"]
+
+            if self.set_status_note:
+                kitsu_status = gazu.task.get_task_status_by_short_name(
+                    self.note_status_shortname
+                )
+                if kitsu_status:
+                    note_status = kitsu_status
+                    self.log.info("Note Kitsu status: {}".format(note_status))
+                else:
+                    self.log.info(
+                        "Cannot find {} status. The status will not be "
+                        "changed!".format(self.note_status_shortname)
+                    )
+
+            # Add comment to kitsu task
+            task_id = kitsu_task["id"]
+            self.log.debug("Add new note in taks id {}".format(task_id))
+            kitsu_comment = gazu.task.add_comment(
+                task_id, note_status, comment=publish_comment
+            )
+
+            instance.data["kitsu_comment"] = kitsu_comment

@@ -8,14 +8,12 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):

     order = pyblish.api.IntegratorOrder + 0.01
     label = "Kitsu Review"
-    # families = ["kitsu"]
+    families = ["render", "kitsu"]
     optional = True

     def process(self, instance):
-
-        context = instance.context
-        task = context.data["kitsu_task"]
-        comment = context.data.get("kitsu_comment")
+        task = instance.data["kitsu_task"]["id"]
+        comment = instance.data["kitsu_comment"]["id"]

         # Check comment has been created
         if not comment:

@@ -27,9 +25,8 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
         # Add review representations as preview of comment
         for representation in instance.data.get("representations", []):
             # Skip if not tagged as review
-            if "review" not in representation.get("tags", []):
+            if "kitsureview" not in representation.get("tags", []):
                 continue

             review_path = representation.get("published_path")
             self.log.debug("Found review at: {}".format(review_path))

@@ -54,7 +54,8 @@ def validate_host(kitsu_url: str) -> bool:
     if gazu.client.host_is_valid():
         return True
     else:
-        raise gazu.exception.HostException(f"Host '{kitsu_url}' is invalid.")
+        raise gazu.exception.HostException(
+            "Host '{}' is invalid.".format(kitsu_url))


 def clear_credentials():

@@ -1,3 +1,15 @@
+"""
+Bugs:
+    * Error when adding task type to anything that isn't Shot or Assets
+    * Assets don't get added under an episode if TV show
+    * Assets added under Main Pack throws error. No Main Pack name in dict
+
+Features ToDo:
+    * Select in settings what types you wish to sync
+    * Print what's updated on entity-update
+    * Add listener for Edits
+"""
+
 import os
 import threading

@@ -5,6 +17,7 @@ import gazu

 from openpype.client import get_project, get_assets, get_asset_by_name
 from openpype.pipeline import AvalonMongoDB
+from openpype.lib import Logger
 from .credentials import validate_credentials
 from .update_op_with_zou import (
     create_op_asset,

@@ -14,6 +27,8 @@ from .update_op_with_zou import (
     update_op_assets,
 )

+log = Logger.get_logger(__name__)
+

 class Listener:
     """Host Kitsu listener."""

@@ -38,7 +53,7 @@ class Listener:
         # Authenticate
         if not validate_credentials(login, password):
             raise gazu.exception.AuthFailedException(
-                f"Kitsu authentication failed for login: '{login}'..."
+                'Kitsu authentication failed for login: "{}"...'.format(login)
             )

         gazu.set_event_host(

@@ -86,7 +101,9 @@ class Listener:
             self.event_client, "sequence:delete", self._delete_sequence
         )

-        gazu.events.add_listener(self.event_client, "shot:new", self._new_shot)
+        gazu.events.add_listener(
+            self.event_client, "shot:new", self._new_shot
+        )
         gazu.events.add_listener(
             self.event_client, "shot:update", self._update_shot
         )

@@ -94,7 +111,9 @@ class Listener:
             self.event_client, "shot:delete", self._delete_shot
         )

-        gazu.events.add_listener(self.event_client, "task:new", self._new_task)
+        gazu.events.add_listener(
+            self.event_client, "task:new", self._new_task
+        )
         gazu.events.add_listener(
             self.event_client, "task:update", self._update_task
         )

@@ -103,44 +122,62 @@ class Listener:
         )

     def start(self):
         """Start listening for events."""
+        log.info("Listening to Kitsu events...")
         gazu.events.run_client(self.event_client)

+    def get_ep_dict(self, ep_id):
+        if ep_id and ep_id != "":
+            return gazu.entity.get_entity(ep_id)
+        return
+
     # == Project ==
     def _new_project(self, data):
         """Create new project into OP DB."""

         # Use update process to avoid duplicating code
-        self._update_project(data)
+        self._update_project(data, new_project=True)

-    def _update_project(self, data):
+    def _update_project(self, data, new_project=False):
         """Update project into OP DB."""
         # Get project entity
         project = gazu.project.get_project(data["project_id"])
-        project_name = project["name"]

         update_project = write_project_to_op(project, self.dbcon)

         # Write into DB
         if update_project:
-            self.dbcon.Session["AVALON_PROJECT"] = project_name
+            self.dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name(
+                data["project_id"]
+            )
             self.dbcon.bulk_write([update_project])

+        if new_project:
+            log.info("Project created: {}".format(project["name"]))
+
     def _delete_project(self, data):
         """Delete project."""
-        project_name = get_kitsu_project_name(data["project_id"])
+        collections = self.dbcon.database.list_collection_names()
+        for collection in collections:
+            project = self.dbcon.database[collection].find_one(
+                {"data.zou_id": data["project_id"]}
+            )
+            if project:
+                # Delete project collection
+                self.dbcon.database[project["name"]].drop()

-        # Delete project collection
-        self.dbcon.database[project_name].drop()
+                # Print message
+                log.info("Project deleted: {}".format(project["name"]))
+                return

     # == Asset ==
     def _new_asset(self, data):
         """Create new asset into OP DB."""
         # Get project entity
         set_op_project(self.dbcon, data["project_id"])

-        # Get gazu entity
+        # Get asset entity
         asset = gazu.asset.get_asset(data["asset_id"])

         # Insert doc in DB

@@ -149,6 +186,21 @@ class Listener:
         # Update
         self._update_asset(data)

+        # Print message
+        ep_id = asset.get("episode_id")
+        ep = self.get_ep_dict(ep_id)
+
+        msg = (
+            "Asset created: {proj_name} - {ep_name}"
+            "{asset_type_name} - {asset_name}".format(
+                proj_name=asset["project_name"],
+                ep_name=ep["name"] + " - " if ep is not None else "",
+                asset_type_name=asset["asset_type_name"],
+                asset_name=asset["name"],
+            )
+        )
+        log.info(msg)
+
     def _update_asset(self, data):
         """Update asset into OP DB."""
         set_op_project(self.dbcon, data["project_id"])

@@ -166,10 +218,15 @@ class Listener:
             if asset_doc["data"].get("zou", {}).get("id")
         }
         zou_ids_and_asset_docs[asset["project_id"]] = project_doc
+        gazu_project = gazu.project.get_project(asset["project_id"])

         # Update
         update_op_result = update_op_assets(
-            self.dbcon, project_doc, [asset], zou_ids_and_asset_docs
+            self.dbcon,
+            gazu_project,
+            project_doc,
+            [asset],
+            zou_ids_and_asset_docs,
         )
         if update_op_result:
             asset_doc_id, asset_update = update_op_result[0]

@@ -179,10 +236,27 @@ class Listener:
         """Delete asset of OP DB."""
         set_op_project(self.dbcon, data["project_id"])

-        # Delete
-        self.dbcon.delete_one(
-            {"type": "asset", "data.zou.id": data["asset_id"]}
-        )
+        asset = self.dbcon.find_one({"data.zou.id": data["asset_id"]})
+        if asset:
+            # Delete
+            self.dbcon.delete_one(
+                {"type": "asset", "data.zou.id": data["asset_id"]}
+            )
+
+            # Print message
+            ep_id = asset["data"]["zou"].get("episode_id")
+            ep = self.get_ep_dict(ep_id)
+
+            msg = (
+                "Asset deleted: {proj_name} - {ep_name}"
+                "{type_name} - {asset_name}".format(
+                    proj_name=asset["data"]["zou"]["project_name"],
+                    ep_name=ep["name"] + " - " if ep is not None else "",
+                    type_name=asset["data"]["zou"]["asset_type_name"],
+                    asset_name=asset["name"],
+                )
+            )
+            log.info(msg)

     # == Episode ==
     def _new_episode(self, data):

@@ -191,14 +265,20 @@ class Listener:
         set_op_project(self.dbcon, data["project_id"])

         # Get gazu entity
-        episode = gazu.shot.get_episode(data["episode_id"])
+        ep = gazu.shot.get_episode(data["episode_id"])

         # Insert doc in DB
-        self.dbcon.insert_one(create_op_asset(episode))
+        self.dbcon.insert_one(create_op_asset(ep))

         # Update
         self._update_episode(data)

+        # Print message
+        msg = "Episode created: {proj_name} - {ep_name}".format(
+            proj_name=ep["project_name"], ep_name=ep["name"]
+        )
+        log.info(msg)
+
     def _update_episode(self, data):
         """Update episode into OP DB."""
         set_op_project(self.dbcon, data["project_id"])

@@ -206,7 +286,7 @@ class Listener:
         project_doc = get_project(project_name)

         # Get gazu entity
-        episode = gazu.shot.get_episode(data["episode_id"])
+        ep = gazu.shot.get_episode(data["episode_id"])

         # Find asset doc
         # Query all assets of the local project

@@ -215,11 +295,16 @@ class Listener:
             for asset_doc in get_assets(project_name)
             if asset_doc["data"].get("zou", {}).get("id")
         }
-        zou_ids_and_asset_docs[episode["project_id"]] = project_doc
+        zou_ids_and_asset_docs[ep["project_id"]] = project_doc
+        gazu_project = gazu.project.get_project(ep["project_id"])

         # Update
         update_op_result = update_op_assets(
-            self.dbcon, project_doc, [episode], zou_ids_and_asset_docs
+            self.dbcon,
+            gazu_project,
+            project_doc,
+            [ep],
+            zou_ids_and_asset_docs,
         )
         if update_op_result:
             asset_doc_id, asset_update = update_op_result[0]

@@ -228,12 +313,23 @@ class Listener:
     def _delete_episode(self, data):
         """Delete shot of OP DB."""
         set_op_project(self.dbcon, data["project_id"])
-        print("delete episode")  # TODO check bugfix

-        # Delete
-        self.dbcon.delete_one(
-            {"type": "asset", "data.zou.id": data["episode_id"]}
-        )
+        ep = self.dbcon.find_one({"data.zou.id": data["episode_id"]})
+        if ep:
+            # Delete
+            self.dbcon.delete_one(
+                {"type": "asset", "data.zou.id": data["episode_id"]}
+            )
+
+            # Print message
+            project = gazu.project.get_project(
+                ep["data"]["zou"]["project_id"]
+            )
+
+            msg = "Episode deleted: {proj_name} - {ep_name}".format(
+                proj_name=project["name"], ep_name=ep["name"]
+            )
+            log.info(msg)

     # == Sequence ==
     def _new_sequence(self, data):

@@ -250,6 +346,20 @@ class Listener:
         # Update
         self._update_sequence(data)

+        # Print message
+        ep_id = sequence.get("episode_id")
+        ep = self.get_ep_dict(ep_id)
+
+        msg = (
+            "Sequence created: {proj_name} - {ep_name}"
+            "{sequence_name}".format(
+                proj_name=sequence["project_name"],
+                ep_name=ep["name"] + " - " if ep is not None else "",
+                sequence_name=sequence["name"],
+            )
+        )
+        log.info(msg)
+
     def _update_sequence(self, data):
         """Update sequence into OP DB."""
         set_op_project(self.dbcon, data["project_id"])

@@ -267,10 +377,15 @@ class Listener:
             if asset_doc["data"].get("zou", {}).get("id")
         }
         zou_ids_and_asset_docs[sequence["project_id"]] = project_doc
+        gazu_project = gazu.project.get_project(sequence["project_id"])

         # Update
         update_op_result = update_op_assets(
-            self.dbcon, project_doc, [sequence], zou_ids_and_asset_docs
+            self.dbcon,
+            gazu_project,
+            project_doc,
+            [sequence],
+            zou_ids_and_asset_docs,
         )
         if update_op_result:
             asset_doc_id, asset_update = update_op_result[0]

@@ -279,12 +394,30 @@ class Listener:
     def _delete_sequence(self, data):
         """Delete sequence of OP DB."""
         set_op_project(self.dbcon, data["project_id"])
-        print("delete sequence")  # TODO check bugfix
+        sequence = self.dbcon.find_one({"data.zou.id": data["sequence_id"]})
+        if sequence:
+            # Delete
+            self.dbcon.delete_one(
+                {"type": "asset", "data.zou.id": data["sequence_id"]}
+            )

-        # Delete
-        self.dbcon.delete_one(
-            {"type": "asset", "data.zou.id": data["sequence_id"]}
-        )
+            # Print message
+            ep_id = sequence["data"]["zou"].get("episode_id")
+            ep = self.get_ep_dict(ep_id)
+
+            gazu_project = gazu.project.get_project(
+                sequence["data"]["zou"]["project_id"]
+            )
+
+            msg = (
+                "Sequence deleted: {proj_name} - {ep_name}"
+                "{sequence_name}".format(
+                    proj_name=gazu_project["name"],
+                    ep_name=ep["name"] + " - " if ep is not None else "",
+                    sequence_name=sequence["name"],
+                )
+            )
+            log.info(msg)

     # == Shot ==
     def _new_shot(self, data):

@@ -301,6 +434,21 @@ class Listener:
         # Update
         self._update_shot(data)

+        # Print message
+        ep_id = shot["episode_id"]
+        ep = self.get_ep_dict(ep_id)
+
+        msg = (
+            "Shot created: {proj_name} - {ep_name}"
+            "{sequence_name} - {shot_name}".format(
+                proj_name=shot["project_name"],
+                ep_name=ep["name"] + " - " if ep is not None else "",
+                sequence_name=shot["sequence_name"],
+                shot_name=shot["name"],
+            )
+        )
+        log.info(msg)
+
     def _update_shot(self, data):
         """Update shot into OP DB."""
         set_op_project(self.dbcon, data["project_id"])

@@ -318,11 +466,17 @@ class Listener:
             if asset_doc["data"].get("zou", {}).get("id")
         }
         zou_ids_and_asset_docs[shot["project_id"]] = project_doc
+        gazu_project = gazu.project.get_project(shot["project_id"])

         # Update
         update_op_result = update_op_assets(
-            self.dbcon, project_doc, [shot], zou_ids_and_asset_docs
+            self.dbcon,
+            gazu_project,
+            project_doc,
+            [shot],
+            zou_ids_and_asset_docs,
         )

         if update_op_result:
             asset_doc_id, asset_update = update_op_result[0]
             self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

@@ -330,11 +484,28 @@ class Listener:
     def _delete_shot(self, data):
         """Delete shot of OP DB."""
         set_op_project(self.dbcon, data["project_id"])
+        shot = self.dbcon.find_one({"data.zou.id": data["shot_id"]})

-        # Delete
-        self.dbcon.delete_one(
-            {"type": "asset", "data.zou.id": data["shot_id"]}
-        )
+        if shot:
+            # Delete
+            self.dbcon.delete_one(
+                {"type": "asset", "data.zou.id": data["shot_id"]}
+            )
+
+            # Print message
+            ep_id = shot["data"]["zou"].get("episode_id")
+            ep = self.get_ep_dict(ep_id)
+
+            msg = (
+                "Shot deleted: {proj_name} - {ep_name}"
+                "{sequence_name} - {shot_name}".format(
+                    proj_name=shot["data"]["zou"]["project_name"],
+                    ep_name=ep["name"] + " - " if ep is not None else "",
+                    sequence_name=shot["data"]["zou"]["sequence_name"],
+                    shot_name=shot["name"],
+                )
+            )
+            log.info(msg)

     # == Task ==
     def _new_task(self, data):

@@ -346,23 +517,59 @@ class Listener:
         # Get gazu entity
         task = gazu.task.get_task(data["task_id"])

-        # Find asset doc
-        parent_name = task["entity"]["name"]
+        # Print message
+        ep_id = task.get("episode_id")
+        ep = self.get_ep_dict(ep_id)

-        asset_doc = get_asset_by_name(project_name, parent_name)
+        parent_name = None
+        asset_name = None
+        ent_type = None
+
+        if task["task_type"]["for_entity"] == "Asset":
+            parent_name = task["entity"]["name"]
+            asset_name = task["entity"]["name"]
+            ent_type = task["entity_type"]["name"]
+        elif task["task_type"]["for_entity"] == "Shot":
+            parent_name = "{ep_name}{sequence_name} - {shot_name}".format(
+                ep_name=ep["name"] + " - " if ep is not None else "",
+                sequence_name=task["sequence"]["name"],
+                shot_name=task["entity"]["name"],
+            )
+            asset_name = "{ep_name}{sequence_name}_{shot_name}".format(
+                ep_name=ep["name"] + "_" if ep is not None else "",
+                sequence_name=task["sequence"]["name"],
+                shot_name=task["entity"]["name"],
+            )

         # Update asset tasks with new one
-        asset_tasks = asset_doc["data"].get("tasks")
-        task_type_name = task["task_type"]["name"]
-        asset_tasks[task_type_name] = {"type": task_type_name, "zou": task}
-        self.dbcon.update_one(
-            {"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}}
-        )
+        asset_doc = get_asset_by_name(project_name, asset_name)
+        if asset_doc:
+            asset_tasks = asset_doc["data"].get("tasks")
+            task_type_name = task["task_type"]["name"]
+            asset_tasks[task_type_name] = {
+                "type": task_type_name,
+                "zou": task,
+            }
+            self.dbcon.update_one(
+                {"_id": asset_doc["_id"]},
+                {"$set": {"data.tasks": asset_tasks}},
+            )
+
+            # Print message
+            msg = (
+                "Task created: {proj} - {ent_type}{parent}"
+                " - {task}".format(
+                    proj=task["project"]["name"],
+                    ent_type=ent_type + " - " if ent_type is not None else "",
+                    parent=parent_name,
+                    task=task["task_type"]["name"],
+                )
+            )
+            log.info(msg)

     def _update_task(self, data):
         """Update task into OP DB."""
         # TODO is it necessary?
         pass

     def _delete_task(self, data):
         """Delete task of OP DB."""

@ -384,6 +591,31 @@ class Listener:
|
|||
{"_id": doc["_id"]},
|
||||
{"$set": {"data.tasks": asset_tasks}},
|
||||
)
|
||||
|
||||
# Print message
|
||||
entity = gazu.entity.get_entity(task["zou"]["entity_id"])
|
||||
ep = self.get_ep_dict(entity["source_id"])
|
||||
|
||||
if entity["type"] == "Asset":
|
||||
parent_name = "{ep}{entity_type} - {entity}".format(
|
||||
ep=ep["name"] + " - " if ep is not None else "",
|
||||
entity_type=task["zou"]["entity_type"]["name"],
|
||||
entity=task["zou"]["entity"]["name"],
|
||||
)
|
||||
elif entity["type"] == "Shot":
|
||||
parent_name = "{ep}{sequence} - {shot}".format(
|
||||
ep=ep["name"] + " - " if ep is not None else "",
|
||||
sequence=task["zou"]["sequence"]["name"],
|
||||
shot=task["zou"]["entity"]["name"],
|
||||
)
|
||||
|
||||
msg = "Task deleted: {proj} - {parent} - {task}".format(
|
||||
proj=task["zou"]["project"]["name"],
|
||||
parent=parent_name,
|
||||
task=name,
|
||||
)
|
||||
log.info(msg)
|
||||
|
||||
return
|
||||
|
||||
|
||||
|
|
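With this change every delete handler in the listener follows the same shape: look the document up first, then delete and log only when it was actually found. A minimal standalone sketch of that guard, with the Mongo collection replaced by a small stub class (all names here are illustrative, not the module's API):

```python
# Sketch of the "find, then guard, then delete and log" shape used by the
# handlers above. FakeCollection stands in for the Avalon/Mongo collection.
class FakeCollection:
    def __init__(self, docs):
        self._docs = docs

    def find_one(self, query):
        zou_id = query.get("data.zou.id")
        return next(
            (d for d in self._docs if d["data"]["zou"]["id"] == zou_id), None
        )

    def delete_one(self, query):
        doc = self.find_one(query)
        if doc:
            self._docs.remove(doc)


def delete_entity(dbcon, zou_id):
    # Guard: never log (or dereference) a document that was never synced.
    doc = dbcon.find_one({"data.zou.id": zou_id})
    if not doc:
        return None
    dbcon.delete_one({"type": "asset", "data.zou.id": zou_id})
    print("Deleted: {}".format(doc["name"]))
    return doc


docs = [{"name": "sq010", "type": "asset", "data": {"zou": {"id": "abc"}}}]
dbcon = FakeCollection(docs)
delete_entity(dbcon, "abc")      # prints "Deleted: sq010"
delete_entity(dbcon, "missing")  # silently a no-op
```

The guard matters because Kitsu can emit delete events for entities that were never synced, and the log message should not dereference a missing document.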
@ -394,9 +626,10 @@ def start_listeners(login: str, password: str):
        login (str): Kitsu user login
        password (str): Kitsu user password
    """

    # Refresh token every week
    def refresh_token_every_week():
        print("Refreshing token...")
        log.info("Refreshing token...")
        gazu.refresh_token()
        threading.Timer(7 * 3600 * 24, refresh_token_every_week).start()
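The `refresh_token_every_week` helper above re-arms itself through `threading.Timer`, so the Kitsu token keeps being refreshed for as long as the listener process lives. A self-contained sketch of that self-rescheduling pattern, with `print` standing in for `gazu.refresh_token()` and a daemon flag added (not in the original) so a test script can still exit:

```python
import threading

INTERVAL = 7 * 3600 * 24  # one week in seconds, as in the function above


def refresh_token_every_week():
    print("Refreshing token...")
    # The real code calls gazu.refresh_token() here.
    # Re-arm: each run schedules the next one, so the refresh repeats
    # until the process exits or the timer is cancelled.
    timer = threading.Timer(INTERVAL, refresh_token_every_week)
    timer.daemon = True  # don't keep the interpreter alive just for this
    timer.start()
    return timer


timer = refresh_token_every_week()  # runs once, then arms the next run
timer.cancel()  # handle to the *next* run, useful for clean shutdown
```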
@ -5,10 +5,6 @@ from typing import Dict, List

from pymongo import DeleteOne, UpdateOne
import gazu
from gazu.task import (
    all_tasks_for_asset,
    all_tasks_for_shot,
)

from openpype.client import (
    get_project,

@ -18,7 +14,6 @@ from openpype.client import (
    create_project,
)
from openpype.pipeline import AvalonMongoDB
from openpype.settings import get_project_settings
from openpype.modules.kitsu.utils.credentials import validate_credentials

from openpype.lib import Logger

@ -69,6 +64,7 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str):

def update_op_assets(
    dbcon: AvalonMongoDB,
    gazu_project: dict,
    project_doc: dict,
    entities_list: List[dict],
    asset_doc_ids: Dict[str, dict],

@ -78,14 +74,18 @@ def update_op_assets(

    Args:
        dbcon (AvalonMongoDB): Connection to DB
        gazu_project (dict): Dict of gazu,
        project_doc (dict): Dict of project,
        entities_list (List[dict]): List of zou entities to update
        asset_doc_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...]

    Returns:
        List[Dict[str, dict]]: List of (doc_id, update_dict) tuples
    """
    if not project_doc:
        return

    project_name = project_doc["name"]
    project_module_settings = get_project_settings(project_name)["kitsu"]

    assets_with_update = []
    for item in entities_list:

@ -94,7 +94,8 @@ def update_op_assets(
        if not item_doc:  # Create asset
            op_asset = create_op_asset(item)
            insert_result = dbcon.insert_one(op_asset)
            item_doc = get_asset_by_id(project_name, insert_result.inserted_id)
            item_doc = get_asset_by_id(
                project_name, insert_result.inserted_id)

        # Update asset
        item_data = deepcopy(item_doc["data"])

@ -113,38 +114,73 @@ def update_op_assets(
        except (TypeError, ValueError):
            frame_in = 1001
        item_data["frameStart"] = frame_in
        # Frames duration, fallback on 0
        # Frames duration, fallback on 1
        try:
            # NOTE nb_frames is stored directly in item
            # because of zou's legacy design
            frames_duration = int(item.get("nb_frames", 0))
            frames_duration = int(item.get("nb_frames", 1))
        except (TypeError, ValueError):
            frames_duration = 0
            frames_duration = None
        # Frame out, fallback on frame_in + duration or project's value or 1001
        frame_out = item_data.pop("frame_out", None)
        if not frame_out:
            frame_out = frame_in + frames_duration
        try:
            frame_out = int(frame_out)
        except (TypeError, ValueError):
            frame_out = 1001
            if frames_duration:
                frame_out = frame_in + frames_duration - 1
            else:
                frame_out = project_doc["data"].get("frameEnd", frame_in)
        item_data["frameEnd"] = frame_out
        # Fps, fallback to project's value or default value (25.0)
        try:
            fps = float(item_data.get("fps", project_doc["data"].get("fps")))
            fps = float(item_data.get("fps"))
        except (TypeError, ValueError):
            fps = 25.0
            fps = float(gazu_project.get(
                "fps", project_doc["data"].get("fps", 25)))
        item_data["fps"] = fps
        # Resolution, fall back to project default
        match_res = re.match(
            r"(\d+)x(\d+)",
            item_data.get("resolution", gazu_project.get("resolution"))
        )
        if match_res:
            item_data["resolutionWidth"] = int(match_res.group(1))
            item_data["resolutionHeight"] = int(match_res.group(2))
        else:
            item_data["resolutionWidth"] = project_doc["data"].get(
                "resolutionWidth")
            item_data["resolutionHeight"] = project_doc["data"].get(
                "resolutionHeight")
        # Properties that doesn't fully exist in Kitsu.
        # Guessing those property names below:
        # Pixel Aspect Ratio
        item_data["pixelAspect"] = item_data.get(
            "pixel_aspect", project_doc["data"].get("pixelAspect"))
        # Handle Start
        item_data["handleStart"] = item_data.get(
            "handle_start", project_doc["data"].get("handleStart"))
        # Handle End
        item_data["handleEnd"] = item_data.get(
            "handle_end", project_doc["data"].get("handleEnd"))
        # Clip In
        item_data["clipIn"] = item_data.get(
            "clip_in", project_doc["data"].get("clipIn"))
        # Clip Out
        item_data["clipOut"] = item_data.get(
            "clip_out", project_doc["data"].get("clipOut"))

        # Tasks
        tasks_list = []
        item_type = item["type"]
        if item_type == "Asset":
            tasks_list = all_tasks_for_asset(item)
            tasks_list = gazu.task.all_tasks_for_asset(item)
        elif item_type == "Shot":
            tasks_list = all_tasks_for_shot(item)
            tasks_list = gazu.task.all_tasks_for_shot(item)
        item_data["tasks"] = {
            t["task_type_name"]: {"type": t["task_type_name"], "zou": t}
        item_data["tasks"] = {
            t["task_type_name"]: {
                "type": t["task_type_name"],
                "zou": gazu.task.get_task(t["id"]),
            }
            for t in tasks_list
        }
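The frame logic above switches to inclusive frame ranges: a duration of N frames starting at `frame_in` now ends at `frame_in + N - 1`, and when Kitsu carries no usable `nb_frames` the code falls back to the project's `frameEnd` instead of a hard-coded 1001. A compact sketch of that decision tree, with plain dicts standing in for the zou item and the project document:

```python
def resolve_frame_out(item, project_data, frame_in):
    # Duration from Kitsu; None when missing or unparsable.
    try:
        frames_duration = int(item.get("nb_frames", 1))
    except (TypeError, ValueError):
        frames_duration = None

    if frames_duration:
        # Inclusive range: a duration of N frames ends at frame_in + N - 1.
        return frame_in + frames_duration - 1
    # No usable duration: fall back to project frameEnd, then frame_in.
    return project_data.get("frameEnd", frame_in)


project_data = {"frameEnd": 1100}
print(resolve_frame_out({"nb_frames": 24}, project_data, 1001))    # 1024
print(resolve_frame_out({"nb_frames": None}, project_data, 1001))  # 1100
print(resolve_frame_out({}, {}, 1001))                             # 1001
```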
@ -176,9 +212,14 @@ def update_op_assets(
            entity_root_asset_name = "Shots"

        # Root parent folder if exist
        visual_parent_doc_id = (
            asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None
        )
        visual_parent_doc_id = None
        if parent_zou_id is not None:
            parent_zou_id_dict = asset_doc_ids.get(parent_zou_id)
            if parent_zou_id_dict is not None:
                visual_parent_doc_id = (
                    parent_zou_id_dict.get("_id")
                    if parent_zou_id_dict else None)

        if visual_parent_doc_id is None:
            # Find root folder doc ("Assets" or "Shots")
            root_folder_doc = get_asset_by_name(

@ -197,12 +238,15 @@ def update_op_assets(
        item_data["parents"] = []
        ancestor_id = parent_zou_id
        while ancestor_id is not None:
            parent_doc = asset_doc_ids[ancestor_id]
            item_data["parents"].insert(0, parent_doc["name"])
            parent_doc = asset_doc_ids.get(ancestor_id)
            if parent_doc is not None:
                item_data["parents"].insert(0, parent_doc["name"])

            # Get parent entity
            parent_entity = parent_doc["data"]["zou"]
            ancestor_id = parent_entity.get("parent_id")
                # Get parent entity
                parent_entity = parent_doc["data"]["zou"]
                ancestor_id = parent_entity.get("parent_id")
            else:
                ancestor_id = None

        # Build OpenPype compatible name
        if item_type in ["Shot", "Sequence"] and parent_zou_id is not None:

@ -250,13 +294,12 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
        UpdateOne: Update instance for the project
    """
    project_name = project["name"]
    project_doc = get_project(project_name)
    if not project_doc:
        log.info(f"Creating project '{project_name}'")
        project_doc = create_project(project_name, project_name)
    project_dict = get_project(project_name)
    if not project_dict:
        project_dict = create_project(project_name, project_name)

    # Project data and tasks
    project_data = project_doc["data"] or {}
    project_data = project_dict["data"] or {}

    # Build project code and update Kitsu
    project_code = project.get("code")

@ -287,7 +330,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
    )

    return UpdateOne(
        {"_id": project_doc["_id"]},
        {"_id": project_dict["_id"]},
        {
            "$set": {
                "config.tasks": {

@ -301,7 +344,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
    )


def sync_all_projects(login: str, password: str, ignore_projects: list = None):
def sync_all_projects(
        login: str, password: str, ignore_projects: list = None):
    """Update all OP projects in DB with Zou data.

    Args:

@ -346,7 +390,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
    if not project:
        project = gazu.project.get_project_by_name(project["name"])

    log.info(f"Synchronizing {project['name']}...")
    log.info("Synchronizing {}...".format(project['name']))

    # Get all assets from zou
    all_assets = gazu.asset.all_assets_for_project(project)

@ -365,12 +409,16 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
    ]

    # Sync project. Create if doesn't exist
    project_name = project["name"]
    project_dict = get_project(project_name)
    if not project_dict:
        log.info("Project created: {}".format(project_name))
    bulk_writes.append(write_project_to_op(project, dbcon))

    # Try to find project document
    project_name = project["name"]
    if not project_dict:
        project_dict = get_project(project_name)
    dbcon.Session["AVALON_PROJECT"] = project_name
    project_doc = get_project(project_name)

    # Query all assets of the local project
    zou_ids_and_asset_docs = {

@ -378,7 +426,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
        for asset_doc in get_assets(project_name)
        if asset_doc["data"].get("zou", {}).get("id")
    }
    zou_ids_and_asset_docs[project["id"]] = project_doc
    zou_ids_and_asset_docs[project["id"]] = project_dict

    # Create entities root folders
    to_insert = [

@ -389,6 +437,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
            "data": {
                "root_of": r,
                "tasks": {},
                "visualParent": None,
                "parents": [],
            },
        }
        for r in ["Assets", "Shots"]

@ -423,7 +473,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
        [
            UpdateOne({"_id": id}, update)
            for id, update in update_op_assets(
                dbcon, project_doc, all_entities, zou_ids_and_asset_docs
                dbcon, project, project_dict,
                all_entities, zou_ids_and_asset_docs
            )
        ]
    )
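The `.get()` rewrites in the parent-handling hunks above make the lookup tolerant of zou ids that have no asset doc yet: the walk climbs `parent_id` links, prepending names, and stops quietly at the first unknown ancestor instead of raising `KeyError`. A small sketch of that walk over plain dicts (the doc shape is trimmed to what the loop touches):

```python
def build_parents(asset_doc_ids, parent_zou_id):
    # Climb the parent chain, oldest ancestor first, stopping at the
    # first id with no known doc (this mirrors the .get() guard above).
    parents = []
    ancestor_id = parent_zou_id
    while ancestor_id is not None:
        parent_doc = asset_doc_ids.get(ancestor_id)
        if parent_doc is None:
            break
        parents.insert(0, parent_doc["name"])
        ancestor_id = parent_doc["data"]["zou"].get("parent_id")
    return parents


asset_doc_ids = {
    "ep1": {"name": "Ep01", "data": {"zou": {"parent_id": None}}},
    "sq1": {"name": "sq010", "data": {"zou": {"parent_id": "ep1"}}},
}
print(build_parents(asset_doc_ids, "sq1"))      # ['Ep01', 'sq010']
print(build_parents(asset_doc_ids, "unknown"))  # []
```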
@ -61,7 +61,7 @@ def sync_zou_from_op_project(
    project_doc = get_project(project_name)

    # Get all entities from zou
    print(f"Synchronizing {project_name}...")
    print("Synchronizing {}...".format(project_name))
    zou_project = gazu.project.get_project_by_name(project_name)

    # Create project

@ -82,7 +82,9 @@ def sync_zou_from_op_project(
            f"x{project_doc['data']['resolutionHeight']}",
        }
    )
    gazu.project.update_project_data(zou_project, data=project_doc["data"])
    gazu.project.update_project_data(
        zou_project, data=project_doc["data"]
    )
    gazu.project.update_project(zou_project)

    asset_types = gazu.asset.all_asset_types()

@ -98,8 +100,7 @@ def sync_zou_from_op_project(
    project_module_settings = get_project_settings(project_name)["kitsu"]
    dbcon.Session["AVALON_PROJECT"] = project_name
    asset_docs = {
        asset_doc["_id"]: asset_doc
        for asset_doc in get_assets(project_name)
        asset_doc["_id"]: asset_doc for asset_doc in get_assets(project_name)
    }

    # Create new assets

@ -174,7 +175,9 @@ def sync_zou_from_op_project(
                doc["name"],
                frame_in=doc["data"]["frameStart"],
                frame_out=doc["data"]["frameEnd"],
                nb_frames=doc["data"]["frameEnd"] - doc["data"]["frameStart"],
                nb_frames=(
                    doc["data"]["frameEnd"] - doc["data"]["frameStart"] + 1
                ),
            )

        elif match.group(2):  # Sequence

@ -229,7 +232,7 @@ def sync_zou_from_op_project(
                    "frame_in": frame_in,
                    "frame_out": frame_out,
                },
                "nb_frames": frame_out - frame_in,
                "nb_frames": frame_out - frame_in + 1,
            }
        )
        entity = gazu.raw.update("entities", zou_id, entity_data)

@ -258,7 +261,7 @@ def sync_zou_from_op_project(
        for asset_doc in asset_docs.values()
    }
    for entity_id in deleted_entities:
        gazu.raw.delete(f"data/entities/{entity_id}")
        gazu.raw.delete("data/entities/{}".format(entity_id))

    # Write into DB
    if bulk_writes:
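Both `nb_frames` edits in this file add `+ 1` for the same inclusive-range reason as the frame-out fix earlier: a shot spanning 1001 to 1024 contains 24 frames, and the old `frame_out - frame_in` expression under-counted by one.

```python
frame_in, frame_out = 1001, 1024
nb_frames = frame_out - frame_in + 1  # inclusive range: 24 frames, not 23
assert nb_frames == 24
```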
@ -335,9 +335,10 @@ def get_imageio_config(
        get_template_data_from_session)
    anatomy_data = get_template_data_from_session()

    formatting_data = deepcopy(anatomy_data)
    # add project roots to anatomy data
    anatomy_data["root"] = anatomy.roots
    anatomy_data["platform"] = platform.system().lower()
    formatting_data["root"] = anatomy.roots
    formatting_data["platform"] = platform.system().lower()

    # get colorspace settings
    imageio_global, imageio_host = _get_imageio_settings(

@ -347,7 +348,7 @@ def get_imageio_config(

    if config_host.get("enabled"):
        config_data = _get_config_data(
            config_host["filepath"], anatomy_data
            config_host["filepath"], formatting_data
        )
    else:
        config_data = None

@ -356,7 +357,7 @@ def get_imageio_config(
        # get config path from either global or host_name
        config_global = imageio_global["ocio_config"]
        config_data = _get_config_data(
            config_global["filepath"], anatomy_data
            config_global["filepath"], formatting_data
        )

    if not config_data:

@ -372,12 +373,12 @@ def _get_config_data(path_list, anatomy_data):
    """Return first existing path in path list.

    If template is used in path inputs,
    then it is formated by anatomy data
    then it is formatted by anatomy data
    and environment variables

    Args:
        path_list (list[str]): list of abs paths
        anatomy_data (dict): formating data
        anatomy_data (dict): formatting data

    Returns:
        dict: config data

@ -389,30 +390,30 @@ def _get_config_data(path_list, anatomy_data):

    # first try host config paths
    for path_ in path_list:
        formated_path = _format_path(path_, formatting_data)
        formatted_path = _format_path(path_, formatting_data)

        if not os.path.exists(formated_path):
        if not os.path.exists(formatted_path):
            continue

        return {
            "path": os.path.normpath(formated_path),
            "path": os.path.normpath(formatted_path),
            "template": path_
        }


def _format_path(tempate_path, formatting_data):
    """Single template path formating.
def _format_path(template_path, formatting_data):
    """Single template path formatting.

    Args:
        tempate_path (str): template string
        template_path (str): template string
        formatting_data (dict): data to be used for
            template formating
            template formatting

    Returns:
        str: absolute formated path
        str: absolute formatted path
    """
    # format path for anatomy keys
    formatted_path = StringTemplate(tempate_path).format(
    formatted_path = StringTemplate(template_path).format(
        formatting_data)

    return os.path.abspath(formatted_path)
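`_get_config_data` formats each template path and returns the first candidate that exists on disk, together with the template it came from. A standalone sketch of that first-existing-path loop, using plain `str.format` where the real helper uses OpenPype's `StringTemplate` and also expands environment variables; the paths and keys below are made up for the example:

```python
import os


def first_existing_config(path_list, formatting_data):
    # Return the first formatted path that exists, plus its template.
    for template in path_list:
        candidate = os.path.abspath(template.format(**formatting_data))
        if not os.path.exists(candidate):
            continue
        return {"path": os.path.normpath(candidate), "template": template}
    return None  # nothing on disk matched any template


paths = ["{root}/ocio/{platform}/config.ocio", "/tmp/fallback_config.ocio"]
data = {"root": "/projects/demo", "platform": "linux"}
print(first_existing_config(paths, data))  # None unless a candidate exists
```

Returning the template alongside the resolved path lets callers re-resolve the same config on another platform where the roots differ.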
@ -19,7 +19,7 @@ from .publish_plugins import (
    RepairContextAction,

    Extractor,
    ExtractorColormanaged,
    ColormanagedPyblishPluginMixin
)

from .lib import (

@ -64,7 +64,7 @@ __all__ = (
    "RepairContextAction",

    "Extractor",
    "ExtractorColormanaged",
    "ColormanagedPyblishPluginMixin",

    "get_publish_template_name",
@ -3,7 +3,7 @@ from abc import ABCMeta
from pprint import pformat
import pyblish.api
from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin

from openpype.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
from openpype.lib import BoolDef

from .lib import (

@ -288,28 +288,29 @@ class Extractor(pyblish.api.InstancePlugin):
        return get_instance_staging_dir(instance)


class ExtractorColormanaged(Extractor):
    """Extractor base for color managed image data.

    Each Extractor intended to export pixel data representation
    should inherit from this class to allow color managed data.
    Class implements "get_colorspace_settings" and
    "set_representation_colorspace" functions used
    for injecting colorspace data to representation data for farther
    integration into db document.
class ColormanagedPyblishPluginMixin(object):
    """Mixin for colormanaged plugins.

    This class is used to set colorspace data to a publishing
    representation. It contains a static method,
    get_colorspace_settings, which returns config and
    file rules data for the host context.
    It also contains a method, set_representation_colorspace,
    which sets colorspace data to the representation.
    The allowed file extensions are listed in the allowed_ext variable.
    The method first checks if the file extension is in
    the list of allowed extensions. If it is, it then gets the
    colorspace settings from the host context and gets a
    matching colorspace from rules. Finally, it infuses this
    data into the representation.
    """

    allowed_ext = [
        "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg",
        "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut",
        "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf",
        "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img"
    ]
    allowed_ext = set(
        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
    )

    @staticmethod
    def get_colorspace_settings(context):
        """Retuns solved settings for the host context.
        """Returns solved settings for the host context.

        Args:
            context (publish.Context): publishing context

@ -375,7 +376,10 @@ class ExtractorColormanaged(Extractor):
        ext = representation["ext"]
        # check extension
        self.log.debug("__ ext: `{}`".format(ext))

        # check if ext in lower case is in self.allowed_ext
        if ext.lower() not in self.allowed_ext:
        if ext.lstrip(".").lower() not in self.allowed_ext:
            self.log.debug("Extension is not in allowed extensions.")
            return

        if colorspace_settings is None:
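The mixin trades the hand-written extension list for a set derived from `IMAGE_EXTENSIONS` and `VIDEO_EXTENSIONS`, with the leading dot stripped; the check then normalizes the incoming `ext` the same way, so `.EXR`, `EXR` and `exr` all match. A small sketch of that normalize-both-sides test (the two extension sets here are stand-ins for the real ones from `openpype.lib.transcoding`):

```python
# Stand-in data; the real sets come from openpype.lib.transcoding.
IMAGE_EXTENSIONS = {".exr", ".png", ".tif"}
VIDEO_EXTENSIONS = {".mov", ".mp4"}

# Normalize once at definition time: no leading dot.
allowed_ext = {
    ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
}


def is_colormanaged_ext(ext):
    # Normalize the query the same way: strip the dot, lowercase.
    return ext.lstrip(".").lower() in allowed_ext


print(is_colormanaged_ext(".EXR"))  # True
print(is_colormanaged_ext("mov"))   # True
print(is_colormanaged_ext(".abc"))  # False
```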
@ -2,7 +2,8 @@ import pyblish.api
from openpype.pipeline import publish


class ExtractColorspaceData(publish.ExtractorColormanaged):
class ExtractColorspaceData(publish.Extractor,
                            publish.ColormanagedPyblishPluginMixin):
    """ Inject Colorspace data to available representations.

    Input data:
@ -135,6 +135,38 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
            )
        return project_doc

    def _prepare_new_tasks(self, asset_doc, entity_data):
        new_tasks = entity_data.get("tasks") or {}
        if not asset_doc:
            return new_tasks

        old_tasks = asset_doc.get("data", {}).get("tasks")
        # Just use new tasks if old are not available
        if not old_tasks:
            return new_tasks

        output = deepcopy(old_tasks)
        # Create mapping of lowered task names from old tasks
        cur_task_low_mapping = {
            task_name.lower(): task_name
            for task_name in old_tasks
        }
        # Add/update tasks from new entity data
        for task_name, task_info in new_tasks.items():
            task_info = deepcopy(task_info)
            task_name_low = task_name.lower()
            # Add new task
            if task_name_low not in cur_task_low_mapping:
                output[task_name] = task_info
                continue

            # Update existing task with new info
            mapped_task_name = cur_task_low_mapping.pop(task_name_low)
            src_task_info = output.pop(mapped_task_name)
            src_task_info.update(task_info)
            output[task_name] = src_task_info
        return output

    def sync_asset(
        self,
        asset_name,

@ -170,11 +202,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
        data["parents"] = parents

        asset_doc = asset_docs_by_name.get(asset_name)

        # Tasks
        data["tasks"] = self._prepare_new_tasks(asset_doc, entity_data)

        # --- Create/Unarchive asset and end ---
        if not asset_doc:
            # Just use tasks from entity data as they are
            # - this is different from the case when tasks are updated
            data["tasks"] = entity_data.get("tasks") or {}
            archived_asset_doc = None
            for archived_entity in archived_asset_docs_by_name[asset_name]:
                archived_parents = (

@ -201,19 +234,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
        if "data" not in asset_doc:
            asset_doc["data"] = {}
        cur_entity_data = asset_doc["data"]
        cur_entity_tasks = cur_entity_data.get("tasks") or {}

        # Tasks
        data["tasks"] = {}
        new_tasks = entity_data.get("tasks") or {}
        for task_name, task_info in new_tasks.items():
            task_info = deepcopy(task_info)
            if task_name in cur_entity_tasks:
                src_task_info = deepcopy(cur_entity_tasks[task_name])
                src_task_info.update(task_info)
                task_info = src_task_info

            data["tasks"][task_name] = task_info

        changes = {}
        for key, value in data.items():
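`_prepare_new_tasks` merges task dicts case-insensitively: an incoming task whose name differs from an existing one only by case keeps the old task's info, updates it, and re-keys it to the new spelling, while genuinely new names are simply added. A standalone re-implementation of just those merge semantics, for illustration:

```python
from copy import deepcopy


def prepare_new_tasks(old_tasks, new_tasks):
    # Same merge semantics as the plugin method, minus the asset doc lookup.
    output = deepcopy(old_tasks or {})
    low_mapping = {name.lower(): name for name in output}
    for name, info in (new_tasks or {}).items():
        info = deepcopy(info)
        key = name.lower()
        if key not in low_mapping:
            output[name] = info  # brand-new task
            continue
        # Case-insensitive match: re-key to the new spelling, merge info.
        src = output.pop(low_mapping.pop(key))
        src.update(info)
        output[name] = src
    return output


old = {"Compositing": {"type": "Compositing", "assignee": "anna"}}
new = {"compositing": {"type": "compositing"}, "Layout": {"type": "Layout"}}
print(prepare_new_tasks(old, new))
# {'compositing': {'type': 'compositing', 'assignee': 'anna'},
#  'Layout': {'type': 'Layout'}}
```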
@ -52,7 +52,6 @@
    "enabled": true,
    "optional": false,
    "active": true,
    "use_published": true,
    "priority": 50,
    "chunk_size": 10,
    "concurrent_tasks": 1,

@ -139,7 +139,8 @@
    "ext": "mp4",
    "tags": [
        "burnin",
        "ftrackreview"
        "ftrackreview",
        "kitsureview"
    ],
    "burnins": [],
    "ffmpeg_args": {

@ -285,11 +285,6 @@
    "key": "active",
    "label": "Active"
},
{
    "type": "boolean",
    "key": "use_published",
    "label": "Use Published scene"
},
{
    "type": "splitter"
},

@ -16,6 +16,9 @@
{
    "shotgridreview": "Add review to Shotgrid"
},
{
    "kitsureview": "Add review to Kitsu"
},
{
    "delete": "Delete output"
},

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.2-nightly.3"
__version__ = "3.15.2-nightly.4"