Merge branch 'feature/ftrack_group_is_bcw_compatible' into develop

This commit is contained in:
Milan Kolar 2020-08-07 14:49:14 +02:00
commit 66449b3b5c
36 changed files with 1397 additions and 470 deletions

View file

@ -1,11 +1,18 @@
# -*- coding: utf-8 -*-
"""Cleanup leftover files from publish."""
import os
import shutil
import pyblish.api
def clean_renders(instance):
transfers = instance.data.get("transfers", list())
"""Delete renders after publishing.
Args:
instance (pyblish.api.Instance): Instance to work on.
"""
transfers = instance.data.get("transfers", list())
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
dirnames = []
@ -40,6 +47,7 @@ class CleanUp(pyblish.api.InstancePlugin):
active = True
def process(self, instance):
"""Plugin entry point."""
# Get the errored instances
failed = []
for result in instance.context.data["results"]:
@ -52,7 +60,7 @@ class CleanUp(pyblish.api.InstancePlugin):
)
)
self.log.info("Cleaning renders ...")
self.log.info("Performing cleanup on {}".format(instance))
clean_renders(instance)
if [ef for ef in self.exclude_families
@ -60,16 +68,17 @@ class CleanUp(pyblish.api.InstancePlugin):
return
import tempfile
staging_dir = instance.data.get("stagingDir", None)
if not staging_dir or not os.path.exists(staging_dir):
self.log.info("No staging directory found: %s" % staging_dir)
return
temp_root = tempfile.gettempdir()
staging_dir = instance.data.get("stagingDir", None)
if not os.path.normpath(staging_dir).startswith(temp_root):
self.log.info("Skipping cleanup. Staging directory is not in the "
"temp folder: %s" % staging_dir)
return
self.log.info("Removing staging directory ...")
if not staging_dir or not os.path.exists(staging_dir):
self.log.info("No staging directory found: %s" % staging_dir)
return
self.log.info("Removing staging directory {}".format(staging_dir))
shutil.rmtree(staging_dir)

View file

@ -410,15 +410,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# go through aovs in expected files
for aov, files in exp_files[0].items():
cols, rem = clique.assemble(files)
# we shouldn't have any remainders
if rem:
self.log.warning(
"skipping unexpected files found "
"in sequence: {}".format(rem))
# but we really expect only one collection, nothing else makes sense
assert len(cols) == 1, "only one image sequence type is expected"
# we shouldn't have any remainders. And if we do, it should
# be just one item for single frame renders.
if not cols and rem:
assert len(rem) == 1, ("Found multiple non related files "
"to render, don't know what to do "
"with them.")
col = rem[0]
_, ext = os.path.splitext(col)
else:
# but we really expect only one collection.
# Nothing else makes sense.
assert len(cols) == 1, "only one image sequence type is expected" # noqa: E501
_, ext = os.path.splitext(cols[0].tail)
col = list(cols[0])
self.log.debug(col)
# create subset name `familyTaskSubset_AOV`
group_name = 'render{}{}{}{}'.format(
task[0].upper(), task[1:],
@ -426,7 +433,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
subset_name = '{}_{}'.format(group_name, aov)
staging = os.path.dirname(list(cols[0])[0])
if isinstance(col, (list, tuple)):
staging = os.path.dirname(col[0])
else:
staging = os.path.dirname(col)
success, rootless_staging_dir = (
self.anatomy.find_root_template_from_path(staging)
)
@ -451,13 +462,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
new_instance["subset"] = subset_name
new_instance["subsetGroup"] = group_name
ext = cols[0].tail.lstrip(".")
# create representation
if isinstance(col, (list, tuple)):
files = [os.path.basename(f) for f in col]
else:
files = os.path.basename(col)
rep = {
"name": ext,
"ext": ext,
"files": [os.path.basename(f) for f in list(cols[0])],
"files": files,
"frameStart": int(instance_data.get("frameStartHandle")),
"frameEnd": int(instance_data.get("frameEndHandle")),
# If expectedFile are absolute, we need only filenames

View file

@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import
import pyblish.api
import pype.api
class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner.

    Pyblish action attached to a validator; when the validator fails,
    this selects the offending instances' nodes in the current host
    (Nuke or Maya, whichever is registered).
    """

    label = "Select Instances"
    icon = "briefcase"
    # Only offered in the UI when the owning plug-in failed.
    on = "failed"

    def process(self, context, plugin):
        """Process invalid validators and select invalid instances.

        Args:
            context (pyblish.api.Context): Publishing context holding the
                per-plugin "results" records.
            plugin: The validator plug-in whose failures we act upon.
        """
        # Get the errored instances: keep each failed instance once, and
        # only results produced by this particular plug-in.
        failed = []
        for result in context.data["results"]:
            if result["error"] is None:
                continue
            if result["instance"] is None:
                continue
            if result["instance"] in failed:
                continue
            if result["plugin"] != plugin:
                continue
            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        if instances:
            self.log.info(
                "Selecting invalid nodes: %s" % ", ".join(
                    [str(x) for x in instances]
                )
            )
            self.select(instances)
        else:
            # Nothing to act on; clear any stale selection instead.
            self.log.info("No invalid nodes found.")
            self.deselect()

    def select(self, instances):
        # Host-specific selection; imports are deferred so this module
        # loads in hosts where nuke/maya are unavailable.
        if "nuke" in pyblish.api.registered_hosts():
            import avalon.nuke.lib
            import nuke
            avalon.nuke.lib.select_nodes(
                [nuke.toNode(str(x)) for x in instances]
            )
        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.select(instances, replace=True, noExpand=True)

    def deselect(self):
        # Clear the selection in whichever host is registered.
        if "nuke" in pyblish.api.registered_hosts():
            import avalon.nuke.lib
            avalon.nuke.lib.reset_selection()
        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.select(deselect=True)
class RepairSelectInvalidInstances(pyblish.api.Action):
    """Repair the instance asset.

    Pyblish action that rewrites each failed instance's asset attribute
    to the current context asset (Nuke "avalon:asset" knob or Maya
    ".asset" string attribute, depending on the registered host).
    """

    label = "Repair"
    icon = "wrench"
    # Only offered in the UI when the owning plug-in failed.
    on = "failed"

    def process(self, context, plugin):
        """Collect instances failed by `plugin` and repair each of them."""
        # Get the errored instances: keep each failed instance once, and
        # only results produced by this particular plug-in.
        failed = []
        for result in context.data["results"]:
            if result["error"] is None:
                continue
            if result["instance"] is None:
                continue
            if result["instance"] in failed:
                continue
            if result["plugin"] != plugin:
                continue
            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
            self.set_attribute(instance, context_asset)

    def set_attribute(self, instance, context_asset):
        """Write `context_asset` onto the node backing `instance`."""
        # Host-specific attribute write; imports deferred on purpose.
        if "nuke" in pyblish.api.registered_hosts():
            import nuke
            nuke.toNode(
                instance.data.get("name")
            )["avalon:asset"].setValue(context_asset)
        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.setAttr(
                instance.data.get("name") + ".asset",
                context_asset,
                type="string"
            )
class ValidateInstanceInContext(pyblish.api.InstancePlugin):
    """Validator to check if instance asset match context asset.

    When working in per-shot style you always publish data in context of
    current asset (shot). This validator checks if this is so. It is optional
    so it can be disabled when needed.

    Action on this validator will select invalid instances in Outliner.
    """

    order = pype.api.ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
    hosts = ["maya", "nuke"]
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]

    def process(self, instance):
        """Fail when the instance asset differs from the context asset.

        Args:
            instance (pyblish.api.Instance): Instance to validate.

        Raises:
            AssertionError: If the instance asset does not match the
                context asset.
        """
        asset = instance.data.get("asset")
        context_asset = instance.context.data["assetEntity"]["name"]
        msg = "{} has asset {}".format(instance.name, asset)
        # Raise explicitly rather than using a bare `assert`: asserts are
        # stripped when Python runs with the -O flag, which would silently
        # disable this validator. AssertionError is kept so pyblish result
        # handling and the attached actions behave exactly as before.
        if asset != context_asset:
            raise AssertionError(msg)

View file

@ -1,14 +1,25 @@
from avalon import api
import maya.app.renderSetup.model.renderSetup as renderSetup
from avalon.maya import lib
from maya import cmds
# -*- coding: utf-8 -*-
"""Load and update RenderSetup settings.
Working with RenderSetup settings in Maya is done utilizing json files.
When this json is loaded, it will overwrite all settings on RenderSetup
instance.
"""
import json
import six
import sys
from avalon import api
from avalon.maya import lib
from pype.hosts.maya import lib as pypelib
from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup
class RenderSetupLoader(api.Loader):
"""
This will load json preset for RenderSetup, overwriting current one.
"""
"""Load json preset for RenderSetup overwriting current one."""
families = ["rendersetup"]
representations = ["json"]
@ -19,7 +30,7 @@ class RenderSetupLoader(api.Loader):
color = "orange"
def load(self, context, name, namespace, data):
"""Load RenderSetup settings."""
from avalon.maya.pipeline import containerise
# from pype.hosts.maya.lib import namespaced
@ -29,7 +40,7 @@ class RenderSetupLoader(api.Loader):
prefix="_" if asset[0].isdigit() else "",
suffix="_",
)
self.log.info(">>> loading json [ {} ]".format(self.fname))
with open(self.fname, "r") as file:
renderSetup.instance().decode(
json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
@ -42,9 +53,56 @@ class RenderSetupLoader(api.Loader):
if not nodes:
return
self.log.info(">>> containerising [ {} ]".format(name))
return containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
def remove(self, container):
"""Remove RenderSetup settings instance."""
from maya import cmds
container_name = container["objectName"]
self.log.info("Removing '%s' from Maya.." % container["name"])
container_content = cmds.sets(container_name, query=True)
nodes = cmds.ls(container_content, long=True)
nodes.append(container_name)
try:
cmds.delete(nodes)
except ValueError:
# Already implicitly deleted by Maya upon removing reference
pass
def update(self, container, representation):
"""Update RenderSetup setting by overwriting existing settings."""
pypelib.show_message(
"Render setup update",
"Render setup setting will be overwritten by new version. All "
"setting specified by user not included in loaded version "
"will be lost.")
path = api.get_representation_path(representation)
with open(path, "r") as file:
try:
renderSetup.instance().decode(
json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
except Exception:
self.log.error("There were errors during loading")
six.reraise(*sys.exc_info())
# Update metadata
node = container["objectName"]
cmds.setAttr("{}.representation".format(node),
str(representation["_id"]),
type="string")
self.log.info("... updated")
def switch(self, container, representation):
"""Switch representations."""
self.update(container, representation)

View file

@ -19,6 +19,7 @@ class ExtractCameraAlembic(pype.api.Extractor):
label = "Camera (Alembic)"
hosts = ["maya"]
families = ["camera"]
bake_attributes = []
def process(self, instance):
@ -66,6 +67,14 @@ class ExtractCameraAlembic(pype.api.Extractor):
job_str += ' -file "{0}"'.format(path)
# bake specified attributes in preset
assert isinstance(self.bake_attributes, (list, tuple)), (
"Attributes to bake must be specified as a list"
)
for attr in self.bake_attributes:
self.log.info("Adding {} attribute".format(attr))
job_str += " -attr {0}".format(attr)
with lib.evaluation("off"):
with avalon.maya.suspended_refresh():
cmds.AbcExport(j=job_str, verbose=False)

View file

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract camera as Maya Scene."""
import os
from maya import cmds
@ -65,8 +67,8 @@ def unlock(plug):
cmds.disconnectAttr(source, destination)
class ExtractCameraMayaAscii(pype.api.Extractor):
"""Extract a Camera as Maya Ascii.
class ExtractCameraMayaScene(pype.api.Extractor):
"""Extract a Camera as Maya Scene.
This will create a duplicate of the camera that will be baked *with*
substeps and handles for the required frames. This temporary duplicate
@ -81,13 +83,28 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
"""
label = "Camera (Maya Ascii)"
label = "Camera (Maya Scene)"
hosts = ["maya"]
families = ["camera"]
scene_type = "ma"
def process(self, instance):
"""Plugin entry point."""
# get settings
ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping") # noqa: E501
if ext_mapping:
self.log.info("Looking in presets for scene type ...")
# use extension mapping for first family found
for family in self.families:
try:
self.scene_type = ext_mapping[family]
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
# no preset found
pass
framerange = [instance.data.get("frameStart", 1),
instance.data.get("frameEnd", 1)]
handles = instance.data.get("handles", 0)
@ -95,7 +112,7 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
bake_to_worldspace = instance.data("bakeToWorldSpace", True)
if not bake_to_worldspace:
self.log.warning("Camera (Maya Ascii) export only supports world"
self.log.warning("Camera (Maya Scene) export only supports world"
"space baked camera extractions. The disabled "
"bake to world space is ignored...")
@ -115,7 +132,7 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
# Define extract output file path
dir_path = self.staging_dir(instance)
filename = "{0}.ma".format(instance.name)
filename = "{0}.{1}".format(instance.name, self.scene_type)
path = os.path.join(dir_path, filename)
# Perform extraction
@ -152,7 +169,7 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
cmds.select(baked_shapes, noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii",
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=False,
constructionHistory=False,
@ -164,15 +181,15 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
# Delete the baked hierarchy
if bake_to_worldspace:
cmds.delete(baked)
massage_ma_file(path)
if self.scene_type == "ma":
massage_ma_file(path)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'ma',
'ext': 'ma',
'name': self.scene_type,
'ext': self.scene_type,
'files': filename,
"stagingDir": dir_path,
}

View file

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract data as Maya scene (raw)."""
import os
from maya import cmds
@ -6,24 +8,37 @@ import avalon.maya
import pype.api
class ExtractMayaAsciiRaw(pype.api.Extractor):
"""Extract as Maya Ascii (raw)
class ExtractMayaSceneRaw(pype.api.Extractor):
"""Extract as Maya Scene (raw).
This will preserve all references, construction history, etc.
"""
label = "Maya ASCII (Raw)"
label = "Maya Scene (Raw)"
hosts = ["maya"]
families = ["mayaAscii",
"setdress",
"layout"]
scene_type = "ma"
def process(self, instance):
"""Plugin entry point."""
ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping") # noqa: E501
if ext_mapping:
self.log.info("Looking in presets for scene type ...")
# use extension mapping for first family found
for family in self.families:
try:
self.scene_type = ext_mapping[family]
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
# no preset found
pass
# Define extract output file path
dir_path = self.staging_dir(instance)
filename = "{0}.ma".format(instance.name)
filename = "{0}.{1}".format(instance.name, self.scene_type)
path = os.path.join(dir_path, filename)
# Whether to include all nodes in the instance (including those from
@ -38,12 +53,12 @@ class ExtractMayaAsciiRaw(pype.api.Extractor):
members = instance[:]
# Perform extraction
self.log.info("Performing extraction..")
self.log.info("Performing extraction ...")
with avalon.maya.maintained_selection():
cmds.select(members, noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii",
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=True,
constructionHistory=True,
@ -55,8 +70,8 @@ class ExtractMayaAsciiRaw(pype.api.Extractor):
instance.data["representations"] = []
representation = {
'name': 'ma',
'ext': 'ma',
'name': self.scene_type,
'ext': self.scene_type,
'files': filename,
"stagingDir": dir_path
}

View file

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract model as Maya Scene."""
import os
from maya import cmds
@ -8,7 +10,7 @@ from pype.hosts.maya import lib
class ExtractModel(pype.api.Extractor):
"""Extract as Model (Maya Ascii)
"""Extract as Model (Maya Scene).
Only extracts contents based on the original "setMembers" data to ensure
publishing the least amount of required shapes. From that it only takes
@ -22,19 +24,33 @@ class ExtractModel(pype.api.Extractor):
"""
label = "Model (Maya ASCII)"
label = "Model (Maya Scene)"
hosts = ["maya"]
families = ["model"]
scene_type = "ma"
def process(self, instance):
"""Plugin entry point."""
ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping") # noqa: E501
if ext_mapping:
self.log.info("Looking in presets for scene type ...")
# use extension mapping for first family found
for family in self.families:
try:
self.scene_type = ext_mapping[family]
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
# no preset found
pass
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = "{0}.ma".format(instance.name)
filename = "{0}.{1}".format(instance.name, self.scene_type)
path = os.path.join(stagingdir, filename)
# Perform extraction
self.log.info("Performing extraction..")
self.log.info("Performing extraction ...")
# Get only the shape contents we need in such a way that we avoid
# taking along intermediateObjects
@ -59,7 +75,7 @@ class ExtractModel(pype.api.Extractor):
cmds.select(members, noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii",
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=False,
channels=False,
@ -73,8 +89,8 @@ class ExtractModel(pype.api.Extractor):
instance.data["representations"] = []
representation = {
'name': 'ma',
'ext': 'ma',
'name': self.scene_type,
'ext': self.scene_type,
'files': filename,
"stagingDir": stagingdir,
}

View file

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract rig as Maya Scene."""
import os
from maya import cmds
@ -7,26 +9,40 @@ import pype.api
class ExtractRig(pype.api.Extractor):
"""Extract rig as Maya Ascii"""
"""Extract rig as Maya Scene."""
label = "Extract Rig (Maya ASCII)"
label = "Extract Rig (Maya Scene)"
hosts = ["maya"]
families = ["rig"]
scene_type = "ma"
def process(self, instance):
"""Plugin entry point."""
ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping") # noqa: E501
if ext_mapping:
self.log.info("Looking in presets for scene type ...")
# use extension mapping for first family found
for family in self.families:
try:
self.scene_type = ext_mapping[family]
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
# no preset found
pass
# Define extract output file path
dir_path = self.staging_dir(instance)
filename = "{0}.ma".format(instance.name)
filename = "{0}.{1}".format(instance.name, self.scene_type)
path = os.path.join(dir_path, filename)
# Perform extraction
self.log.info("Performing extraction..")
self.log.info("Performing extraction ...")
with avalon.maya.maintained_selection():
cmds.select(instance, noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii",
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=False,
channels=True,
@ -38,12 +54,11 @@ class ExtractRig(pype.api.Extractor):
instance.data["representations"] = []
representation = {
'name': 'ma',
'ext': 'ma',
'name': self.scene_type,
'ext': self.scene_type,
'files': filename,
"stagingDir": dir_path
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

View file

@ -1,3 +1,6 @@
# -*- coding: utf-8 -*-
"""Extract Yeti rig."""
import os
import json
import contextlib
@ -11,7 +14,7 @@ import pype.hosts.maya.lib as maya
@contextlib.contextmanager
def disconnect_plugs(settings, members):
"""Disconnect and store attribute connections."""
members = cmds.ls(members, long=True)
original_connections = []
try:
@ -55,7 +58,7 @@ def disconnect_plugs(settings, members):
@contextlib.contextmanager
def yetigraph_attribute_values(assumed_destination, resources):
"""Get values from Yeti attributes in graph."""
try:
for resource in resources:
if "graphnode" not in resource:
@ -89,14 +92,28 @@ def yetigraph_attribute_values(assumed_destination, resources):
class ExtractYetiRig(pype.api.Extractor):
"""Extract the Yeti rig to a MayaAscii and write the Yeti rig data"""
"""Extract the Yeti rig to a Maya Scene and write the Yeti rig data."""
label = "Extract Yeti Rig"
hosts = ["maya"]
families = ["yetiRig"]
scene_type = "ma"
def process(self, instance):
"""Plugin entry point."""
ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping") # noqa: E501
if ext_mapping:
self.log.info("Looking in presets for scene type ...")
# use extension mapping for first family found
for family in self.families:
try:
self.scene_type = ext_mapping[family]
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
# no preset found
pass
yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
if not yeti_nodes:
raise RuntimeError("No pgYetiMaya nodes found in the instance")
@ -106,7 +123,8 @@ class ExtractYetiRig(pype.api.Extractor):
settings_path = os.path.join(dirname, "yeti.rigsettings")
# Yeti related staging dirs
maya_path = os.path.join(dirname, "yeti_rig.ma")
maya_path = os.path.join(
dirname, "yeti_rig.{}".format(self.scene_type))
self.log.info("Writing metadata file")
@ -153,7 +171,7 @@ class ExtractYetiRig(pype.api.Extractor):
cmds.file(maya_path,
force=True,
exportSelected=True,
typ="mayaAscii",
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
preserveReferences=False,
constructionHistory=True,
shader=False)
@ -163,21 +181,21 @@ class ExtractYetiRig(pype.api.Extractor):
if "representations" not in instance.data:
instance.data["representations"] = []
self.log.info("rig file: {}".format("yeti_rig.ma"))
self.log.info("rig file: {}".format(maya_path))
instance.data["representations"].append(
{
'name': "ma",
'ext': 'ma',
'files': "yeti_rig.ma",
'name': self.scene_type,
'ext': self.scene_type,
'files': os.path.basename(maya_path),
'stagingDir': dirname
}
)
self.log.info("settings file: {}".format("yeti.rigsettings"))
self.log.info("settings file: {}".format(settings))
instance.data["representations"].append(
{
'name': 'rigsettings',
'ext': 'rigsettings',
'files': 'yeti.rigsettings',
'files': os.path.basename(settings),
'stagingDir': dirname
}
)

View file

@ -20,6 +20,7 @@ import os
import json
import getpass
import copy
import re
import clique
import requests
@ -108,8 +109,8 @@ def get_renderer_variables(renderlayer, root):
# does not work for vray.
scene = cmds.file(query=True, sceneName=True)
scene, _ = os.path.splitext(os.path.basename(scene))
filename_0 = filename_prefix.replace('<Scene>', scene)
filename_0 = filename_0.replace('<Layer>', renderlayer)
filename_0 = re.sub('<Scene>', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501
filename_0 = re.sub('<Layer>', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501
filename_0 = "{}.{}.{}".format(
filename_0, "#" * int(padding), extension)
filename_0 = os.path.normpath(os.path.join(root, filename_0))
@ -375,16 +376,32 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
if isinstance(exp[0], dict):
# we have aovs and we need to iterate over them
for _aov, files in exp[0].items():
col = clique.assemble(files)[0][0]
output_file = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501
col, rem = clique.assemble(files)
if not col and rem:
# we couldn't find any collections but have
# individual files.
assert len(rem) == 1, ("Found multiple non related files "
"to render, don't know what to do "
"with them.")
payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0] # noqa: E501
output_file = rem[0]
else:
output_file = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501
output_filenames[exp_index] = output_file
exp_index += 1
else:
col = clique.assemble(files)[0][0]
output_file = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file
# OutputFilenames[exp_index] = output_file
col, rem = clique.assemble(files)
if not col and rem:
# we couldn't find any collections but have
# individual files.
assert len(rem) == 1, ("Found multiple non related files "
"to render, don't know what to do "
"with them.")
payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0] # noqa: E501
else:
output_file = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501
plugin = payload["JobInfo"]["Plugin"]
self.log.info("using render plugin : {}".format(plugin))

View file

@ -62,9 +62,16 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
for family in families:
for preset in presets[family]:
[node_name, attribute_name] = preset.split(".")
attributes.update(
{node_name: {attribute_name: presets[family][preset]}}
)
try:
attributes[node_name].update(
{attribute_name: presets[family][preset]}
)
except KeyError:
attributes.update({
node_name: {
attribute_name: presets[family][preset]
}
})
# Get invalid attributes.
nodes = pm.ls()

View file

@ -1,108 +0,0 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import
import pyblish.api
from pype.action import get_errored_instances_from_context
import pype.api
class SelectInvalidInstances(pyblish.api.Action):
"""Select invalid instances in Outliner."""
label = "Show Instances"
icon = "briefcase"
on = "failed"
def process(self, context, plugin):
"""Process invalid validators and select invalid instances."""
try:
from maya import cmds
except ImportError:
raise ImportError("Current host is not Maya")
errored_instances = get_errored_instances_from_context(context)
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
# Get the invalid nodes for the plug-ins
self.log.info("Finding invalid nodes..")
invalid = list()
for _instance in instances:
invalid_instances = plugin.get_invalid(context)
if invalid_instances:
if isinstance(invalid_instances, (list, tuple)):
invalid.extend(invalid_instances)
else:
self.log.warning("Plug-in returned to be invalid, "
"but has no selectable nodes.")
# Ensure unique (process each node only once)
invalid = list(set(invalid))
if invalid:
self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
cmds.select(invalid, replace=True, noExpand=True)
else:
self.log.info("No invalid nodes found.")
cmds.select(deselect=True)
class RepairSelectInvalidInstances(pyblish.api.Action):
"""Repair the instance asset."""
label = "Repair"
icon = "wrench"
on = "failed"
def process(self, context, plugin):
from maya import cmds
# Get the errored instances
failed = []
for result in context.data["results"]:
if (result["error"] is not None and result["instance"] is not None
and result["instance"] not in failed):
failed.append(result["instance"])
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
context_asset = context.data["assetEntity"]["name"]
for instance in instances:
cmds.setAttr(instance.data.get("name") + ".asset",
context_asset, type="string")
class ValidateInstanceInContext(pyblish.api.ContextPlugin):
"""Validator to check if instance asset match context asset.
When working in per-shot style you always publish data in context of
current asset (shot). This validator checks if this is so. It is optional
so it can be disabled when needed.
Action on this validator will select invalid instances in Outliner.
"""
order = pype.api.ValidateContentsOrder
label = "Instance in same Context"
optional = True
actions = [SelectInvalidInstances, RepairSelectInvalidInstances]
@classmethod
def get_invalid(cls, context):
"""Get invalid instances."""
invalid = []
context_asset = context.data["assetEntity"]["name"]
cls.log.info("we are in {}".format(context_asset))
for instance in context:
asset = instance.data.get("asset")
if asset != context_asset:
cls.log.warning("{} has asset {}".format(instance.name, asset))
invalid.append(instance.name)
return invalid
def process(self, context):
"""Check instances."""
invalid = self.get_invalid(context)
if invalid:
raise AssertionError("Some instances doesn't share same context")

View file

@ -106,7 +106,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
instance.data.update({
"subset": subset,
"asset": os.environ["AVALON_ASSET"],
"asset": avalon_knob_data["asset"],
"label": node.name(),
"name": node.name(),
"subset": subset,

View file

@ -1,4 +1,7 @@
import pyblish.api
import pype.api
from avalon import io, api
import nuke
@ -23,6 +26,21 @@ class CollectReview(pyblish.api.InstancePlugin):
if not node["review"].value():
return
# Add audio to instance if it exists.
try:
version = pype.api.get_latest_version(
instance.context.data["assetEntity"]["name"], "audioMain"
)
representation = io.find_one(
{"type": "representation", "parent": version["_id"]}
)
instance.data["audio"] = [{
"offset": 0,
"filename": api.get_representation_path(representation)
}]
except AssertionError:
pass
instance.data["families"].append("review")
instance.data['families'].append('ftrack')

View file

@ -152,7 +152,7 @@ class ExtractThumbnail(pype.api.Extractor):
ipn_orig = None
for v in [n for n in nuke.allNodes()
if "Viewer" in n.Class()]:
if "Viewer" == n.Class()]:
ip = v['input_process'].getValue()
ipn = v['input_process_node'].getValue()
if "VIEWER_INPUT" not in ipn and ip: