Merge branch '2.x/develop' into feature/determine-maya-scene-type-by-preset
Commit c684812bc2
17 changed files with 359 additions and 173 deletions
@@ -151,27 +151,31 @@ def application_launch():

def export_template(backdrops, nodes, filepath):
    func = """function func(args)
    {
        // Add an extra node just so a new group can be created.
        var temp_node = node.add("Top", "temp_note", "NOTE", 0, 0, 0);
        var template_group = node.createGroup(temp_node, "temp_group");
        node.deleteNode( template_group + "/temp_note" );

        // This will make Node View to focus on the new group.
        selection.clearSelection();
        for (var f = 0; f < args[1].length; f++)
        {
            selection.addNodeToSelection(args[1][f]);
        }

        Action.perform("copy()", "Node View");

        selection.clearSelection();
        selection.addNodeToSelection(template_group);
        Action.perform("onActionEnterGroup()", "Node View");
        Action.perform("paste()", "Node View");

        // Recreate backdrops in group.
        for (var i = 0 ; i < args[0].length; i++)
        {
            MessageLog.trace(args[0][i]);
            Backdrop.addBackdrop(template_group, args[0][i]);
        };

        // Copy-paste the selected nodes into the new group.
        var drag_object = copyPaste.copy(args[1], 1, frame.numberOf, "");
        copyPaste.pasteNewNodes(drag_object, template_group, "");

        // Select all nodes within group and export as template.
        Action.perform( "selectAll()", "Node View" );
        copyPaste.createTemplateFromSelection(args[2], args[3]);

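The hunk above builds a Harmony script as a Python string and runs it with the backdrops, node names and template destination passed in as `args`. A minimal sketch of that pattern, assuming a hypothetical `send_script(func, args)` helper standing in for the host's RPC call (the real dispatch mechanism and the exact argument split are not shown in this diff):

# Sketch only: parameterizing a Harmony script from Python.
# `send_script` is a hypothetical stand-in, NOT the plugin's API.
def send_script(func, args):
    """Pretend to evaluate `func(args)` inside Harmony."""
    print("would run:", func.strip().splitlines()[0], "with args:", args)


def export_template(backdrops, nodes, filepath):
    func = """function func(args)
    {
        // args[0]: backdrops, args[1]: nodes,
        // args[2], args[3]: fed to copyPaste.createTemplateFromSelection
        MessageLog.trace(args);
    }
    func
    """
    # The trailing list items are illustrative; the real call site decides
    # how the template name/path are split between args[2] and args[3].
    send_script(func, [backdrops, nodes, filepath, "template"])


export_template(["backdrop_1"], ["Top/node_1"], "/tmp/templates")
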
@@ -158,6 +158,25 @@ class AExpectedFiles:
        """To be implemented by renderer class."""
        pass

    def sanitize_camera_name(self, camera):
        """Sanitize camera name.

        Remove Maya illegal characters from camera name.

        Args:
            camera (str): Maya camera name.

        Returns:
            (str): sanitized camera name

        Example:
            >>> sanitize_camera_name('test:camera_01')
            test_camera_01

        """
        sanitized = re.sub('[^0-9a-zA-Z_]+', '_', camera)
        return sanitized

    def get_renderer_prefix(self):
        """Return prefix for specific renderer.

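The new `sanitize_camera_name` helper is a plain regex substitution, so its behaviour is easy to check in isolation. A standalone sketch of the same expression used in the hunk above:

import re


def sanitize_camera_name(camera):
    """Replace every character Maya does not allow in node names with '_'."""
    return re.sub('[^0-9a-zA-Z_]+', '_', camera)


print(sanitize_camera_name('test:camera_01'))   # test_camera_01
print(sanitize_camera_name('shot|cam:main'))    # shot_cam_main
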
@@ -252,7 +271,7 @@ class AExpectedFiles:
        mappings = (
            (R_SUBSTITUTE_SCENE_TOKEN, layer_data["sceneName"]),
            (R_SUBSTITUTE_LAYER_TOKEN, layer_data["layerName"]),
            (R_SUBSTITUTE_CAMERA_TOKEN, cam),
            (R_SUBSTITUTE_CAMERA_TOKEN, self.sanitize_camera_name(cam)),
            # this is required to remove unfilled aov token, for example
            # in Redshift
            (R_REMOVE_AOV_TOKEN, ""),

@@ -287,7 +306,8 @@ class AExpectedFiles:
        mappings = (
            (R_SUBSTITUTE_SCENE_TOKEN, layer_data["sceneName"]),
            (R_SUBSTITUTE_LAYER_TOKEN, layer_data["layerName"]),
            (R_SUBSTITUTE_CAMERA_TOKEN, cam),
            (R_SUBSTITUTE_CAMERA_TOKEN,
             self.sanitize_camera_name(cam)),
            (R_SUBSTITUTE_AOV_TOKEN, aov[0]),
            (R_CLEAN_FRAME_TOKEN, ""),
            (R_CLEAN_EXT_TOKEN, ""),

@@ -314,7 +334,8 @@ class AExpectedFiles:
        # camera name to AOV to allow per camera AOVs.
        aov_name = aov[0]
        if len(layer_data["cameras"]) > 1:
            aov_name = "{}_{}".format(aov[0], cam)
            aov_name = "{}_{}".format(aov[0],
                                      self.sanitize_camera_name(cam))

        aov_file_list[aov_name] = aov_files
        file_prefix = layer_data["filePrefix"]

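The `mappings` tuples above pair a token regex with its replacement and are applied one after another to the render filename prefix. A minimal sketch of that loop, with the `R_*` patterns reduced to hypothetical inline regexes (the real constants live elsewhere in the module):

import re

# Hypothetical stand-ins for the module's R_* token patterns.
R_SUBSTITUTE_SCENE_TOKEN = re.compile(r"%s|<scene>", re.IGNORECASE)
R_SUBSTITUTE_LAYER_TOKEN = re.compile(r"%l|<layer>|<renderlayer>", re.IGNORECASE)
R_SUBSTITUTE_CAMERA_TOKEN = re.compile(r"%c|<camera>", re.IGNORECASE)


def expand_prefix(file_prefix, scene, layer, camera):
    """Apply (pattern, value) mappings to a render filename prefix."""
    mappings = (
        (R_SUBSTITUTE_SCENE_TOKEN, scene),
        (R_SUBSTITUTE_LAYER_TOKEN, layer),
        (R_SUBSTITUTE_CAMERA_TOKEN, camera),
    )
    for regex, value in mappings:
        file_prefix = re.sub(regex, value, file_prefix)
    return file_prefix


print(expand_prefix("<Scene>/<RenderLayer>/<Camera>_beauty",
                    "sh010_anim", "masterLayer", "test_camera_01"))
# sh010_anim/masterLayer/test_camera_01_beauty
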
@@ -1445,7 +1445,7 @@ class ExporterReview:
        anlib.reset_selection()
        ipn_orig = None
        for v in [n for n in nuke.allNodes()
                  if "Viewer" in n.Class()]:
                  if "Viewer" == n.Class()]:
            ip = v['input_process'].getValue()
            ipn = v['input_process_node'].getValue()
            if "VIEWER_INPUT" not in ipn and ip:

@@ -1,11 +1,18 @@
# -*- coding: utf-8 -*-
"""Cleanup leftover files from publish."""
import os
import shutil
import pyblish.api


def clean_renders(instance):
    transfers = instance.data.get("transfers", list())
    """Delete renders after publishing.

    Args:
        instance (pyblish.api.Instance): Instance to work on.

    """
    transfers = instance.data.get("transfers", list())
    current_families = instance.data.get("families", list())
    instance_family = instance.data.get("family", None)
    dirnames = []

@@ -40,6 +47,7 @@ class CleanUp(pyblish.api.InstancePlugin):
    active = True

    def process(self, instance):
        """Plugin entry point."""
        # Get the errored instances
        failed = []
        for result in instance.context.data["results"]:

@@ -52,7 +60,7 @@ class CleanUp(pyblish.api.InstancePlugin):
            )
        )

        self.log.info("Cleaning renders ...")
        self.log.info("Performing cleanup on {}".format(instance))
        clean_renders(instance)

        if [ef for ef in self.exclude_families

@@ -60,16 +68,17 @@ class CleanUp(pyblish.api.InstancePlugin):
            return
        import tempfile

        staging_dir = instance.data.get("stagingDir", None)
        if not staging_dir or not os.path.exists(staging_dir):
            self.log.info("No staging directory found: %s" % staging_dir)
            return

        temp_root = tempfile.gettempdir()
        staging_dir = instance.data.get("stagingDir", None)

        if not os.path.normpath(staging_dir).startswith(temp_root):
            self.log.info("Skipping cleanup. Staging directory is not in the "
                          "temp folder: %s" % staging_dir)
            return

        self.log.info("Removing staging directory ...")
        if not staging_dir or not os.path.exists(staging_dir):
            self.log.info("No staging directory found: %s" % staging_dir)
            return

        self.log.info("Removing staging directory {}".format(staging_dir))
        shutil.rmtree(staging_dir)

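The reworked CleanUp hunk only deletes a staging directory when it exists and lives under the system temp folder. A standalone sketch of that guard using only the standard library (the function name and `dry_run` flag are illustrative, not the plugin's API):

import os
import shutil
import tempfile


def remove_staging_dir(staging_dir, dry_run=True):
    """Remove staging_dir only if it exists and sits inside the temp root."""
    temp_root = tempfile.gettempdir()

    if not staging_dir or not os.path.exists(staging_dir):
        print("No staging directory found: %s" % staging_dir)
        return

    if not os.path.normpath(staging_dir).startswith(temp_root):
        print("Skipping cleanup, not in temp folder: %s" % staging_dir)
        return

    print("Removing staging directory %s" % staging_dir)
    if not dry_run:
        shutil.rmtree(staging_dir)


remove_staging_dir(os.path.join(tempfile.gettempdir(), "pyblish_tmp_1234"))
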
@@ -410,15 +410,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        # go through aovs in expected files
        for aov, files in exp_files[0].items():
            cols, rem = clique.assemble(files)
            # we shouldn't have any remainders
            if rem:
                self.log.warning(
                    "skipping unexpected files found "
                    "in sequence: {}".format(rem))

            # but we really expect only one collection, nothing else makes sense
            assert len(cols) == 1, "only one image sequence type is expected"
            # we shouldn't have any remainders. And if we do, it should
            # be just one item for single frame renders.
            if not cols and rem:
                assert len(rem) == 1, ("Found multiple non related files "
                                       "to render, don't know what to do "
                                       "with them.")
                col = rem[0]
                _, ext = os.path.splitext(col)
            else:
                # but we really expect only one collection.
                # Nothing else makes sense.
                assert len(cols) == 1, "only one image sequence type is expected"  # noqa: E501
                _, ext = os.path.splitext(cols[0].tail)
                col = list(cols[0])

            self.log.debug(col)
            # create subset name `familyTaskSubset_AOV`
            group_name = 'render{}{}{}{}'.format(
                task[0].upper(), task[1:],

@@ -426,7 +433,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

            subset_name = '{}_{}'.format(group_name, aov)

            staging = os.path.dirname(list(cols[0])[0])
            if isinstance(col, (list, tuple)):
                staging = os.path.dirname(col[0])
            else:
                staging = os.path.dirname(col)

            success, rootless_staging_dir = (
                self.anatomy.find_root_template_from_path(staging)
            )

@@ -451,13 +462,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
            new_instance["subset"] = subset_name
            new_instance["subsetGroup"] = group_name

            ext = cols[0].tail.lstrip(".")

            # create representation
            if isinstance(col, (list, tuple)):
                files = [os.path.basename(f) for f in col]
            else:
                files = os.path.basename(col)

            rep = {
                "name": ext,
                "ext": ext,
                "files": [os.path.basename(f) for f in list(cols[0])],
                "files": files,
                "frameStart": int(instance_data.get("frameStartHandle")),
                "frameEnd": int(instance_data.get("frameEndHandle")),
                # If expectedFile are absolute, we need only filenames

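Both this hunk and the Deadline submitter further down lean on `clique.assemble()`, which splits a file list into frame collections plus a remainder of unmatched files; a single rendered frame ends up in the remainder. A small sketch of that behaviour (the file names are made up):

import os

import clique

# A sequence plus one unrelated file.
files = ["beauty.1001.exr", "beauty.1002.exr", "beauty.1003.exr", "preview.mov"]
collections, remainder = clique.assemble(files)

col = collections[0]
print(col.format("{head}{padding}{tail}"))   # beauty.%04d.exr
print(sorted(remainder))                     # ['preview.mov']

# A single frame does not form a collection with the default settings,
# so the plugin falls back to the remainder.
collections, remainder = clique.assemble(["beauty.1001.exr"])
if not collections and remainder:
    single = remainder[0]
    _, ext = os.path.splitext(single)
    print(single, ext)                       # beauty.1001.exr .exr
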
pype/plugins/global/publish/validate_instance_in_context.py (new file, 133 lines)
@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import

import pyblish.api
import pype.api


class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner."""

    label = "Select Instances"
    icon = "briefcase"
    on = "failed"

    def process(self, context, plugin):
        """Process invalid validators and select invalid instances."""
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if result["error"] is None:
                continue
            if result["instance"] is None:
                continue
            if result["instance"] in failed:
                continue
            if result["plugin"] != plugin:
                continue

            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        if instances:
            self.log.info(
                "Selecting invalid nodes: %s" % ", ".join(
                    [str(x) for x in instances]
                )
            )
            self.select(instances)
        else:
            self.log.info("No invalid nodes found.")
            self.deselect()

    def select(self, instances):
        if "nuke" in pyblish.api.registered_hosts():
            import avalon.nuke.lib
            import nuke
            avalon.nuke.lib.select_nodes(
                [nuke.toNode(str(x)) for x in instances]
            )

        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.select(instances, replace=True, noExpand=True)

    def deselect(self):
        if "nuke" in pyblish.api.registered_hosts():
            import avalon.nuke.lib
            avalon.nuke.lib.reset_selection()

        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.select(deselect=True)


class RepairSelectInvalidInstances(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if result["error"] is None:
                continue
            if result["instance"] is None:
                continue
            if result["instance"] in failed:
                continue
            if result["plugin"] != plugin:
                continue

            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
            self.set_attribute(instance, context_asset)

    def set_attribute(self, instance, context_asset):
        if "nuke" in pyblish.api.registered_hosts():
            import nuke
            nuke.toNode(
                instance.data.get("name")
            )["avalon:asset"].setValue(context_asset)

        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.setAttr(
                instance.data.get("name") + ".asset",
                context_asset,
                type="string"
            )


class ValidateInstanceInContext(pyblish.api.InstancePlugin):
    """Validator to check if instance asset matches context asset.

    When working in per-shot style you always publish data in context of
    current asset (shot). This validator checks if this is so. It is optional
    so it can be disabled when needed.

    Action on this validator will select invalid instances in Outliner.
    """

    order = pype.api.ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
    hosts = ["maya", "nuke"]
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]

    def process(self, instance):
        asset = instance.data.get("asset")
        context_asset = instance.context.data["assetEntity"]["name"]
        msg = "{} has asset {}".format(instance.name, asset)
        assert asset == context_asset, msg

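Both actions in the new validator walk `context.data["results"]` to find instances whose validation failed before handing them to `pyblish.api.instances_by_plugin`. The filtering itself is plain data wrangling and can be sketched without a host; the result dictionaries below are fabricated for illustration:

def collect_failed(results, plugin):
    """Return unique instances that errored for the given plugin."""
    failed = []
    for result in results:
        if result["error"] is None:
            continue
        if result["instance"] is None:
            continue
        if result["instance"] in failed:
            continue
        if result["plugin"] != plugin:
            continue
        failed.append(result["instance"])
    return failed


# Fabricated results, standing in for context.data["results"].
plugin = "ValidateInstanceInContext"
results = [
    {"error": Exception("wrong asset"), "instance": "renderMain", "plugin": plugin},
    {"error": None, "instance": "renderAux", "plugin": plugin},
    {"error": Exception("other"), "instance": "model", "plugin": "OtherPlugin"},
]
print(collect_failed(results, plugin))   # ['renderMain']
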
@@ -72,19 +72,27 @@ class ExtractRender(pyblish.api.InstancePlugin):
        self.log.info(output.decode("utf-8"))

        # Collect rendered files.
        self.log.debug(path)
        files = os.listdir(path)
        self.log.debug(files)
        collections, remainder = clique.assemble(files, minimum_items=1)
        assert not remainder, (
            "There should not be a remainder for {0}: {1}".format(
                instance[0], remainder
            )
        )
        assert len(collections) == 1, (
            "There should only be one image sequence in {}. Found: {}".format(
                path, len(collections)
            )
        )
        collection = collections[0]
        self.log.debug(collections)
        if len(collections) > 1:
            for col in collections:
                if len(list(col)) > 1:
                    collection = col
        else:
            # assert len(collections) == 1, (
            #     "There should only be one image sequence in {}. Found: {}".format(
            #         path, len(collections)
            #     )
            # )
            collection = collections[0]

        # Generate thumbnail.
        thumbnail_path = os.path.join(path, "thumbnail.png")

@@ -28,8 +28,11 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
    hosts = ["harmony"]
    actions = [ValidateSceneSettingsRepair]

    frame_check_filter = ["_ch_", "_pr_", "_intd_", "_extd_"]

    def process(self, instance):
        expected_settings = pype.hosts.harmony.get_asset_settings()
        self.log.info(expected_settings)

        # Harmony is expected to start at 1.
        frame_start = expected_settings["frameStart"]

@@ -37,6 +40,14 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
        expected_settings["frameEnd"] = frame_end - frame_start + 1
        expected_settings["frameStart"] = 1

        self.log.info(instance.context.data['anatomyData']['asset'])

        if any(string in instance.context.data['anatomyData']['asset']
               for string in frame_check_filter):
            expected_settings.pop("frameEnd")

        func = """function func()
        {
            return {

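The added `frame_check_filter` skips the frame-range check for assets whose name contains one of the listed markers; the `any(...)` test above is easy to verify on its own. A small sketch with a made-up asset name:

frame_check_filter = ["_ch_", "_pr_", "_intd_", "_extd_"]


def should_skip_frame_check(asset_name):
    """True when the asset name carries any of the filter markers."""
    return any(marker in asset_name for marker in frame_check_filter)


expected_settings = {"frameStart": 1, "frameEnd": 120}
asset = "ep01_ch_hero"                      # fabricated asset name
if should_skip_frame_check(asset):
    expected_settings.pop("frameEnd")

print(expected_settings)                    # {'frameStart': 1}
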
@@ -1,14 +1,25 @@
from avalon import api
import maya.app.renderSetup.model.renderSetup as renderSetup
from avalon.maya import lib
from maya import cmds
# -*- coding: utf-8 -*-
"""Load and update RenderSetup settings.

Working with RenderSetup settings in Maya is done utilizing json files.
When this json is loaded, it will overwrite all settings on RenderSetup
instance.
"""

import json
import six
import sys

from avalon import api
from avalon.maya import lib
from pype.hosts.maya import lib as pypelib

from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup


class RenderSetupLoader(api.Loader):
    """
    This will load json preset for RenderSetup, overwriting current one.
    """
    """Load json preset for RenderSetup overwriting current one."""

    families = ["rendersetup"]
    representations = ["json"]

@@ -19,7 +30,7 @@ class RenderSetupLoader(api.Loader):
    color = "orange"

    def load(self, context, name, namespace, data):
        """Load RenderSetup settings."""
        from avalon.maya.pipeline import containerise
        # from pype.hosts.maya.lib import namespaced

@@ -29,7 +40,7 @@ class RenderSetupLoader(api.Loader):
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        self.log.info(">>> loading json [ {} ]".format(self.fname))
        with open(self.fname, "r") as file:
            renderSetup.instance().decode(
                json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)

@@ -42,9 +53,56 @@ class RenderSetupLoader(api.Loader):
        if not nodes:
            return

        self.log.info(">>> containerising [ {} ]".format(name))
        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def remove(self, container):
        """Remove RenderSetup settings instance."""
        from maya import cmds

        container_name = container["objectName"]

        self.log.info("Removing '%s' from Maya.." % container["name"])

        container_content = cmds.sets(container_name, query=True)
        nodes = cmds.ls(container_content, long=True)

        nodes.append(container_name)

        try:
            cmds.delete(nodes)
        except ValueError:
            # Already implicitly deleted by Maya upon removing reference
            pass

    def update(self, container, representation):
        """Update RenderSetup setting by overwriting existing settings."""
        pypelib.show_message(
            "Render setup update",
            "Render setup setting will be overwritten by new version. All "
            "setting specified by user not included in loaded version "
            "will be lost.")
        path = api.get_representation_path(representation)
        with open(path, "r") as file:
            try:
                renderSetup.instance().decode(
                    json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
            except Exception:
                self.log.error("There were errors during loading")
                six.reraise(*sys.exc_info())

        # Update metadata
        node = container["objectName"]
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")
        self.log.info("... updated")

    def switch(self, container, representation):
        """Switch representations."""
        self.update(container, representation)

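The loader's core step is feeding the saved JSON to Maya's RenderSetup API with `DECODE_AND_OVERWRITE`, which replaces whatever setup the scene currently has. A minimal sketch of that call, only meaningful inside a Maya session and with the preset path substituted in (the path below is a placeholder, not taken from the diff):

# Runs only inside Maya (mayapy / script editor); sketch of the loader's core call.
import json

import maya.app.renderSetup.model.renderSetup as renderSetup

PRESET_PATH = "/path/to/rendersetup.json"  # placeholder

with open(PRESET_PATH, "r") as preset_file:
    # DECODE_AND_OVERWRITE replaces the scene's current render setup
    # with the layers, collections and overrides described in the JSON.
    renderSetup.instance().decode(
        json.load(preset_file), renderSetup.DECODE_AND_OVERWRITE, None)
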
@@ -19,6 +19,7 @@ class ExtractCameraAlembic(pype.api.Extractor):
    label = "Camera (Alembic)"
    hosts = ["maya"]
    families = ["camera"]
    bake_attributes = []

    def process(self, instance):

@@ -66,6 +67,14 @@ class ExtractCameraAlembic(pype.api.Extractor):

        job_str += ' -file "{0}"'.format(path)

        # bake specified attributes in preset
        assert isinstance(self.bake_attributes, (list, tuple)), (
            "Attributes to bake must be specified as a list"
        )
        for attr in self.bake_attributes:
            self.log.info("Adding {} attribute".format(attr))
            job_str += " -attr {0}".format(attr)

        with lib.evaluation("off"):
            with avalon.maya.suspended_refresh():
                cmds.AbcExport(j=job_str, verbose=False)

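The new `bake_attributes` list simply appends `-attr` flags to the AbcExport job string. The string assembly can be sketched without Maya; the `-frameRange`/`-root` flags and all values below are assumptions for illustration, since only the `-file` and `-attr` parts appear in this hunk:

def build_abc_job(start, end, root, path, bake_attributes):
    """Assemble an AbcExport-style job string with optional baked attributes."""
    assert isinstance(bake_attributes, (list, tuple)), (
        "Attributes to bake must be specified as a list"
    )
    job_str = "-frameRange {0} {1} -root {2}".format(start, end, root)
    job_str += ' -file "{0}"'.format(path)
    for attr in bake_attributes:
        job_str += " -attr {0}".format(attr)
    return job_str


print(build_abc_job(1001, 1050, "|shot_cam", "/tmp/camera.abc",
                    ["focalLength", "focusDistance"]))
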
@@ -20,6 +20,7 @@ import os
import json
import getpass
import copy
import re

import clique
import requests

@@ -108,8 +109,8 @@ def get_renderer_variables(renderlayer, root):
    # does not work for vray.
    scene = cmds.file(query=True, sceneName=True)
    scene, _ = os.path.splitext(os.path.basename(scene))
    filename_0 = filename_prefix.replace('<Scene>', scene)
    filename_0 = filename_0.replace('<Layer>', renderlayer)
    filename_0 = re.sub('<Scene>', scene, filename_prefix, flags=re.IGNORECASE)  # noqa: E501
    filename_0 = re.sub('<Layer>', renderlayer, filename_0, flags=re.IGNORECASE)  # noqa: E501
    filename_0 = "{}.{}.{}".format(
        filename_0, "#" * int(padding), extension)
    filename_0 = os.path.normpath(os.path.join(root, filename_0))

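The switch from `str.replace` to `re.sub(..., flags=re.IGNORECASE)` makes the `<Scene>` and `<Layer>` tokens match regardless of how the artist typed them in the image file prefix. A quick standalone check (the prefix and values are made up):

import re

filename_prefix = "<scene>/<LAYER>/beauty"   # fabricated prefix, mixed-case tokens
scene = "sh010_lighting_v003"
renderlayer = "rs_main"

# str.replace would miss these tokens because it is case sensitive.
filename_0 = re.sub('<Scene>', scene, filename_prefix, flags=re.IGNORECASE)
filename_0 = re.sub('<Layer>', renderlayer, filename_0, flags=re.IGNORECASE)
print(filename_0)   # sh010_lighting_v003/rs_main/beauty
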
@@ -375,16 +376,32 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
        if isinstance(exp[0], dict):
            # we have aovs and we need to iterate over them
            for _aov, files in exp[0].items():
                col = clique.assemble(files)[0][0]
                output_file = col.format('{head}{padding}{tail}')
                payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501
                col, rem = clique.assemble(files)
                if not col and rem:
                    # we couldn't find any collections but have
                    # individual files.
                    assert len(rem) == 1, ("Found multiple non related files "
                                           "to render, don't know what to do "
                                           "with them.")
                    payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0]  # noqa: E501
                    output_file = rem[0]
                else:
                    output_file = col.format('{head}{padding}{tail}')
                    payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501
                output_filenames[exp_index] = output_file
                exp_index += 1
        else:
            col = clique.assemble(files)[0][0]
            output_file = col.format('{head}{padding}{tail}')
            payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file
            # OutputFilenames[exp_index] = output_file
            col, rem = clique.assemble(files)
            if not col and rem:
                # we couldn't find any collections but have
                # individual files.
                assert len(rem) == 1, ("Found multiple non related files "
                                       "to render, don't know what to do "
                                       "with them.")
                payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0]  # noqa: E501
            else:
                output_file = col.format('{head}{padding}{tail}')
                payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501

        plugin = payload["JobInfo"]["Plugin"]
        self.log.info("using render plugin : {}".format(plugin))

@@ -62,9 +62,16 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
        for family in families:
            for preset in presets[family]:
                [node_name, attribute_name] = preset.split(".")
                attributes.update(
                    {node_name: {attribute_name: presets[family][preset]}}
                )
                try:
                    attributes[node_name].update(
                        {attribute_name: presets[family][preset]}
                    )
                except KeyError:
                    attributes.update({
                        node_name: {
                            attribute_name: presets[family][preset]
                        }
                    })

        # Get invalid attributes.
        nodes = pm.ls()

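The try/except above stops a second attribute on the same node from overwriting the first one in the `attributes` dict. The same merge can be reproduced outside the plugin, here written with `setdefault` as an equivalent alternative; the preset data is made up:

# Fabricated presets: family -> {"node.attribute": value}
presets = {
    "render": {
        "defaultArnoldDriver.mergeAOVs": 1,
        "defaultArnoldDriver.halfPrecision": 1,
        "defaultResolution.width": 1920,
    }
}

attributes = {}
for family, preset_attrs in presets.items():
    for preset, value in preset_attrs.items():
        node_name, attribute_name = preset.split(".")
        # setdefault keeps earlier attributes on the same node intact,
        # mirroring the try/except KeyError in the hunk above.
        attributes.setdefault(node_name, {})[attribute_name] = value

print(attributes)
# {'defaultArnoldDriver': {'mergeAOVs': 1, 'halfPrecision': 1},
#  'defaultResolution': {'width': 1920}}
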
@@ -1,108 +0,0 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import
import pyblish.api
from pype.action import get_errored_instances_from_context
import pype.api


class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner."""

    label = "Show Instances"
    icon = "briefcase"
    on = "failed"

    def process(self, context, plugin):
        """Process invalid validators and select invalid instances."""
        try:
            from maya import cmds
        except ImportError:
            raise ImportError("Current host is not Maya")

        errored_instances = get_errored_instances_from_context(context)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes..")
        invalid = list()
        for _instance in instances:
            invalid_instances = plugin.get_invalid(context)
            if invalid_instances:
                if isinstance(invalid_instances, (list, tuple)):
                    invalid.extend(invalid_instances)
                else:
                    self.log.warning("Plug-in returned to be invalid, "
                                     "but has no selectable nodes.")

        # Ensure unique (process each node only once)
        invalid = list(set(invalid))

        if invalid:
            self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
            cmds.select(invalid, replace=True, noExpand=True)
        else:
            self.log.info("No invalid nodes found.")
            cmds.select(deselect=True)


class RepairSelectInvalidInstances(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        from maya import cmds
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
            cmds.setAttr(instance.data.get("name") + ".asset",
                         context_asset, type="string")


class ValidateInstanceInContext(pyblish.api.ContextPlugin):
    """Validator to check if instance asset match context asset.

    When working in per-shot style you always publish data in context of
    current asset (shot). This validator checks if this is so. It is optional
    so it can be disabled when needed.

    Action on this validator will select invalid instances in Outliner.
    """

    order = pype.api.ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]

    @classmethod
    def get_invalid(cls, context):
        """Get invalid instances."""
        invalid = []
        context_asset = context.data["assetEntity"]["name"]
        cls.log.info("we are in {}".format(context_asset))
        for instance in context:
            asset = instance.data.get("asset")
            if asset != context_asset:
                cls.log.warning("{} has asset {}".format(instance.name, asset))
                invalid.append(instance.name)

        return invalid

    def process(self, context):
        """Check instances."""
        invalid = self.get_invalid(context)
        if invalid:
            raise AssertionError("Some instances doesn't share same context")

@@ -106,7 +106,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):

            instance.data.update({
                "subset": subset,
                "asset": os.environ["AVALON_ASSET"],
                "asset": avalon_knob_data["asset"],
                "label": node.name(),
                "name": node.name(),
                "subset": subset,

@@ -152,7 +152,7 @@ class ExtractThumbnail(pype.api.Extractor):

        ipn_orig = None
        for v in [n for n in nuke.allNodes()
                  if "Viewer" in n.Class()]:
                  if "Viewer" == n.Class()]:
            ip = v['input_process'].getValue()
            ipn = v['input_process_node'].getValue()
            if "VIEWER_INPUT" not in ipn and ip:

@@ -13,6 +13,7 @@ class ExtractImage(pype.api.Extractor):
    label = "Extract Image"
    hosts = ["photoshop"]
    families = ["image"]
    formats = ["png", "jpg"]

    def process(self, instance):

@@ -32,10 +33,12 @@ class ExtractImage(pype.api.Extractor):
                if layer.id not in extract_ids:
                    layer.Visible = False

            save_options = {
                "png": photoshop.com_objects.PNGSaveOptions(),
                "jpg": photoshop.com_objects.JPEGSaveOptions()
            }
            save_options = {}
            if "png" in self.formats:
                save_options["png"] = photoshop.com_objects.PNGSaveOptions()
            if "jpg" in self.formats:
                save_options["jpg"] = photoshop.com_objects.JPEGSaveOptions()

            file_basename = os.path.splitext(
                photoshop.app().ActiveDocument.Name
            )[0]

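The Photoshop extractor now honours a configurable `formats` list instead of always producing both PNG and JPEG. The selection logic reduces to building a dict only for the enabled formats; the sketch below swaps the COM save-option objects for hypothetical factory functions so it can run anywhere:

# Hypothetical stand-ins for photoshop.com_objects.*SaveOptions().
def png_save_options():
    return {"format": "png"}


def jpeg_save_options():
    return {"format": "jpg", "quality": 12}


def build_save_options(formats):
    """Keep only the save options for formats enabled on the plugin."""
    save_options = {}
    if "png" in formats:
        save_options["png"] = png_save_options()
    if "jpg" in formats:
        save_options["jpg"] = jpeg_save_options()
    return save_options


print(build_save_options(["png"]))          # {'png': {'format': 'png'}}
print(build_save_options(["png", "jpg"]))   # both entries
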
@@ -1 +1 @@
__version__ = "2.11.0"
__version__ = "2.11.1"