Mirror of https://github.com/ynput/ayon-core.git
Merge branch '2.x/develop' into feature/determine-maya-scene-type-by-preset
Commit c684812bc2
17 changed files with 359 additions and 173 deletions
@@ -1,14 +1,25 @@
-from avalon import api
-import maya.app.renderSetup.model.renderSetup as renderSetup
-from avalon.maya import lib
-from maya import cmds
+# -*- coding: utf-8 -*-
+"""Load and update RenderSetup settings.
+
+Working with RenderSetup settings in Maya is done using json files.
+When this json is loaded, it will overwrite all settings on the RenderSetup
+instance.
+"""
+
+import json
+import six
+import sys
+
+from avalon import api
+from avalon.maya import lib
+from pype.hosts.maya import lib as pypelib
+
+from maya import cmds
+import maya.app.renderSetup.model.renderSetup as renderSetup
 
 
 class RenderSetupLoader(api.Loader):
-    """
-    This will load json preset for RenderSetup, overwriting current one.
-    """
+    """Load json preset for RenderSetup, overwriting the current one."""
 
     families = ["rendersetup"]
     representations = ["json"]
@@ -19,7 +30,7 @@ class RenderSetupLoader(api.Loader):
    color = "orange"

    def load(self, context, name, namespace, data):
        """Load RenderSetup settings."""
        from avalon.maya.pipeline import containerise
        # from pype.hosts.maya.lib import namespaced
@@ -29,7 +40,7 @@ class RenderSetupLoader(api.Loader):
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        self.log.info(">>> loading json [ {} ]".format(self.fname))
        with open(self.fname, "r") as file:
            renderSetup.instance().decode(
                json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
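
The prefix="_" if asset[0].isdigit() else "" guard above exists because Maya identifiers, and therefore namespace names, cannot start with a digit. A minimal sketch of that idea using plain maya.cmds rather than the avalon namespace helper; the make_asset_namespace name and the counter format are made up for illustration:

import maya.cmds as cmds


def make_asset_namespace(asset_name):
    """Create a unique Maya namespace for asset_name.

    Asset names starting with a digit (e.g. "010_shot") get a "_" prefix,
    because Maya namespace names may not begin with a digit.
    """
    prefix = "_" if asset_name[0].isdigit() else ""
    base = "{}{}_".format(prefix, asset_name)

    # Append a counter until the namespace is free.
    namespace = base
    index = 1
    while cmds.namespace(exists=namespace):
        namespace = "{}{:02d}".format(base, index)
        index += 1

    cmds.namespace(add=namespace)
    return namespace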
@@ -42,9 +53,56 @@ class RenderSetupLoader(api.Loader):
        if not nodes:
            return

        self.log.info(">>> containerising [ {} ]".format(name))
        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def remove(self, container):
        """Remove RenderSetup settings instance."""
        from maya import cmds

        container_name = container["objectName"]

        self.log.info("Removing '%s' from Maya.." % container["name"])

        container_content = cmds.sets(container_name, query=True)
        nodes = cmds.ls(container_content, long=True)

        nodes.append(container_name)

        try:
            cmds.delete(nodes)
        except ValueError:
            # Already implicitly deleted by Maya upon removing reference
            pass

    def update(self, container, representation):
        """Update RenderSetup settings by overwriting existing settings."""
        pypelib.show_message(
            "Render setup update",
            "Render setup settings will be overwritten by the new version. "
            "All settings specified by the user that are not included in "
            "the loaded version will be lost.")
        path = api.get_representation_path(representation)
        with open(path, "r") as file:
            try:
                renderSetup.instance().decode(
                    json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
            except Exception:
                self.log.error("There were errors during loading")
                six.reraise(*sys.exc_info())

        # Update metadata
        node = container["objectName"]
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")
        self.log.info("... updated")

    def switch(self, container, representation):
        """Switch representations."""
        self.update(container, representation)
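
Both load() and update() lean on Maya's built-in Render Setup serialization. A short standalone sketch of that round trip, assuming the documented maya.app.renderSetup API; the encode(None) argument is the optional notes object and path is a placeholder:

import json

import maya.app.renderSetup.model.renderSetup as renderSetup


def export_rendersetup(path):
    # Serialize the scene's current Render Setup (layers, collections,
    # overrides) to a json file.
    with open(path, "w") as f:
        json.dump(renderSetup.instance().encode(None), f, indent=2,
                  sort_keys=True)


def import_rendersetup(path):
    # Re-apply a previously exported preset; DECODE_AND_OVERWRITE replaces
    # whatever Render Setup configuration the scene currently has.
    with open(path, "r") as f:
        renderSetup.instance().decode(
            json.load(f), renderSetup.DECODE_AND_OVERWRITE, None)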
@@ -19,6 +19,7 @@ class ExtractCameraAlembic(pype.api.Extractor):
     label = "Camera (Alembic)"
     hosts = ["maya"]
     families = ["camera"]
+    bake_attributes = []
 
     def process(self, instance):
@@ -66,6 +67,14 @@ class ExtractCameraAlembic(pype.api.Extractor):
 
         job_str += ' -file "{0}"'.format(path)
 
+        # bake specified attributes in preset
+        assert isinstance(self.bake_attributes, (list, tuple)), (
+            "Attributes to bake must be specified as a list"
+        )
+        for attr in self.bake_attributes:
+            self.log.info("Adding {} attribute".format(attr))
+            job_str += " -attr {0}".format(attr)
+
         with lib.evaluation("off"):
             with avalon.maya.suspended_refresh():
                 cmds.AbcExport(j=job_str, verbose=False)
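
AbcExport takes a single job string of flags, so baking extra attributes is just a matter of appending -attr tokens before the export runs. A rough sketch with placeholder frame range, camera path and attribute names standing in for what the extractor computes:

from maya import cmds

# Placeholder values for this sketch.
start, end = 1001, 1100
camera = "|camera_GRP|renderCamShape"
abc_path = "/tmp/cameraMain.abc"
bake_attributes = ["focalLength", "fStop"]

job_str = "-frameRange {0} {1}".format(start, end)
job_str += " -root {0}".format(camera)
job_str += ' -file "{0}"'.format(abc_path)
for attr in bake_attributes:
    # Each -attr flag tells Alembic to sample that Maya attribute as well.
    job_str += " -attr {0}".format(attr)

cmds.AbcExport(j=job_str, verbose=False)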
@@ -20,6 +20,7 @@ import os
 import json
 import getpass
 import copy
+import re
 
 import clique
 import requests
@@ -108,8 +109,8 @@ def get_renderer_variables(renderlayer, root):
         # does not work for vray.
         scene = cmds.file(query=True, sceneName=True)
         scene, _ = os.path.splitext(os.path.basename(scene))
-        filename_0 = filename_prefix.replace('<Scene>', scene)
-        filename_0 = filename_0.replace('<Layer>', renderlayer)
+        filename_0 = re.sub('<Scene>', scene, filename_prefix, flags=re.IGNORECASE)  # noqa: E501
+        filename_0 = re.sub('<Layer>', renderlayer, filename_0, flags=re.IGNORECASE)  # noqa: E501
         filename_0 = "{}.{}.{}".format(
             filename_0, "#" * int(padding), extension)
         filename_0 = os.path.normpath(os.path.join(root, filename_0))
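
The switch from str.replace to re.sub is about case: image file prefixes may spell the tokens as <scene>, <SCENE> or <layer>, which a literal replace leaves untouched. A small illustration with an invented prefix; note that re.sub treats backslashes in the replacement specially, which is worth keeping in mind if values could ever contain them:

import re

filename_prefix = "maya/<scene>/<Layer>_beauty"
scene = "sh010_lighting_v003"
renderlayer = "rs_main"

# str.replace is case sensitive, so the lowercase "<scene>" token survives.
literal = filename_prefix.replace('<Scene>', scene).replace('<Layer>', renderlayer)
print(literal)   # maya/<scene>/rs_main_beauty

# re.sub with IGNORECASE resolves any capitalization of the tokens.
resolved = re.sub('<Scene>', scene, filename_prefix, flags=re.IGNORECASE)
resolved = re.sub('<Layer>', renderlayer, resolved, flags=re.IGNORECASE)
print(resolved)  # maya/sh010_lighting_v003/rs_main_beauty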
@@ -375,16 +376,32 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
         if isinstance(exp[0], dict):
             # we have aovs and we need to iterate over them
             for _aov, files in exp[0].items():
-                col = clique.assemble(files)[0][0]
-                output_file = col.format('{head}{padding}{tail}')
-                payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501
+                col, rem = clique.assemble(files)
+                if not col and rem:
+                    # we couldn't find any collections but have
+                    # individual files.
+                    assert len(rem) == 1, ("Found multiple non related files "
+                                           "to render, don't know what to do "
+                                           "with them.")
+                    payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0]  # noqa: E501
+                    output_file = rem[0]
+                else:
+                    output_file = col.format('{head}{padding}{tail}')
+                    payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501
                 output_filenames[exp_index] = output_file
                 exp_index += 1
         else:
-            col = clique.assemble(files)[0][0]
-            output_file = col.format('{head}{padding}{tail}')
-            payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file
-            # OutputFilenames[exp_index] = output_file
+            col, rem = clique.assemble(files)
+            if not col and rem:
+                # we couldn't find any collections but have
+                # individual files.
+                assert len(rem) == 1, ("Found multiple non related files "
+                                       "to render, don't know what to do "
+                                       "with them.")
+                payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0]  # noqa: E501
+            else:
+                output_file = col.format('{head}{padding}{tail}')
+                payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501
 
         plugin = payload["JobInfo"]["Plugin"]
         self.log.info("using render plugin : {}".format(plugin))
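
clique.assemble splits a flat list of file names into frame collections plus a remainder of files that do not belong to any sequence, which is what the new branches distinguish; a single non-sequential output such as a movie ends up in the remainder. A standalone illustration with invented file names; note that the collections come back as a list, so a single sequence is collections[0]:

import clique

files = [
    "render.0001.exr",
    "render.0002.exr",
    "render.0003.exr",
    "preview.mp4",
]

collections, remainder = clique.assemble(files)

# Sequences are Collection objects; format() rebuilds a padded pattern.
print(collections[0].format('{head}{padding}{tail}'))  # render.%04d.exr

# Files that are not part of any sequence end up in the remainder.
print(remainder)  # ['preview.mp4']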
@@ -62,9 +62,16 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
         for family in families:
             for preset in presets[family]:
                 [node_name, attribute_name] = preset.split(".")
-                attributes.update(
-                    {node_name: {attribute_name: presets[family][preset]}}
-                )
+                try:
+                    attributes[node_name].update(
+                        {attribute_name: presets[family][preset]}
+                    )
+                except KeyError:
+                    attributes.update({
+                        node_name: {
+                            attribute_name: presets[family][preset]
+                        }
+                    })
 
         # Get invalid attributes.
         nodes = pm.ls()
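
The reason for the try/except is that a plain attributes.update({node: {...}}) replaces the whole per-node dictionary, so a second preset on the same node used to discard the first one. A minimal sketch of the difference, with invented preset values:

presets = {
    "defaultArnoldDriver.mergeAOVs": True,
    "defaultArnoldDriver.exrCompression": 3,
}

# Old behaviour: the later preset replaces the node's whole attribute dict.
flat = {}
for key, value in presets.items():
    node_name, attribute_name = key.split(".")
    flat.update({node_name: {attribute_name: value}})
print(flat)    # {'defaultArnoldDriver': {'exrCompression': 3}} -- mergeAOVs lost

# New behaviour: merge into the existing per-node dict when there is one.
merged = {}
for key, value in presets.items():
    node_name, attribute_name = key.split(".")
    try:
        merged[node_name].update({attribute_name: value})
    except KeyError:
        merged[node_name] = {attribute_name: value}
print(merged)  # {'defaultArnoldDriver': {'mergeAOVs': True, 'exrCompression': 3}}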
@@ -1,108 +0,0 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import
import pyblish.api
from pype.action import get_errored_instances_from_context
import pype.api


class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner."""

    label = "Show Instances"
    icon = "briefcase"
    on = "failed"

    def process(self, context, plugin):
        """Process invalid validators and select invalid instances."""
        try:
            from maya import cmds
        except ImportError:
            raise ImportError("Current host is not Maya")

        errored_instances = get_errored_instances_from_context(context)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes..")
        invalid = list()
        for _instance in instances:
            invalid_instances = plugin.get_invalid(context)
            if invalid_instances:
                if isinstance(invalid_instances, (list, tuple)):
                    invalid.extend(invalid_instances)
                else:
                    self.log.warning("Plug-in returned to be invalid, "
                                     "but has no selectable nodes.")

        # Ensure unique (process each node only once)
        invalid = list(set(invalid))

        if invalid:
            self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
            cmds.select(invalid, replace=True, noExpand=True)
        else:
            self.log.info("No invalid nodes found.")
            cmds.select(deselect=True)


class RepairSelectInvalidInstances(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        from maya import cmds
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
            cmds.setAttr(instance.data.get("name") + ".asset",
                         context_asset, type="string")


class ValidateInstanceInContext(pyblish.api.ContextPlugin):
    """Validator to check if instance asset matches context asset.

    When working in per-shot style you always publish data in context of
    current asset (shot). This validator checks if this is so. It is optional
    so it can be disabled when needed.

    Action on this validator will select invalid instances in Outliner.
    """

    order = pype.api.ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]

    @classmethod
    def get_invalid(cls, context):
        """Get invalid instances."""
        invalid = []
        context_asset = context.data["assetEntity"]["name"]
        cls.log.info("we are in {}".format(context_asset))
        for instance in context:
            asset = instance.data.get("asset")
            if asset != context_asset:
                cls.log.warning("{} has asset {}".format(instance.name, asset))
                invalid.append(instance.name)

        return invalid

    def process(self, context):
        """Check instances."""
        invalid = self.get_invalid(context)
        if invalid:
            raise AssertionError("Some instances don't share the same context")