Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-25 05:14:40 +01:00

Merge branch 'develop' into chore/remove_deprecated_lib_funcs

Commit a0225eb790: 37 changed files with 994 additions and 812 deletions
@@ -6,8 +6,7 @@ from openpype.hosts.aftereffects import api
 from openpype.pipeline import (
     Creator,
     CreatedInstance,
-    CreatorError,
-    legacy_io,
+    CreatorError
 )
 from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
 from openpype.lib import prepare_template_data
@@ -127,7 +126,7 @@ class RenderCreator(Creator):
         subset_change = _changes.get("subset")
         if subset_change:
             api.get_stub().rename_item(created_inst.data["members"][0],
-                                       subset_change[1])
+                                       subset_change.new_value)

     def remove_instances(self, instances):
         for instance in instances:
@@ -195,7 +194,7 @@ class RenderCreator(Creator):
         instance_data.pop("uuid")

         if not instance_data.get("task"):
-            instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
+            instance_data["task"] = self.create_context.get_current_task_name()

         if not instance_data.get("creator_attributes"):
             is_old_farm = instance_data["family"] != "renderLocal"
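Note: the rename hunk above swaps tuple indexing ("subset_change[1]") for an attribute ("subset_change.new_value"), so instance changes are exposed as objects rather than (old, new) tuples. A minimal sketch of such a change object, assuming only the attributes visible in the diff (the real openpype class may differ):

class ChangeItem(object):
    """Pairs the previous and the updated value of a single key."""

    def __init__(self, old_value, new_value):
        self.old_value = old_value
        self.new_value = new_value


changes = {"subset": ChangeItem("renderMain", "renderHero")}
subset_change = changes.get("subset")
if subset_change:
    # Old code indexed a tuple; new code reads the attribute instead.
    print(subset_change.new_value)  # -> renderHero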
@@ -2,8 +2,7 @@ import openpype.hosts.aftereffects.api as api
 from openpype.client import get_asset_by_name
 from openpype.pipeline import (
     AutoCreator,
-    CreatedInstance,
-    legacy_io,
+    CreatedInstance
 )
 from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances

@@ -38,10 +37,11 @@ class AEWorkfileCreator(AutoCreator):
                 existing_instance = instance
                 break

-        project_name = legacy_io.Session["AVALON_PROJECT"]
-        asset_name = legacy_io.Session["AVALON_ASSET"]
-        task_name = legacy_io.Session["AVALON_TASK"]
-        host_name = legacy_io.Session["AVALON_APP"]
+        context = self.create_context
+        project_name = context.get_current_project_name()
+        asset_name = context.get_current_asset_name()
+        task_name = context.get_current_task_name()
+        host_name = context.host_name

         if existing_instance is None:
             asset_doc = get_asset_by_name(project_name, asset_name)
@@ -143,6 +143,9 @@ class ExtractSubsetResources(publish.Extractor):
         # create staging dir path
         staging_dir = self.staging_dir(instance)

+        # append staging dir for later cleanup
+        instance.context.data["cleanupFullPaths"].append(staging_dir)
+
         # add default preset type for thumbnail and reviewable video
         # update them with settings and override in case the same
         # are found in there
@@ -548,30 +551,3 @@ class ExtractSubsetResources(publish.Extractor):
             "Path `{}` is containing more that one clip".format(path)
         )
         return clips[0]
-
-    def staging_dir(self, instance):
-        """Provide a temporary directory in which to store extracted files
-
-        Upon calling this method the staging directory is stored inside
-        the instance.data['stagingDir']
-        """
-        staging_dir = instance.data.get('stagingDir', None)
-        openpype_temp_dir = os.getenv("OPENPYPE_TEMP_DIR")
-
-        if not staging_dir:
-            if openpype_temp_dir and os.path.exists(openpype_temp_dir):
-                staging_dir = os.path.normpath(
-                    tempfile.mkdtemp(
-                        prefix="pyblish_tmp_",
-                        dir=openpype_temp_dir
-                    )
-                )
-            else:
-                staging_dir = os.path.normpath(
-                    tempfile.mkdtemp(prefix="pyblish_tmp_")
-                )
-            instance.data['stagingDir'] = staging_dir
-
-        instance.context.data["cleanupFullPaths"].append(staging_dir)
-
-        return staging_dir
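Note: the deleted staging_dir() override resolved a temp directory once per instance, honoured OPENPYPE_TEMP_DIR, cached the result in instance.data and registered it for cleanup; after this change the base Extractor provides the directory and the first hunk only appends the cleanup path. A standalone sketch of the removed behaviour, with "instance" mimicked by plain dicts:

import os
import tempfile


def resolve_staging_dir(instance_data, context_data):
    staging_dir = instance_data.get("stagingDir")
    custom_temp_dir = os.getenv("OPENPYPE_TEMP_DIR")

    if not staging_dir:
        # Prefer the studio-configured temp root when it exists.
        if custom_temp_dir and os.path.exists(custom_temp_dir):
            staging_dir = os.path.normpath(
                tempfile.mkdtemp(prefix="pyblish_tmp_", dir=custom_temp_dir)
            )
        else:
            staging_dir = os.path.normpath(
                tempfile.mkdtemp(prefix="pyblish_tmp_")
            )
        instance_data["stagingDir"] = staging_dir

    # Register for cleanup, as the new inline code above does.
    context_data.setdefault("cleanupFullPaths", []).append(staging_dir)
    return staging_dir


instance_data, context_data = {}, {}
print(resolve_staging_dir(instance_data, context_data))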
@@ -6,7 +6,7 @@ from openpype.hosts.maya.api import (
 from maya import cmds


-class CreateAss(plugin.Creator):
+class CreateArnoldSceneSource(plugin.Creator):
     """Arnold Scene Source"""

     name = "ass"
@@ -29,7 +29,7 @@ class CreateArnoldSceneSource(plugin.Creator):
     maskOperator = False

     def __init__(self, *args, **kwargs):
-        super(CreateAss, self).__init__(*args, **kwargs)
+        super(CreateArnoldSceneSource, self).__init__(*args, **kwargs)

         # Add animation data
         self.data.update(lib.collect_animation_data())
@@ -52,7 +52,7 @@ class CreateArnoldSceneSource(plugin.Creator):
         self.data["maskOperator"] = self.maskOperator

     def process(self):
-        instance = super(CreateAss, self).process()
+        instance = super(CreateArnoldSceneSource, self).process()

         nodes = []

@@ -61,6 +61,6 @@ class CreateArnoldSceneSource(plugin.Creator):

         cmds.sets(nodes, rm=instance)

-        assContent = cmds.sets(name="content_SET")
-        assProxy = cmds.sets(name="proxy_SET", empty=True)
+        assContent = cmds.sets(name=instance + "_content_SET")
+        assProxy = cmds.sets(name=instance + "_proxy_SET", empty=True)
         cmds.sets([assContent, assProxy], forceElement=instance)
@@ -1,3 +1,5 @@
+from maya import cmds
+
 from openpype.hosts.maya.api import (
     lib,
     plugin
@@ -37,3 +39,9 @@ class CreatePointCache(plugin.Creator):
         # Default to not send to farm.
         self.data["farm"] = False
         self.data["priority"] = 50
+
+    def process(self):
+        instance = super(CreatePointCache, self).process()
+
+        assProxy = cmds.sets(name=instance + "_proxy_SET", empty=True)
+        cmds.sets(assProxy, forceElement=instance)
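Note: both creators above now derive their object-set names from the instance name instead of the fixed "content_SET"/"proxy_SET". A tiny sketch of the resulting names, without Maya:

for instance_name in ("assMain", "assHero"):
    # Prefixing with the instance name keeps the sets of two instances
    # from colliding in the same scene.
    print(instance_name + "_content_SET", instance_name + "_proxy_SET")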
@@ -1,132 +0,0 @@
-import os
-
-from openpype.pipeline import (
-    legacy_io,
-    load,
-    get_representation_path
-)
-from openpype.settings import get_project_settings
-
-
-class AlembicStandinLoader(load.LoaderPlugin):
-    """Load Alembic as Arnold Standin"""
-
-    families = ["animation", "model", "proxyAbc", "pointcache"]
-    representations = ["abc"]
-
-    label = "Import Alembic as Arnold Standin"
-    order = -5
-    icon = "code-fork"
-    color = "orange"
-
-    def load(self, context, name, namespace, options):
-
-        import maya.cmds as cmds
-        import mtoa.ui.arnoldmenu
-        from openpype.hosts.maya.api.pipeline import containerise
-        from openpype.hosts.maya.api.lib import unique_namespace
-
-        version = context["version"]
-        version_data = version.get("data", {})
-        family = version["data"]["families"]
-        self.log.info("version_data: {}\n".format(version_data))
-        self.log.info("family: {}\n".format(family))
-        frameStart = version_data.get("frameStart", None)
-
-        asset = context["asset"]["name"]
-        namespace = namespace or unique_namespace(
-            asset + "_",
-            prefix="_" if asset[0].isdigit() else "",
-            suffix="_",
-        )
-
-        # Root group
-        label = "{}:{}".format(namespace, name)
-        root = cmds.group(name=label, empty=True)
-
-        settings = get_project_settings(os.environ['AVALON_PROJECT'])
-        colors = settings["maya"]["load"]["colors"]
-        fps = legacy_io.Session["AVALON_FPS"]
-        c = colors.get(family[0])
-        if c is not None:
-            r = (float(c[0]) / 255)
-            g = (float(c[1]) / 255)
-            b = (float(c[2]) / 255)
-            cmds.setAttr(root + ".useOutlinerColor", 1)
-            cmds.setAttr(root + ".outlinerColor",
-                         r, g, b)
-
-        transform_name = label + "_ABC"
-
-        standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0]
-        standin = cmds.listRelatives(standinShape, parent=True,
-                                     typ="transform")
-        standin = cmds.rename(standin, transform_name)
-        standinShape = cmds.listRelatives(standin, children=True)[0]
-
-        cmds.parent(standin, root)
-
-        # Set the standin filepath
-        cmds.setAttr(standinShape + ".dso", self.fname, type="string")
-        cmds.setAttr(standinShape + ".abcFPS", float(fps))
-
-        if frameStart is None:
-            cmds.setAttr(standinShape + ".useFrameExtension", 0)
-
-        elif "model" in family:
-            cmds.setAttr(standinShape + ".useFrameExtension", 0)
-
-        else:
-            cmds.setAttr(standinShape + ".useFrameExtension", 1)
-
-        nodes = [root, standin]
-        self[:] = nodes
-
-        return containerise(
-            name=name,
-            namespace=namespace,
-            nodes=nodes,
-            context=context,
-            loader=self.__class__.__name__)
-
-    def update(self, container, representation):
-
-        import pymel.core as pm
-
-        path = get_representation_path(representation)
-        fps = legacy_io.Session["AVALON_FPS"]
-        # Update the standin
-        standins = list()
-        members = pm.sets(container['objectName'], query=True)
-        self.log.info("container:{}".format(container))
-        for member in members:
-            shape = member.getShape()
-            if (shape and shape.type() == "aiStandIn"):
-                standins.append(shape)
-
-        for standin in standins:
-            standin.dso.set(path)
-            standin.abcFPS.set(float(fps))
-            if "modelMain" in container['objectName']:
-                standin.useFrameExtension.set(0)
-            else:
-                standin.useFrameExtension.set(1)
-
-        container = pm.PyNode(container["objectName"])
-        container.representation.set(str(representation["_id"]))
-
-    def switch(self, container, representation):
-        self.update(container, representation)
-
-    def remove(self, container):
-        import maya.cmds as cmds
-        members = cmds.sets(container['objectName'], query=True)
-        cmds.lockNode(members, lock=False)
-        cmds.delete([container['objectName']] + members)
-
-        # Clean up the namespace
-        try:
-            cmds.namespace(removeNamespace=container['namespace'],
-                           deleteNamespaceContent=True)
-        except RuntimeError:
-            pass
openpype/hosts/maya/plugins/load/load_arnold_standin.py (new file, 218 lines)
@@ -0,0 +1,218 @@
+import os
+import clique
+
+import maya.cmds as cmds
+import mtoa.ui.arnoldmenu
+
+from openpype.settings import get_project_settings
+from openpype.pipeline import (
+    load,
+    get_representation_path
+)
+from openpype.hosts.maya.api.lib import (
+    unique_namespace, get_attribute_input, maintained_selection
+)
+from openpype.hosts.maya.api.pipeline import containerise
+
+
+def is_sequence(files):
+    sequence = False
+    collections, remainder = clique.assemble(files)
+    if collections:
+        sequence = True
+
+    return sequence
+
+
+class ArnoldStandinLoader(load.LoaderPlugin):
+    """Load as Arnold standin"""
+
+    families = ["ass", "animation", "model", "proxyAbc", "pointcache"]
+    representations = ["ass", "abc"]
+
+    label = "Load as Arnold standin"
+    order = -5
+    icon = "code-fork"
+    color = "orange"
+
+    def load(self, context, name, namespace, options):
+        version = context['version']
+        version_data = version.get("data", {})
+
+        self.log.info("version_data: {}\n".format(version_data))
+
+        asset = context['asset']['name']
+        namespace = namespace or unique_namespace(
+            asset + "_",
+            prefix="_" if asset[0].isdigit() else "",
+            suffix="_",
+        )
+
+        # Root group
+        label = "{}:{}".format(namespace, name)
+        root = cmds.group(name=label, empty=True)
+
+        # Set color.
+        settings = get_project_settings(context["project"]["name"])
+        color = settings['maya']['load']['colors'].get('ass')
+        if color is not None:
+            cmds.setAttr(root + ".useOutlinerColor", True)
+            cmds.setAttr(
+                root + ".outlinerColor", color[0], color[1], color[2]
+            )
+
+        with maintained_selection():
+            # Create transform with shape
+            transform_name = label + "_standin"
+
+            standin_shape = mtoa.ui.arnoldmenu.createStandIn()
+            standin = cmds.listRelatives(standin_shape, parent=True)[0]
+            standin = cmds.rename(standin, transform_name)
+            standin_shape = cmds.listRelatives(standin, shapes=True)[0]
+
+            cmds.parent(standin, root)
+
+            # Set the standin filepath
+            path, operator = self._setup_proxy(
+                standin_shape, self.fname, namespace
+            )
+            cmds.setAttr(standin_shape + ".dso", path, type="string")
+            sequence = is_sequence(os.listdir(os.path.dirname(self.fname)))
+            cmds.setAttr(standin_shape + ".useFrameExtension", sequence)
+
+        nodes = [root, standin]
+        if operator is not None:
+            nodes.append(operator)
+        self[:] = nodes
+
+        return containerise(
+            name=name,
+            namespace=namespace,
+            nodes=nodes,
+            context=context,
+            loader=self.__class__.__name__)
+
+    def get_next_free_multi_index(self, attr_name):
+        """Find the next unconnected multi index at the input attribute."""
+        for index in range(10000000):
+            connection_info = cmds.connectionInfo(
+                "{}[{}]".format(attr_name, index),
+                sourceFromDestination=True
+            )
+            if len(connection_info or []) == 0:
+                return index
+
+    def _get_proxy_path(self, path):
+        basename_split = os.path.basename(path).split(".")
+        proxy_basename = (
+            basename_split[0] + "_proxy." + ".".join(basename_split[1:])
+        )
+        proxy_path = "/".join([os.path.dirname(path), proxy_basename])
+        return proxy_basename, proxy_path
+
+    def _setup_proxy(self, shape, path, namespace):
+        proxy_basename, proxy_path = self._get_proxy_path(path)
+
+        options_node = "defaultArnoldRenderOptions"
+        merge_operator = get_attribute_input(options_node + ".operator")
+        if merge_operator is None:
+            merge_operator = cmds.createNode("aiMerge")
+            cmds.connectAttr(
+                merge_operator + ".message", options_node + ".operator"
+            )
+
+        merge_operator = merge_operator.split(".")[0]
+
+        string_replace_operator = cmds.createNode(
+            "aiStringReplace", name=namespace + ":string_replace_operator"
+        )
+        node_type = "alembic" if path.endswith(".abc") else "procedural"
+        cmds.setAttr(
+            string_replace_operator + ".selection",
+            "*.(@node=='{}')".format(node_type),
+            type="string"
+        )
+        cmds.setAttr(
+            string_replace_operator + ".match",
+            proxy_basename,
+            type="string"
+        )
+        cmds.setAttr(
+            string_replace_operator + ".replace",
+            os.path.basename(path),
+            type="string"
+        )
+
+        cmds.connectAttr(
+            string_replace_operator + ".out",
+            "{}.inputs[{}]".format(
+                merge_operator,
+                self.get_next_free_multi_index(merge_operator + ".inputs")
+            )
+        )
+
+        # We set up the string operator no matter whether there is a proxy or
+        # not. This makes it easier to update since the string operator will
+        # always be created. Return original path to use for standin.
+        if not os.path.exists(proxy_path):
+            return path, string_replace_operator
+
+        return proxy_path, string_replace_operator
+
+    def update(self, container, representation):
+        # Update the standin
+        members = cmds.sets(container['objectName'], query=True)
+        for member in members:
+            if cmds.nodeType(member) == "aiStringReplace":
+                string_replace_operator = member
+
+            shapes = cmds.listRelatives(member, shapes=True)
+            if not shapes:
+                continue
+            if cmds.nodeType(shapes[0]) == "aiStandIn":
+                standin = shapes[0]
+
+        path = get_representation_path(representation)
+        proxy_basename, proxy_path = self._get_proxy_path(path)
+
+        # Whether there is a proxy or not, we still update the string
+        # operator. If no proxy exists, the string operator won't replace
+        # anything.
+        cmds.setAttr(
+            string_replace_operator + ".match",
+            "resources/" + proxy_basename,
+            type="string"
+        )
+        cmds.setAttr(
+            string_replace_operator + ".replace",
+            os.path.basename(path),
+            type="string"
+        )
+
+        dso_path = path
+        if os.path.exists(proxy_path):
+            dso_path = proxy_path
+        cmds.setAttr(standin + ".dso", dso_path, type="string")
+
+        sequence = is_sequence(os.listdir(os.path.dirname(path)))
+        cmds.setAttr(standin + ".useFrameExtension", sequence)
+
+        cmds.setAttr(
+            container["objectName"] + ".representation",
+            str(representation["_id"]),
+            type="string"
+        )
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        members = cmds.sets(container['objectName'], query=True)
+        cmds.lockNode(members, lock=False)
+        cmds.delete([container['objectName']] + members)
+
+        # Clean up the namespace
+        try:
+            cmds.namespace(removeNamespace=container['namespace'],
+                           deleteNamespaceContent=True)
+        except RuntimeError:
+            pass
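Note: is_sequence() above leans on the real clique library; clique.assemble() groups numbered files into collections and returns everything else as a remainder. A short usage sketch of the same call:

import clique

frames = ["standin.0001.ass", "standin.0002.ass", "standin.0003.ass"]
collections, remainder = clique.assemble(frames)
print(bool(collections))   # True -> treated as a sequence
print(remainder)           # files that did not fit any collection

single = ["standin.abc"]
collections, remainder = clique.assemble(single)
print(bool(collections))   # False -> useFrameExtension stays off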
@@ -1,290 +0,0 @@
-import os
-import clique
-
-from openpype.settings import get_project_settings
-from openpype.pipeline import (
-    load,
-    get_representation_path
-)
-import openpype.hosts.maya.api.plugin
-from openpype.hosts.maya.api.plugin import get_reference_node
-from openpype.hosts.maya.api.lib import (
-    maintained_selection,
-    unique_namespace
-)
-from openpype.hosts.maya.api.pipeline import containerise
-
-
-class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
-    """Load Arnold Proxy as reference"""
-
-    families = ["ass"]
-    representations = ["ass"]
-
-    label = "Reference .ASS standin with Proxy"
-    order = -10
-    icon = "code-fork"
-    color = "orange"
-
-    def process_reference(self, context, name, namespace, options):
-
-        import maya.cmds as cmds
-        import pymel.core as pm
-
-        version = context['version']
-        version_data = version.get("data", {})
-
-        self.log.info("version_data: {}\n".format(version_data))
-
-        frameStart = version_data.get("frameStart", None)
-
-        try:
-            family = context["representation"]["context"]["family"]
-        except ValueError:
-            family = "ass"
-
-        with maintained_selection():
-
-            groupName = "{}:{}".format(namespace, name)
-            path = self.fname
-            proxyPath_base = os.path.splitext(path)[0]
-
-            if frameStart is not None:
-                proxyPath_base = os.path.splitext(proxyPath_base)[0]
-
-                publish_folder = os.path.split(path)[0]
-                files_in_folder = os.listdir(publish_folder)
-                collections, remainder = clique.assemble(files_in_folder)
-
-                if collections:
-                    hashes = collections[0].padding * '#'
-                    coll = collections[0].format('{head}[index]{tail}')
-                    filename = coll.replace('[index]', hashes)
-
-                    path = os.path.join(publish_folder, filename)
-
-            proxyPath = proxyPath_base + ".ma"
-
-            project_name = context["project"]["name"]
-            file_url = self.prepare_root_value(proxyPath,
-                                               project_name)
-
-            nodes = cmds.file(file_url,
-                              namespace=namespace,
-                              reference=True,
-                              returnNewNodes=True,
-                              groupReference=True,
-                              groupName=groupName)
-
-            cmds.makeIdentity(groupName, apply=False, rotate=True,
-                              translate=True, scale=True)
-
-            # Set attributes
-            proxyShape = pm.ls(nodes, type="mesh")[0]
-
-            proxyShape.aiTranslator.set('procedural')
-            proxyShape.dso.set(path)
-            proxyShape.aiOverrideShaders.set(0)
-
-            settings = get_project_settings(project_name)
-            colors = settings['maya']['load']['colors']
-
-            c = colors.get(family)
-            if c is not None:
-                cmds.setAttr(groupName + ".useOutlinerColor", 1)
-                cmds.setAttr(groupName + ".outlinerColor",
-                             (float(c[0])/255),
-                             (float(c[1])/255),
-                             (float(c[2])/255)
-                             )
-
-        self[:] = nodes
-
-        return nodes
-
-    def switch(self, container, representation):
-        self.update(container, representation)
-
-    def update(self, container, representation):
-        from maya import cmds
-        import pymel.core as pm
-
-        node = container["objectName"]
-
-        representation["context"].pop("frame", None)
-        path = get_representation_path(representation)
-        print(path)
-        # path = self.fname
-        print(self.fname)
-        proxyPath = os.path.splitext(path)[0] + ".ma"
-        print(proxyPath)
-
-        # Get reference node from container members
-        members = cmds.sets(node, query=True, nodesOnly=True)
-        reference_node = get_reference_node(members)
-
-        assert os.path.exists(proxyPath), "%s does not exist." % proxyPath
-
-        try:
-            file_url = self.prepare_root_value(proxyPath,
-                                               representation["context"]
-                                               ["project"]
-                                               ["name"])
-            content = cmds.file(file_url,
-                                loadReference=reference_node,
-                                type="mayaAscii",
-                                returnNewNodes=True)
-
-            # Set attributes
-            proxyShape = pm.ls(content, type="mesh")[0]
-
-            proxyShape.aiTranslator.set('procedural')
-            proxyShape.dso.set(path)
-            proxyShape.aiOverrideShaders.set(0)
-
-        except RuntimeError as exc:
-            # When changing a reference to a file that has load errors the
-            # command will raise an error even if the file is still loaded
-            # correctly (e.g. when raising errors on Arnold attributes)
-            # When the file is loaded and has content, we consider it's fine.
-            if not cmds.referenceQuery(reference_node, isLoaded=True):
-                raise
-
-            content = cmds.referenceQuery(reference_node,
-                                          nodes=True,
-                                          dagPath=True)
-            if not content:
-                raise
-
-            self.log.warning("Ignoring file read error:\n%s", exc)
-
-        # Add new nodes of the reference to the container
-        cmds.sets(content, forceElement=node)
-
-        # Remove any placeHolderList attribute entries from the set that
-        # are remaining from nodes being removed from the referenced file.
-        members = cmds.sets(node, query=True)
-        invalid = [x for x in members if ".placeHolderList" in x]
-        if invalid:
-            cmds.sets(invalid, remove=node)
-
-        # Update metadata
-        cmds.setAttr("{}.representation".format(node),
-                     str(representation["_id"]),
-                     type="string")
-
-
-class AssStandinLoader(load.LoaderPlugin):
-    """Load .ASS file as standin"""
-
-    families = ["ass"]
-    representations = ["ass"]
-
-    label = "Load .ASS file as standin"
-    order = -5
-    icon = "code-fork"
-    color = "orange"
-
-    def load(self, context, name, namespace, options):
-
-        import maya.cmds as cmds
-        import mtoa.ui.arnoldmenu
-        import pymel.core as pm
-
-        version = context['version']
-        version_data = version.get("data", {})
-
-        self.log.info("version_data: {}\n".format(version_data))
-
-        frameStart = version_data.get("frameStart", None)
-
-        asset = context['asset']['name']
-        namespace = namespace or unique_namespace(
-            asset + "_",
-            prefix="_" if asset[0].isdigit() else "",
-            suffix="_",
-        )
-
-        # cmds.loadPlugin("gpuCache", quiet=True)
-
-        # Root group
-        label = "{}:{}".format(namespace, name)
-        root = pm.group(name=label, empty=True)
-
-        settings = get_project_settings(os.environ['AVALON_PROJECT'])
-        colors = settings['maya']['load']['colors']
-
-        c = colors.get('ass')
-        if c is not None:
-            cmds.setAttr(root + ".useOutlinerColor", 1)
-            cmds.setAttr(root + ".outlinerColor",
-                         c[0], c[1], c[2])
-
-        # Create transform with shape
-        transform_name = label + "_ASS"
-        # transform = pm.createNode("transform", name=transform_name,
-        #                           parent=root)
-
-        standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn())
-        standin = standinShape.getParent()
-        standin.rename(transform_name)
-
-        pm.parent(standin, root)
-
-        # Set the standin filepath
-        standinShape.dso.set(self.fname)
-        if frameStart is not None:
-            standinShape.useFrameExtension.set(1)
-
-        nodes = [root, standin]
-        self[:] = nodes
-
-        return containerise(
-            name=name,
-            namespace=namespace,
-            nodes=nodes,
-            context=context,
-            loader=self.__class__.__name__)
-
-    def update(self, container, representation):
-
-        import pymel.core as pm
-
-        path = get_representation_path(representation)
-
-        files_in_path = os.listdir(os.path.split(path)[0])
-        sequence = 0
-        collections, remainder = clique.assemble(files_in_path)
-        if collections:
-            sequence = 1
-
-        # Update the standin
-        standins = list()
-        members = pm.sets(container['objectName'], query=True)
-        for member in members:
-            shape = member.getShape()
-            if (shape and shape.type() == "aiStandIn"):
-                standins.append(shape)
-
-        for standin in standins:
-            standin.dso.set(path)
-            standin.useFrameExtension.set(sequence)
-
-        container = pm.PyNode(container["objectName"])
-        container.representation.set(str(representation["_id"]))
-
-    def switch(self, container, representation):
-        self.update(container, representation)
-
-    def remove(self, container):
-        import maya.cmds as cmds
-        members = cmds.sets(container['objectName'], query=True)
-        cmds.lockNode(members, lock=False)
-        cmds.delete([container['objectName']] + members)
-
-        # Clean up the namespace
-        try:
-            cmds.namespace(removeNamespace=container['namespace'],
-                           deleteNamespaceContent=True)
-        except RuntimeError:
-            pass
@@ -1,19 +1,18 @@
-from openpype.pipeline.publish import KnownPublishError
+from maya import cmds

 import pyblish.api


-class CollectAssData(pyblish.api.InstancePlugin):
-    """Collect Ass data."""
+class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
+    """Collect Arnold Scene Source data."""

     # Offset to be after renderable camera collection.
     order = pyblish.api.CollectorOrder + 0.2
-    label = 'Collect Ass'
+    label = "Collect Arnold Scene Source"
     families = ["ass"]

     def process(self, instance):
-        objsets = instance.data['setMembers']
+        objsets = instance.data["setMembers"]

         for objset in objsets:
             objset = str(objset)
@@ -21,15 +20,12 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
             if members is None:
                 self.log.warning("Skipped empty instance: \"%s\" " % objset)
                 continue
-            if "content_SET" in objset:
-                instance.data['setMembers'] = members
-                self.log.debug('content members: {}'.format(members))
-            elif objset.startswith("proxy_SET"):
-                if len(members) != 1:
-                    msg = "You have multiple proxy meshes, please only use one"
-                    raise KnownPublishError(msg)
-                instance.data['proxy'] = members
-                self.log.debug('proxy members: {}'.format(members))
+            if objset.endswith("content_SET"):
+                instance.data["setMembers"] = cmds.ls(members, long=True)
+                self.log.debug("content members: {}".format(members))
+            elif objset.endswith("proxy_SET"):
+                instance.data["proxy"] = cmds.ls(members, long=True)
+                self.log.debug("proxy members: {}".format(members))

         # Use camera in object set if present else default to render globals
         # camera.
@@ -1,3 +1,5 @@
+from maya import cmds
+
 import pyblish.api


@@ -12,3 +14,31 @@ class CollectPointcache(pyblish.api.InstancePlugin):
     def process(self, instance):
         if instance.data.get("farm"):
             instance.data["families"].append("publish.farm")
+
+        proxy_set = None
+        for node in instance.data["setMembers"]:
+            if cmds.nodeType(node) != "objectSet":
+                continue
+            members = cmds.sets(node, query=True)
+            if members is None:
+                self.log.warning("Skipped empty objectset: \"%s\" " % node)
+                continue
+            if node.endswith("proxy_SET"):
+                proxy_set = node
+                instance.data["proxy"] = []
+                instance.data["proxyRoots"] = []
+                for member in members:
+                    instance.data["proxy"].extend(cmds.ls(member, long=True))
+                    instance.data["proxyRoots"].extend(
+                        cmds.ls(member, long=True)
+                    )
+                    instance.data["proxy"].extend(
+                        cmds.listRelatives(member, shapes=True, fullPath=True)
+                    )
+                self.log.debug(
+                    "proxy members: {}".format(instance.data["proxy"])
+                )
+
+        if proxy_set:
+            instance.remove(proxy_set)
+            instance.data["setMembers"].remove(proxy_set)
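Note: the collector above stores both transforms and their shapes under "proxy", but only the transforms under "proxyRoots". A hedged sketch of the resulting instance.data layout, with hand-written example paths instead of live Maya queries:

members = ["|assMain_proxy_SET_GRP|proxy_GEO"]
shapes = ["|assMain_proxy_SET_GRP|proxy_GEO|proxy_GEOShape"]

data = {"proxy": [], "proxyRoots": []}
for member, shape in zip(members, shapes):
    data["proxyRoots"].append(member)   # transforms only: alembic roots
    data["proxy"].append(member)        # transforms and shapes get exported
    data["proxy"].append(shape)

print(data)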
@@ -42,7 +42,6 @@ Provides:
 import re
 import os
-import platform
 import json

 from maya import cmds
 import maya.app.renderSetup.model.renderSetup as renderSetup
@@ -320,7 +319,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
                 "renderSetupIncludeLights"
             ),
             "strict_error_checking": render_instance.data.get(
-                "strict_error_checking")
+                "strict_error_checking", True
+            )
         }

         # Collect Deadline url if Deadline module is enabled
@@ -0,0 +1,160 @@
+import os
+
+from maya import cmds
+import arnold
+
+from openpype.pipeline import publish
+from openpype.hosts.maya.api.lib import (
+    maintained_selection, attribute_values, delete_after
+)
+
+
+class ExtractArnoldSceneSource(publish.Extractor):
+    """Extract the content of the instance to an Arnold Scene Source file."""
+
+    label = "Extract Arnold Scene Source"
+    hosts = ["maya"]
+    families = ["ass"]
+    asciiAss = False
+
+    def process(self, instance):
+        staging_dir = self.staging_dir(instance)
+        filename = "{}.ass".format(instance.name)
+        file_path = os.path.join(staging_dir, filename)
+
+        # Mask
+        mask = arnold.AI_NODE_ALL
+
+        node_types = {
+            "options": arnold.AI_NODE_OPTIONS,
+            "camera": arnold.AI_NODE_CAMERA,
+            "light": arnold.AI_NODE_LIGHT,
+            "shape": arnold.AI_NODE_SHAPE,
+            "shader": arnold.AI_NODE_SHADER,
+            "override": arnold.AI_NODE_OVERRIDE,
+            "driver": arnold.AI_NODE_DRIVER,
+            "filter": arnold.AI_NODE_FILTER,
+            "color_manager": arnold.AI_NODE_COLOR_MANAGER,
+            "operator": arnold.AI_NODE_OPERATOR
+        }
+
+        for key in node_types.keys():
+            if instance.data.get("mask" + key.title()):
+                mask = mask ^ node_types[key]
+
+        # Motion blur
+        attribute_data = {
+            "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get(
+                "motionBlur", True
+            ),
+            "defaultArnoldRenderOptions.motion_steps": instance.data.get(
+                "motionBlurKeys", 2
+            ),
+            "defaultArnoldRenderOptions.motion_frames": instance.data.get(
+                "motionBlurLength", 0.5
+            )
+        }
+
+        # Write out .ass file
+        kwargs = {
+            "filename": file_path,
+            "startFrame": instance.data.get("frameStartHandle", 1),
+            "endFrame": instance.data.get("frameEndHandle", 1),
+            "frameStep": instance.data.get("step", 1),
+            "selected": True,
+            "asciiAss": self.asciiAss,
+            "shadowLinks": True,
+            "lightLinks": True,
+            "boundingBox": True,
+            "expandProcedurals": instance.data.get("expandProcedurals", False),
+            "camera": instance.data["camera"],
+            "mask": mask
+        }
+
+        filenames = self._extract(
+            instance.data["setMembers"], attribute_data, kwargs
+        )
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            "name": "ass",
+            "ext": "ass",
+            "files": filenames if len(filenames) > 1 else filenames[0],
+            "stagingDir": staging_dir,
+            "frameStart": kwargs["startFrame"]
+        }
+
+        instance.data["representations"].append(representation)
+
+        self.log.info(
+            "Extracted instance {} to: {}".format(instance.name, staging_dir)
+        )
+
+        # Extract proxy.
+        if not instance.data.get("proxy", []):
+            return
+
+        kwargs["filename"] = file_path.replace(".ass", "_proxy.ass")
+        filenames = self._extract(
+            instance.data["proxy"], attribute_data, kwargs
+        )
+
+        representation = {
+            "name": "proxy",
+            "ext": "ass",
+            "files": filenames if len(filenames) > 1 else filenames[0],
+            "stagingDir": staging_dir,
+            "frameStart": kwargs["startFrame"],
+            "outputName": "proxy"
+        }
+
+        instance.data["representations"].append(representation)
+
+    def _extract(self, nodes, attribute_data, kwargs):
+        self.log.info("Writing: " + kwargs["filename"])
+        filenames = []
+        # Duplicating nodes so they are direct children of the world. This
+        # makes the hierarchy of any exported ass file the same.
+        with delete_after() as delete_bin:
+            duplicate_nodes = []
+            for node in nodes:
+                duplicate_transform = cmds.duplicate(node)[0]
+
+                # Discard the children.
+                shapes = cmds.listRelatives(duplicate_transform, shapes=True)
+                children = cmds.listRelatives(
+                    duplicate_transform, children=True
+                )
+                cmds.delete(set(children) - set(shapes))
+
+                duplicate_transform = cmds.parent(
+                    duplicate_transform, world=True
+                )[0]
+
+                cmds.rename(duplicate_transform, node.split("|")[-1])
+                duplicate_transform = "|" + node.split("|")[-1]
+
+                duplicate_nodes.append(duplicate_transform)
+                delete_bin.append(duplicate_transform)
+
+            with attribute_values(attribute_data):
+                with maintained_selection():
+                    self.log.info(
+                        "Writing: {}".format(duplicate_nodes)
+                    )
+                    cmds.select(duplicate_nodes, noExpand=True)
+
+                    self.log.info(
+                        "Extracting ass sequence with: {}".format(kwargs)
+                    )
+
+                    exported_files = cmds.arnoldExportAss(**kwargs)
+
+                    for file in exported_files:
+                        filenames.append(os.path.split(file)[1])
+
+                    self.log.info("Exported: {}".format(filenames))
+
+        return filenames
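Note: the export mask above is plain bit arithmetic. A hedged sketch of the same XOR behaviour; the AI_NODE_* values are bit flags in the real arnold module, and the numbers below are stand-ins used only to make the example runnable:

AI_NODE_OPTIONS = 0x0001
AI_NODE_CAMERA = 0x0002
AI_NODE_SHAPE = 0x0008
AI_NODE_ALL = 0xFFFF

instance_data = {"maskShape": True, "maskCamera": False}

mask = AI_NODE_ALL
for name, flag in (("Shape", AI_NODE_SHAPE), ("Camera", AI_NODE_CAMERA)):
    if instance_data.get("mask" + name):
        mask = mask ^ flag  # toggle the bit off -> node type is excluded

print(hex(mask))  # 0xfff7: everything except shapes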
@@ -1,106 +0,0 @@
-import os
-
-from maya import cmds
-import arnold
-
-from openpype.pipeline import publish
-from openpype.hosts.maya.api.lib import maintained_selection, attribute_values
-
-
-class ExtractAssStandin(publish.Extractor):
-    """Extract the content of the instance to a ass file"""
-
-    label = "Arnold Scene Source (.ass)"
-    hosts = ["maya"]
-    families = ["ass"]
-    asciiAss = False
-
-    def process(self, instance):
-        staging_dir = self.staging_dir(instance)
-        filename = "{}.ass".format(instance.name)
-        filenames = []
-        file_path = os.path.join(staging_dir, filename)
-
-        # Mask
-        mask = arnold.AI_NODE_ALL
-
-        node_types = {
-            "options": arnold.AI_NODE_OPTIONS,
-            "camera": arnold.AI_NODE_CAMERA,
-            "light": arnold.AI_NODE_LIGHT,
-            "shape": arnold.AI_NODE_SHAPE,
-            "shader": arnold.AI_NODE_SHADER,
-            "override": arnold.AI_NODE_OVERRIDE,
-            "driver": arnold.AI_NODE_DRIVER,
-            "filter": arnold.AI_NODE_FILTER,
-            "color_manager": arnold.AI_NODE_COLOR_MANAGER,
-            "operator": arnold.AI_NODE_OPERATOR
-        }
-
-        for key in node_types.keys():
-            if instance.data.get("mask" + key.title()):
-                mask = mask ^ node_types[key]
-
-        # Motion blur
-        values = {
-            "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get(
-                "motionBlur", True
-            ),
-            "defaultArnoldRenderOptions.motion_steps": instance.data.get(
-                "motionBlurKeys", 2
-            ),
-            "defaultArnoldRenderOptions.motion_frames": instance.data.get(
-                "motionBlurLength", 0.5
-            )
-        }
-
-        # Write out .ass file
-        kwargs = {
-            "filename": file_path,
-            "startFrame": instance.data.get("frameStartHandle", 1),
-            "endFrame": instance.data.get("frameEndHandle", 1),
-            "frameStep": instance.data.get("step", 1),
-            "selected": True,
-            "asciiAss": self.asciiAss,
-            "shadowLinks": True,
-            "lightLinks": True,
-            "boundingBox": True,
-            "expandProcedurals": instance.data.get("expandProcedurals", False),
-            "camera": instance.data["camera"],
-            "mask": mask
-        }
-
-        self.log.info("Writing: '%s'" % file_path)
-        with attribute_values(values):
-            with maintained_selection():
-                self.log.info(
-                    "Writing: {}".format(instance.data["setMembers"])
-                )
-                cmds.select(instance.data["setMembers"], noExpand=True)
-
-                self.log.info(
-                    "Extracting ass sequence with: {}".format(kwargs)
-                )
-
-                exported_files = cmds.arnoldExportAss(**kwargs)
-
-                for file in exported_files:
-                    filenames.append(os.path.split(file)[1])
-
-                self.log.info("Exported: {}".format(filenames))
-
-        if "representations" not in instance.data:
-            instance.data["representations"] = []
-
-        representation = {
-            'name': 'ass',
-            'ext': 'ass',
-            'files': filenames if len(filenames) > 1 else filenames[0],
-            "stagingDir": staging_dir,
-            'frameStart': kwargs["startFrame"]
-        }
-
-        instance.data["representations"].append(representation)
-
-        self.log.info("Extracted instance '%s' to: %s"
-                      % (instance.name, staging_dir))
@@ -1,81 +0,0 @@
-import os
-import contextlib
-
-from maya import cmds
-
-from openpype.pipeline import publish
-from openpype.hosts.maya.api.lib import maintained_selection
-
-
-class ExtractAssProxy(publish.Extractor):
-    """Extract proxy model as Maya Ascii to use as arnold standin
-
-
-    """
-
-    order = publish.Extractor.order + 0.2
-    label = "Ass Proxy (Maya ASCII)"
-    hosts = ["maya"]
-    families = ["ass"]
-
-    def process(self, instance):
-
-        @contextlib.contextmanager
-        def unparent(root):
-            """Temporarily unparent `root`"""
-            parent = cmds.listRelatives(root, parent=True)
-            if parent:
-                cmds.parent(root, world=True)
-                yield
-                self.log.info("{} - {}".format(root, parent))
-                cmds.parent(root, parent)
-            else:
-                yield
-
-        # Define extract output file path
-        stagingdir = self.staging_dir(instance)
-        filename = "{0}.ma".format(instance.name)
-        path = os.path.join(stagingdir, filename)
-
-        # Perform extraction
-        self.log.info("Performing extraction..")
-
-        # Get only the shape contents we need in such a way that we avoid
-        # taking along intermediateObjects
-        proxy = instance.data.get('proxy', None)
-
-        if not proxy:
-            self.log.info("no proxy mesh")
-            return
-
-        members = cmds.ls(proxy,
-                          dag=True,
-                          transforms=True,
-                          noIntermediate=True)
-        self.log.info(members)
-
-        with maintained_selection():
-            with unparent(members[0]):
-                cmds.select(members, noExpand=True)
-                cmds.file(path,
-                          force=True,
-                          typ="mayaAscii",
-                          exportSelected=True,
-                          preserveReferences=False,
-                          channels=False,
-                          constraints=False,
-                          expressions=False,
-                          constructionHistory=False)
-
-        if "representations" not in instance.data:
-            instance.data["representations"] = []
-
-        representation = {
-            'name': 'ma',
-            'ext': 'ma',
-            'files': filename,
-            "stagingDir": stagingdir
-        }
-        instance.data["representations"].append(representation)
-
-        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
@@ -1,4 +1,5 @@
 import os
+import copy

 from maya import cmds

@@ -9,6 +10,7 @@ from openpype.hosts.maya.api.lib import (
     maintained_selection,
     iter_visible_nodes_in_range
 )
+from openpype.lib import StringTemplate


 class ExtractAlembic(publish.Extractor):
@@ -23,9 +25,7 @@ class ExtractAlembic(publish.Extractor):

     label = "Extract Pointcache (Alembic)"
     hosts = ["maya"]
-    families = ["pointcache",
-                "model",
-                "vrayproxy"]
+    families = ["pointcache", "model", "vrayproxy"]
     targets = ["local", "remote"]

     def process(self, instance):
@@ -87,6 +87,7 @@ class ExtractAlembic(publish.Extractor):
                 end=end))

         suspend = not instance.data.get("refresh", False)
+        self.log.info(nodes)
         with suspended_refresh(suspend=suspend):
             with maintained_selection():
                 cmds.select(nodes, noExpand=True)
@@ -101,9 +102,9 @@ class ExtractAlembic(publish.Extractor):
             instance.data["representations"] = []

         representation = {
-            'name': 'abc',
-            'ext': 'abc',
-            'files': filename,
+            "name": "abc",
+            "ext": "abc",
+            "files": filename,
             "stagingDir": dirname
         }
         instance.data["representations"].append(representation)
@@ -112,6 +113,48 @@ class ExtractAlembic(publish.Extractor):

         self.log.info("Extracted {} to {}".format(instance, dirname))

+        # Extract proxy.
+        if not instance.data.get("proxy"):
+            return
+
+        path = path.replace(".abc", "_proxy.abc")
+        if not instance.data.get("includeParentHierarchy", True):
+            # Set the root nodes if we don't want to include parents
+            # The roots are to be considered the ones that are the actual
+            # direct members of the set
+            options["root"] = instance.data["proxyRoots"]
+
+        with suspended_refresh(suspend=suspend):
+            with maintained_selection():
+                cmds.select(instance.data["proxy"])
+                extract_alembic(
+                    file=path,
+                    startFrame=start,
+                    endFrame=end,
+                    **options
+                )
+
+        template_data = copy.deepcopy(instance.data["anatomyData"])
+        template_data.update({"ext": "abc"})
+        templates = instance.context.data["anatomy"].templates["publish"]
+        published_filename_without_extension = StringTemplate(
+            templates["file"]
+        ).format(template_data).replace(".abc", "_proxy")
+        transfers = []
+        destination = os.path.join(
+            instance.data["resourcesDir"],
+            filename.replace(
+                filename.split(".")[0],
+                published_filename_without_extension
+            )
+        )
+        transfers.append((path, destination))
+
+        for source, destination in transfers:
+            self.log.debug("Transfer: {} > {}".format(source, destination))
+
+        instance.data["transfers"] = transfers
+
     def get_members_and_roots(self, instance):
         return instance[:], instance.data.get("setMembers")
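Note: the proxy transfer above splices "_proxy" into the published file name before the extension. A hedged sketch of just that string manipulation; the template output below is a hand-written assumption:

filename = "pointcacheMain.abc"
published_without_ext = "pointcacheMain_v001_proxy"  # assumed template output

destination_name = filename.replace(
    filename.split(".")[0],
    published_without_ext
)
print(destination_name)  # pointcacheMain_v001_proxy.abc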
@@ -0,0 +1,106 @@
+import maya.cmds as cmds
+
+import pyblish.api
+from openpype.pipeline.publish import (
+    ValidateContentsOrder, PublishValidationError
+)
+
+
+class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
+    """Validate Arnold Scene Source.
+
+    We require at least one root node/parent for the meshes. This is to
+    ensure we can duplicate the nodes and preserve the names.
+
+    If using proxies we need the nodes to share the same names and not be
+    parented to the world. This ends up requiring at least two groups: one
+    with the content nodes and another with the proxy nodes.
+    """
+
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["ass"]
+    label = "Validate Arnold Scene Source"
+
+    def _get_nodes_data(self, nodes):
+        ungrouped_nodes = []
+        nodes_by_name = {}
+        parents = []
+        for node in nodes:
+            node_split = node.split("|")
+            if len(node_split) == 2:
+                ungrouped_nodes.append(node)
+
+            parent = "|".join(node_split[:-1])
+            if parent:
+                parents.append(parent)
+
+            nodes_by_name[node_split[-1]] = node
+            for shape in cmds.listRelatives(node, shapes=True):
+                nodes_by_name[shape.split("|")[-1]] = shape
+
+        return ungrouped_nodes, nodes_by_name, parents
+
+    def process(self, instance):
+        ungrouped_nodes = []
+
+        nodes, content_nodes_by_name, content_parents = self._get_nodes_data(
+            instance.data["setMembers"]
+        )
+        ungrouped_nodes.extend(nodes)
+
+        nodes, proxy_nodes_by_name, proxy_parents = self._get_nodes_data(
+            instance.data.get("proxy", [])
+        )
+        ungrouped_nodes.extend(nodes)
+
+        # Validate against nodes directly parented to world.
+        if ungrouped_nodes:
+            raise PublishValidationError(
+                "Found nodes parented to the world: {}\n"
+                "All nodes need to be grouped.".format(ungrouped_nodes)
+            )
+
+        # Proxy validation.
+        if not instance.data.get("proxy", []):
+            return
+
+        # Validate for content and proxy nodes amount being the same.
+        if len(instance.data["setMembers"]) != len(instance.data["proxy"]):
+            raise PublishValidationError(
+                "Amount of content nodes ({}) and proxy nodes ({}) needs to "
+                "be the same.".format(
+                    len(instance.data["setMembers"]),
+                    len(instance.data["proxy"])
+                )
+            )
+
+        # Validate against content and proxy nodes sharing same parent.
+        if list(set(content_parents) & set(proxy_parents)):
+            raise PublishValidationError(
+                "Content and proxy nodes cannot share the same parent."
+            )
+
+        # Validate for content and proxy nodes sharing same names.
+        sorted_content_names = sorted(content_nodes_by_name.keys())
+        sorted_proxy_names = sorted(proxy_nodes_by_name.keys())
+        odd_content_names = list(
+            set(sorted_content_names) - set(sorted_proxy_names)
+        )
+        odd_content_nodes = [
+            content_nodes_by_name[x] for x in odd_content_names
+        ]
+        odd_proxy_names = list(
+            set(sorted_proxy_names) - set(sorted_content_names)
+        )
+        odd_proxy_nodes = [
+            proxy_nodes_by_name[x] for x in odd_proxy_names
+        ]
+        if not sorted_content_names == sorted_proxy_names:
+            raise PublishValidationError(
+                "Content and proxy nodes need to share the same names.\n"
+                "Content nodes not matching: {}\n"
+                "Proxy nodes not matching: {}".format(
+                    odd_content_nodes, odd_proxy_nodes
+                )
+            )
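Note: the checks above boil down to string operations on DAG paths. A pure-Python sketch of the same name/parent logic, using hand-written paths instead of live Maya nodes:

content = ["|ass_GRP|car_GEO"]
proxy = ["|ass_proxy_GRP|car_GEO"]


def split_names_and_parents(paths):
    names = {path.split("|")[-1] for path in paths}
    parents = {"|".join(path.split("|")[:-1]) for path in paths}
    # A path with exactly one separator segment is parented to the world.
    ungrouped = [path for path in paths if len(path.split("|")) == 2]
    return names, parents, ungrouped


content_names, content_parents, ungrouped = split_names_and_parents(content)
proxy_names, proxy_parents, ungrouped_proxy = split_names_and_parents(proxy)

assert not (ungrouped or ungrouped_proxy), "nodes parented to the world"
assert not (content_parents & proxy_parents), "shared parent"
assert content_names == proxy_names, "names must match"
print("valid")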
@@ -193,7 +193,7 @@ class ImageCreator(Creator):
         instance_data.pop("uuid")

         if not instance_data.get("task"):
-            instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
+            instance_data["task"] = self.create_context.get_current_task_name()

         if not instance_data.get("variant"):
             instance_data["variant"] = ''
@@ -2,8 +2,7 @@ import openpype.hosts.photoshop.api as api
 from openpype.client import get_asset_by_name
 from openpype.pipeline import (
     AutoCreator,
-    CreatedInstance,
-    legacy_io
+    CreatedInstance
 )
 from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances

@@ -38,10 +37,11 @@ class PSWorkfileCreator(AutoCreator):
                 existing_instance = instance
                 break

-        project_name = legacy_io.Session["AVALON_PROJECT"]
-        asset_name = legacy_io.Session["AVALON_ASSET"]
-        task_name = legacy_io.Session["AVALON_TASK"]
-        host_name = legacy_io.Session["AVALON_APP"]
+        context = self.create_context
+        project_name = context.get_current_project_name()
+        asset_name = context.get_current_asset_name()
+        task_name = context.get_current_task_name()
+        host_name = context.host_name
         if existing_instance is None:
             asset_doc = get_asset_by_name(project_name, asset_name)
             subset_name = self.get_subset_name(
@@ -8,7 +8,7 @@ import inspect
 from uuid import uuid4
 from contextlib import contextmanager

-from openpype.client import get_assets
+from openpype.client import get_assets, get_asset_by_name
 from openpype.settings import (
     get_system_settings,
     get_project_settings
@@ -17,13 +17,10 @@ from openpype.lib.attribute_definitions import (
     UnknownDef,
     serialize_attr_defs,
     deserialize_attr_defs,
+    get_default_values,
 )
 from openpype.host import IPublishHost
 from openpype.pipeline import legacy_io
-from openpype.pipeline.mongodb import (
-    AvalonMongoDB,
-    session_data_from_environment,
-)

 from .creator_plugins import (
     Creator,
@@ -1338,8 +1335,6 @@ class CreateContext:
     Args:
         host(ModuleType): Host implementation which handles implementation and
             global metadata.
-        dbcon(AvalonMongoDB): Connection to mongo with context (at least
-            project).
        headless(bool): Context is created out of UI (Current not used).
         reset(bool): Reset context on initialization.
         discover_publish_plugins(bool): Discover publish plugins during reset
@@ -1347,16 +1342,8 @@ class CreateContext:
     """

     def __init__(
-        self, host, dbcon=None, headless=False, reset=True,
-        discover_publish_plugins=True
+        self, host, headless=False, reset=True, discover_publish_plugins=True
     ):
-        # Create conncetion if is not passed
-        if dbcon is None:
-            session = session_data_from_environment(True)
-            dbcon = AvalonMongoDB(session)
-            dbcon.install()
-
-        self.dbcon = dbcon
         self.host = host

         # Prepare attribute for logger (Created on demand in `log` property)
@@ -1380,6 +1367,10 @@ class CreateContext:
                 " Missing methods: {}"
             ).format(joined_methods))

+        self._current_project_name = None
+        self._current_asset_name = None
+        self._current_task_name = None
+
         self._host_is_valid = host_is_valid
         # Currently unused variable
         self.headless = headless
@@ -1499,11 +1490,20 @@ class CreateContext:

     @property
     def host_name(self):
         if hasattr(self.host, "name"):
             return self.host.name
         return os.environ["AVALON_APP"]

-    @property
-    def project_name(self):
-        return self.dbcon.active_project()
+    def get_current_project_name(self):
+        return self._current_project_name
+
+    def get_current_asset_name(self):
+        return self._current_asset_name
+
+    def get_current_task_name(self):
+        return self._current_task_name
+
+    project_name = property(get_current_project_name)

     @property
     def log(self):
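Note: the hunk above replaces the Mongo-backed project_name property with explicit getters backed by plain attributes, keeping project_name as a property alias. A minimal self-contained sketch of that surface, with hand-written context values:

class MiniContext(object):
    def __init__(self):
        self._current_project_name = "demo_project"
        self._current_asset_name = "sh010"
        self._current_task_name = "animation"

    def get_current_project_name(self):
        return self._current_project_name

    def get_current_asset_name(self):
        return self._current_asset_name

    def get_current_task_name(self):
        return self._current_task_name

    # Backwards-compatible property alias, as in the diff.
    project_name = property(get_current_project_name)


context = MiniContext()
print(context.project_name, context.get_current_task_name())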
@@ -1520,7 +1520,7 @@ class CreateContext:

         self.reset_preparation()

-        self.reset_avalon_context()
+        self.reset_current_context()
         self.reset_plugins(discover_publish_plugins)
         self.reset_context_data()

@@ -1567,14 +1567,22 @@ class CreateContext:
         self._collection_shared_data = None
         self.refresh_thumbnails()

-    def reset_avalon_context(self):
-        """Give ability to reset avalon context.
+    def reset_current_context(self):
+        """Refresh current context.
+
+        Reset is based on optional host implementation of `get_current_context`
+        function or using `legacy_io.Session`.

         Some hosts have ability to change context file without using workfiles
-        tool but that change is not propagated to
+        tool but that change is not propagated to 'legacy_io.Session'
+        nor 'os.environ'.
+
+        Todos:
+            UI: Current context should be also checked on save - compare
+                initial values vs. current values.
+            Related to UI checks: Current workfile can be also considered
+                as current context information as that's where the metadata
+                are stored. We should store the workfile (if is available) too.
         """

         project_name = asset_name = task_name = None
@@ -1592,12 +1600,9 @@ class CreateContext:
         if not task_name:
             task_name = legacy_io.Session.get("AVALON_TASK")

-        if project_name:
-            self.dbcon.Session["AVALON_PROJECT"] = project_name
-        if asset_name:
-            self.dbcon.Session["AVALON_ASSET"] = asset_name
-        if task_name:
-            self.dbcon.Session["AVALON_TASK"] = task_name
+        self._current_project_name = project_name
+        self._current_asset_name = asset_name
+        self._current_task_name = task_name

     def reset_plugins(self, discover_publish_plugins=True):
         """Reload plugins.
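Note: reset_current_context() first asks the host for a context dict and only then falls back to the session environment. A hedged sketch of that resolution order; the host/session shapes below are assumptions based on the surrounding diff:

def resolve_context(host, session):
    context = {}
    if hasattr(host, "get_current_context"):
        context = host.get_current_context() or {}

    # Host values win; session environment fills the gaps.
    project_name = context.get("project_name") or session.get("AVALON_PROJECT")
    asset_name = context.get("asset_name") or session.get("AVALON_ASSET")
    task_name = context.get("task_name") or session.get("AVALON_TASK")
    return project_name, asset_name, task_name


class Host(object):
    def get_current_context(self):
        return {"project_name": "demo", "asset_name": "sh010"}


print(resolve_context(Host(), {"AVALON_TASK": "layout"}))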
@@ -1792,40 +1797,128 @@ class CreateContext:
        with self.bulk_instances_collection():
            self._bulk_instances_to_process.append(instance)

    def create(self, identifier, *args, **kwargs):
        """Wrapper for creators to trigger created.
    def _get_creator_in_create(self, identifier):
        """Creator by identifier with unified error.

        Different types of creators may expect different arguments thus the
        hints for args are blind.
        Helper method to get creator by identifier with the same error when
        the creator is not available.

        Args:
            identifier (str): Creator's identifier.
            *args (Tuple[Any]): Arguments for create method.
            **kwargs (Dict[Any, Any]): Keyword arguments for create method.
            identifier (str): Identifier of creator plugin.

        Returns:
            BaseCreator: Creator found by identifier.

        Raises:
            CreatorError: When identifier is not known.
        """

        error_message = "Failed to run Creator with identifier \"{}\". {}"
        creator = self.creators.get(identifier)
        label = getattr(creator, "label", None)
        failed = False
        add_traceback = False
        exc_info = None
        try:
            # Fake CreatorError (Could be maybe specific exception?)
            if creator is None:
        # Fake CreatorError (Could be maybe specific exception?)
        if creator is None:
            raise CreatorError(
                "Creator {} was not found".format(identifier)
            )
        return creator

    def create(
        self,
        creator_identifier,
        variant,
        asset_doc=None,
        task_name=None,
        pre_create_data=None
    ):
        """Trigger create of plugins with standardized arguments.

        Arguments 'asset_doc' and 'task_name' use current context as default
        values. If only 'task_name' is provided it will be overridden by the
        task name from current context. If 'task_name' is not provided
        when 'asset_doc' is, it is considered that task name is not specified,
        which can lead to an error if subset name template requires task name.

        Args:
            creator_identifier (str): Identifier of creator plugin.
            variant (str): Variant used for subset name.
            asset_doc (Dict[str, Any]): Asset document which defines context
                of creation (possible context of created instance/s).
            task_name (str): Name of task to which the context is related.
            pre_create_data (Dict[str, Any]): Pre-create attribute values.

        Returns:
            Any: Output of triggered creator's 'create' method.

        Raises:
            CreatorError: If creator was not found or asset is empty.
        """

        creator = self._get_creator_in_create(creator_identifier)

        project_name = self.project_name
        if asset_doc is None:
            asset_name = self.get_current_asset_name()
            asset_doc = get_asset_by_name(project_name, asset_name)
            task_name = self.get_current_task_name()
        if asset_doc is None:
            raise CreatorError(
                "Creator {} was not found".format(identifier)
                "Asset with name {} was not found".format(asset_name)
            )

            creator.create(*args, **kwargs)
        if pre_create_data is None:
            pre_create_data = {}

        precreate_attr_defs = creator.get_pre_create_attr_defs() or []
        # Create default values of precreate data
        _pre_create_data = get_default_values(precreate_attr_defs)
        # Update passed precreate data to default values
        # TODO validate types
        _pre_create_data.update(pre_create_data)

        subset_name = creator.get_subset_name(
            variant,
            task_name,
            asset_doc,
            project_name,
            self.host_name
        )
        instance_data = {
            "asset": asset_doc["name"],
            "task": task_name,
            "family": creator.family,
            "variant": variant
        }
        return creator.create(
            subset_name,
            instance_data,
            _pre_create_data
        )
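
The reworked `create` now has a standardized signature instead of the blind `*args` wrapper. A minimal usage sketch, assuming a registered host and an already-reset context; the creator identifier and the `use_selection` key are hypothetical, not taken from this commit:

```python
from openpype.pipeline import registered_host, CreatorError
from openpype.pipeline.create import CreateContext

host = registered_host()
create_context = CreateContext(host)  # assuming reset runs by default

try:
    # 'asset_doc' and 'task_name' are omitted, so the current
    # context (asset/task) is used as the default.
    create_context.create(
        "io.openpype.creators.workfile",  # hypothetical identifier
        variant="Main",
        pre_create_data={"use_selection": True},  # hypothetical key
    )
except CreatorError as exc:
    print("Create failed: {}".format(exc))
```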

    def _create_with_unified_error(
        self, identifier, creator, *args, **kwargs
    ):
        error_message = "Failed to run Creator with identifier \"{}\". {}"

        label = None
        add_traceback = False
        result = None
        fail_info = None
        success = False

        try:
            # Try to get creator and its label
            if creator is None:
                creator = self._get_creator_in_create(identifier)
            label = getattr(creator, "label", label)

            # Run create
            result = creator.create(*args, **kwargs)
            success = True

        except CreatorError:
            failed = True
            exc_info = sys.exc_info()
            self.log.warning(error_message.format(identifier, exc_info[1]))

        except:
            failed = True
            add_traceback = True
            exc_info = sys.exc_info()
            self.log.warning(

@@ -1833,12 +1926,35 @@ class CreateContext:
                exc_info=True
            )

        if failed:
            raise CreatorsCreateFailed([
                prepare_failed_creator_operation_info(
                    identifier, label, exc_info, add_traceback
                )
            ])
        if not success:
            fail_info = prepare_failed_creator_operation_info(
                identifier, label, exc_info, add_traceback
            )
        return result, fail_info

    def create_with_unified_error(self, identifier, *args, **kwargs):
        """Trigger create but raise only one error if anything fails.

        Added to raise unified exception. Capture any possible issues and
        reraise them with unified information.

        Args:
            identifier (str): Identifier of creator.
            *args (Tuple[Any]): Arguments for create method.
            **kwargs (Dict[Any, Any]): Keyword arguments for create method.

        Raises:
            CreatorsCreateFailed: When creation fails for any possible
                reason. If anything goes wrong this is the only exception
                the method should raise.
        """

        result, fail_info = self._create_with_unified_error(
            identifier, None, *args, **kwargs
        )
        if fail_info is not None:
            raise CreatorsCreateFailed([fail_info])
        return result
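
Callers get exactly one failure type out of this wrapper. A hedged sketch mirroring how the publisher tool invokes it; the identifier and data arguments are illustrative and the `CreatorsCreateFailed` import path is an assumption:

```python
from openpype.pipeline.create import CreatorsCreateFailed  # assumed import path

try:
    create_context.create_with_unified_error(
        "io.openpype.creators.workfile",  # hypothetical identifier
        subset_name, instance_data, pre_create_data
    )
except CreatorsCreateFailed as exc:
    # Raised whether the creator raised CreatorError or any other exception.
    print(exc)
```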

    def _remove_instance(self, instance):
        self._instances_by_id.pop(instance.id, None)

@@ -1968,38 +2084,12 @@ class CreateContext:
        Reset instances if any autocreator executed properly.
        """

        error_message = "Failed to run AutoCreator with identifier \"{}\". {}"
        failed_info = []
        for creator in self.sorted_autocreators:
            identifier = creator.identifier
            label = creator.label
            failed = False
            add_traceback = False
            try:
                creator.create()

            except CreatorError:
                failed = True
                exc_info = sys.exc_info()
                self.log.warning(error_message.format(identifier, exc_info[1]))

            # Use bare except because some hosts raise their exceptions that
            # do not inherit from python's `BaseException`
            except:
                failed = True
                add_traceback = True
                exc_info = sys.exc_info()
                self.log.warning(
                    error_message.format(identifier, ""),
                    exc_info=True
                )

            if failed:
                failed_info.append(
                    prepare_failed_creator_operation_info(
                        identifier, label, exc_info, add_traceback
                    )
                )
            _, fail_info = self._create_with_unified_error(identifier, creator)
            if fail_info is not None:
                failed_info.append(fail_info)

        if failed_info:
            raise CreatorsCreateFailed(failed_info)
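
With the loop body delegated to `_create_with_unified_error`, failed autocreators are collected and reported together instead of aborting on the first error. A hedged sketch, under the assumption that autocreators are executed as part of the context reset:

```python
from openpype.pipeline.create import CreatorsCreateFailed  # assumed import path

try:
    create_context.reset()  # assumption: reset also runs autocreators
except CreatorsCreateFailed as exc:
    # All failed autocreators arrive in one aggregated exception.
    print(exc)
```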


@@ -28,7 +28,6 @@ from openpype.lib import (
    TemplateUnsolved,
)
from openpype.pipeline import (
    schema,
    legacy_io,
    Anatomy,
)

@@ -643,7 +642,10 @@ def get_representation_path(representation, root=None, dbcon=None):

    def path_from_config():
        try:
            version_, subset, asset, project = dbcon.parenthood(representation)
            project_name = dbcon.active_project()
            version_, subset, asset, project = get_representation_parents(
                project_name, representation
            )
        except ValueError:
            log.debug(
                "Representation %s wasn't found in database, "
@@ -10,11 +10,17 @@ import six
import pyblish.plugin
import pyblish.api

from openpype.lib import Logger, filter_profiles
from openpype.lib import (
    Logger,
    filter_profiles
)
from openpype.settings import (
    get_project_settings,
    get_system_settings,
)
from openpype.pipeline import (
    tempdir
)

from .constants import (
    DEFAULT_PUBLISH_TEMPLATE,

@@ -595,7 +601,7 @@ def context_plugin_should_run(plugin, context):

    Args:
        plugin (pyblish.api.Plugin): Plugin with filters.
        context (pyblish.api.Context): Pyblish context with insances.
        context (pyblish.api.Context): Pyblish context with instances.

    Returns:
        bool: Context plugin should run based on valid instances.
@@ -609,12 +615,21 @@ def context_plugin_should_run(plugin, context):

def get_instance_staging_dir(instance):
    """Unified way how staging dir is stored and created on instances.

    First check if 'stagingDir' is already set in instance data. If there is
    not create new in tempdir.
    First check if 'stagingDir' is already set in instance data.
    In case it already is, a new tempdir will not be created.

    It also supports `OPENPYPE_TMPDIR`, so a studio can define its own temp
    shared repository per project or even per more granular context.
    Template formatting is supported also with optional keys. The folder is
    created in case it doesn't exist.

    Available anatomy formatting keys:
        - root[work | <root name key>]
        - project[name | code]

    Note:
        Staging dir does not have to be necessarily in tempdir so be carefull
        about it's usage.
        Staging dir does not have to be necessarily in tempdir so be careful
        about its usage.

    Args:
        instance (pyblish.lib.Instance): Instance for which we want to get

@@ -623,12 +638,27 @@ def get_instance_staging_dir(instance):
    Returns:
        str: Path to staging dir of instance.
    """
    staging_dir = instance.data.get('stagingDir')
    if staging_dir:
        return staging_dir

    staging_dir = instance.data.get("stagingDir")
    if not staging_dir:
        anatomy = instance.context.data.get("anatomy")

        # get customized tempdir path from `OPENPYPE_TMPDIR` env var
        custom_temp_dir = tempdir.create_custom_tempdir(
            anatomy.project_name, anatomy)

        if custom_temp_dir:
            staging_dir = os.path.normpath(
                tempfile.mkdtemp(
                    prefix="pyblish_tmp_",
                    dir=custom_temp_dir
                )
            )
        else:
            staging_dir = os.path.normpath(
                tempfile.mkdtemp(prefix="pyblish_tmp_")
            )
        instance.data["stagingDir"] = staging_dir
    instance.data['stagingDir'] = staging_dir

    return staging_dir
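
A minimal sketch of a publish plugin using the helper; the plugin class is illustrative and the import path is an assumption:

```python
import pyblish.api

from openpype.pipeline.publish import get_instance_staging_dir  # assumed path


class ExtractExample(pyblish.api.InstancePlugin):
    """Illustrative extractor writing into the unified staging dir."""

    order = pyblish.api.ExtractorOrder
    label = "Extract Example"

    def process(self, instance):
        # First call creates the tempdir (honoring OPENPYPE_TMPDIR);
        # subsequent calls reuse instance.data["stagingDir"].
        staging_dir = get_instance_staging_dir(instance)
        self.log.info("Staging to %s", staging_dir)
```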

openpype/pipeline/tempdir.py (new file, 59 lines)
@@ -0,0 +1,59 @@
"""
Temporary folder operations
"""

import os
from openpype.lib import StringTemplate
from openpype.pipeline import Anatomy


def create_custom_tempdir(project_name, anatomy=None):
    """Create custom tempdir.

    Template path formatting supports:
        - optional key formatting
        - available keys:
            - root[work | <root name key>]
            - project[name | code]

    Args:
        project_name (str): Project name.
        anatomy (openpype.pipeline.Anatomy) [optional]: Anatomy object.

    Returns:
        str | None: Formatted path or None.
    """
    openpype_tempdir = os.getenv("OPENPYPE_TMPDIR")
    if not openpype_tempdir:
        return

    custom_tempdir = None
    if "{" in openpype_tempdir:
        if anatomy is None:
            anatomy = Anatomy(project_name)
        # create base format data
        data = {
            "root": anatomy.roots,
            "project": {
                "name": anatomy.project_name,
                "code": anatomy.project_code,
            }
        }
        # path is an anatomy template
        custom_tempdir = StringTemplate.format_template(
            openpype_tempdir, data).normalized()

    else:
        # path is absolute
        custom_tempdir = openpype_tempdir

    # create the dir path if it doesn't exist
    if not os.path.exists(custom_tempdir):
        try:
            # create it if it doesn't exist
            os.makedirs(custom_tempdir)
        except IOError as error:
            raise IOError(
                "Path couldn't be created: {}".format(error)) from error

    return custom_tempdir
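
For illustration only, resolving an anatomy-template value of `OPENPYPE_TMPDIR` could look like this; the project name and printed path are made up:

```python
import os
from openpype.pipeline.tempdir import create_custom_tempdir

os.environ["OPENPYPE_TMPDIR"] = "{root[work]}/{project[code]}/temp"

# Anatomy is built internally from the project name when not passed in.
path = create_custom_tempdir("my_project")  # hypothetical project
print(path)  # e.g. "/mnt/work/mp/temp", depending on studio roots
```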

@@ -32,7 +32,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
            thumbnail_paths_by_instance_id.get(None)
        )

        project_name = create_context.project_name
        project_name = create_context.get_current_project_name()
        if project_name:
            context.data["projectName"] = project_name

@@ -53,11 +53,15 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
        context.data.update(create_context.context_data_to_store())
        context.data["newPublishing"] = True
        # Update context data
        for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"):
            value = create_context.dbcon.Session.get(key)
            if value is not None:
                legacy_io.Session[key] = value
                os.environ[key] = value
        asset_name = create_context.get_current_asset_name()
        task_name = create_context.get_current_task_name()
        for key, value in (
            ("AVALON_PROJECT", project_name),
            ("AVALON_ASSET", asset_name),
            ("AVALON_TASK", task_name)
        ):
            legacy_io.Session[key] = value
            os.environ[key] = value

    def create_instance(
        self,
@@ -246,6 +246,7 @@
            "sourcetype": "python",
            "title": "Gizmo Note",
            "command": "nuke.nodes.StickyNote(label='You can create your own toolbar menu in the Nuke GizmoMenu of OpenPype')",
            "icon": "",
            "shortcut": ""
        }
    ]

@@ -17,6 +17,11 @@
        "key": "menu",
        "label": "OpenPype Menu shortcuts",
        "children": [
            {
                "type": "text",
                "key": "create",
                "label": "Create..."
            },
            {
                "type": "text",
                "key": "publish",

@@ -288,4 +293,4 @@
        "name": "schema_publish_gui_filter"
    }
]
}
}
@@ -1573,20 +1573,19 @@ class PublisherController(BasePublisherController):
    Handle both creation and publishing parts.

    Args:
        dbcon (AvalonMongoDB): Connection to mongo with context.
        headless (bool): Headless publishing. ATM not implemented or used.
    """

    _log = None

    def __init__(self, dbcon=None, headless=False):
    def __init__(self, headless=False):
        super(PublisherController, self).__init__()

        self._host = registered_host()
        self._headless = headless

        self._create_context = CreateContext(
            self._host, dbcon, headless=headless, reset=False
            self._host, headless=headless, reset=False
        )

        self._publish_plugins_proxy = None

@@ -1740,7 +1739,7 @@ class PublisherController(BasePublisherController):
        self._create_context.reset_preparation()

        # Reset avalon context
        self._create_context.reset_avalon_context()
        self._create_context.reset_current_context()

        self._asset_docs_cache.reset()

@@ -2004,9 +2003,10 @@ class PublisherController(BasePublisherController):

        success = True
        try:
            self._create_context.create(
            self._create_context.create_with_unified_error(
                creator_identifier, subset_name, instance_data, options
            )

        except CreatorsOperationFailed as exc:
            success = False
            self._emit_event(
@@ -566,24 +566,24 @@ class PublisherWindow(QtWidgets.QDialog):
    def _go_to_publish_tab(self):
        self._set_current_tab("publish")

    def _go_to_details_tab(self):
        self._set_current_tab("details")

    def _go_to_report_tab(self):
        self._set_current_tab("report")

    def _go_to_details_tab(self):
        self._set_current_tab("details")

    def _is_on_create_tab(self):
        return self._is_current_tab("create")

    def _is_on_publish_tab(self):
        return self._is_current_tab("publish")

    def _is_on_details_tab(self):
        return self._is_current_tab("details")

    def _is_on_report_tab(self):
        return self._is_current_tab("report")

    def _is_on_details_tab(self):
        return self._is_current_tab("details")

    def _set_publish_overlay_visibility(self, visible):
        if visible:
            widget = self._publish_overlay

@@ -647,16 +647,10 @@ class PublisherWindow(QtWidgets.QDialog):
        # otherwise 'create' is used
        # - this happens only on first show
        if first_reset:
            if self._overview_widget.has_items():
                self._go_to_publish_tab()
            else:
                self._go_to_create_tab()
            self._go_to_create_tab()

        elif (
            not self._is_on_create_tab()
            and not self._is_on_publish_tab()
        ):
            # If current tab is not 'Create' or 'Publish' go to 'Publish'
        elif self._is_on_report_tab():
            # Go to 'Publish' tab if is on 'Details' tab
            # - this can happen when publishing started and was reset
            # at that moment it doesn't make sense to stay at publish
            # specific tabs.
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.1-nightly.5"
__version__ = "3.15.1-nightly.6"
website/docs/admin_environment.md (new file, 30 lines)
@@ -0,0 +1,30 @@
---
id: admin_environment
title: Environment
sidebar_label: Environment
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

## OPENPYPE_TMPDIR
- Custom staging directory
- Supports anatomy keys formatting, e.g. `{root[work]}/{project[name]}/temp`
- Supported formatting keys:
  - root[work]
  - project[name | code]

## OPENPYPE_DEBUG
- Sets the logger to debug mode
- Example value: "1" (to activate)

## OPENPYPE_LOG_LEVEL
- Stringified numeric value of log level. [Here for more info](https://docs.python.org/3/library/logging.html#logging-levels)
- Example value: "10"

## OPENPYPE_MONGO
- If set, it takes precedence over the connection string stored in keyring
- For more details on how to use it go [here](admin_use#check-for-mongodb-database-connection)

## OPENPYPE_USERNAME
- If set, it overrides the system-created username
@@ -13,18 +13,23 @@ Settings applicable to the full studio.



**`Studio Name`** - Full name of the studio (can be used as variable on some places)
### Studio Name
Full name of the studio (can be used as a variable in some places)

**`Studio Code`** - Studio acronym or a short code (can be used as variable on some places)
### Studio Code
Studio acronym or a short code (can be used as a variable in some places)

**`Admin Password`** - After setting admin password, normal user won't have access to OpenPype settings
### Admin Password
After setting an admin password, normal users won't have access to the OpenPype settings
and Project Manager GUI. Please keep in mind that this is a studio-wide password and it is meant purely
as a simple barrier to prevent artists from accidental setting changes.

**`Environment`** - Globally applied environment variables that will be appended to any OpenPype process in the studio.
### Environment
Globally applied environment variables that will be appended to any OpenPype process in the studio.

**`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up.
Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume).
### Disk mapping
- Platform-dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machine before OP starts up.
- Uses the `subst` command; if the volume character configured in the `Destination` field already exists, no re-mapping is done for that character (volume).

### FFmpeg and OpenImageIO tools
We bundle FFmpeg tools for all platforms and OpenImageIO tools for Windows and Linux. By default, bundled tools are used, but it is possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings environments to look for them in a different directory.

@@ -171,4 +176,4 @@ In the image before you can see that we set most of the environment variables in
In this example MTOA will automatically fill the `MAYA_VERSION` (which is set by the Maya Application environment) and `MTOA_VERSION` into the `MTOA` variable. We then use `MTOA` to set all the other variables needed for it to function within Maya.


All of the tools defined in here can then be assigned to projects. You can also change the tools versions on any project level all the way down to individual asset or shot overrides. So if you just need to upgrade you render plugin for a single shot, while not risking the incompatibilities on the rest of the project, it is possible.
All the tools defined here can then be assigned to projects. You can also change the tool versions on any project level, all the way down to individual asset or shot overrides. So if you just need to upgrade your render plugin for a single shot, without risking incompatibilities on the rest of the project, it is possible.
@@ -308,6 +308,8 @@ Select its root and Go **OpenPype → Create...** and select **Point Cache**.

After that, publishing will create corresponding **abc** files.

When creating the instance, an objectset child `proxy` will be created. Meshes in the `proxy` objectset will be the viewport representation where loading supports proxies. Proxy representations are stored as `resources` of the subset.

Example setup:



@@ -315,6 +317,7 @@ Example setup:
:::note Publish on farm
If your studio has Deadline configured, artists can choose to offload the potentially long-running pointcache export and publish it to the farm.
The only thing necessary is to toggle the `Farm` property in the created pointcache instance to True.
:::

### Loading Point Caches

website/docs/artist_hosts_maya_arnold.md (new file, 30 lines)
@@ -0,0 +1,30 @@
---
id: artist_hosts_maya_arnold
title: Arnold for Maya
sidebar_label: Arnold
---
## Arnold Scene Source (.ass)
Arnold Scene Source can be published as a single file or a sequence of files, determined by the frame range.

When creating the instance, two objectsets are created: `content` and `proxy`. Meshes in the `proxy` objectset will be the viewport representation when loading as `standin`. Proxy representations are stored as `resources` of the subset.

### Arnold Scene Source Proxy Workflow
In order to utilize operators and proxies, the content and proxy nodes need to share the same names (including the shape names). This is done by parenting the content and proxy nodes into separate groups. For example:

![](assets/maya-arnold_scene_source.png)

## Standin
Arnold Scene Source `ass` and Alembic `abc` files are supported for loading as standins.

### Standin Proxy Workflow
If a subset has a proxy representation, it will be used as the display in the viewport. At render time the standin path will be replaced using the recommended string replacement workflow:

https://help.autodesk.com/view/ARNOL/ENU/?guid=arnold_for_maya_operators_am_Updating_procedural_file_paths_with_string_replace_html

Since the content and proxy nodes share the same names and hierarchy, any manual shader assignments will be shared.


:::note for advanced users
You can stop the proxy swapping by disabling the string replacement operator found in the container.
![](assets/maya-arnold_standin.png)
:::

website/docs/assets/maya-arnold_scene_source.png (new binary file, 16 KiB; not shown)
website/docs/assets/maya-arnold_standin.png (new binary file, 42 KiB; not shown)
Binary file changed (49 KiB before, 86 KiB after; not shown)
@@ -44,6 +44,7 @@ module.exports = {
        "artist_hosts_maya_multiverse",
        "artist_hosts_maya_yeti",
        "artist_hosts_maya_xgen",
        "artist_hosts_maya_arnold",
        "artist_hosts_maya_vray",
        "artist_hosts_maya_redshift",
      ],

@@ -86,6 +87,7 @@ module.exports = {
      type: "category",
      label: "Configuration",
      items: [
        "admin_environment",
        "admin_settings",
        "admin_settings_system",
        "admin_settings_project_anatomy",

@@ -7180,9 +7180,9 @@ typedarray-to-buffer@^3.1.5:
    is-typedarray "^1.0.0"

ua-parser-js@^0.7.30:
  version "0.7.31"
  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.31.tgz#649a656b191dffab4f21d5e053e27ca17cbff5c6"
  integrity sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ==
  version "0.7.33"
  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532"
  integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw==

unherit@^1.0.4:
  version "1.1.3"