mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-25 05:14:40 +01:00

[Automated] Merged develop into main
This commit is contained in: commit a46c7cfff3

52 changed files with 2888 additions and 1205 deletions

@@ -1,945 +0,0 @@
# -*- coding: utf-8 -*-
"""Module handling expected render output from Maya.

This module is used in :mod:`collect_render` and :mod:`collect_vray_scene`.

Note:
    To implement a new renderer, create a new class inheriting from
    :class:`AExpectedFiles` and add it to :func:`ExpectedFiles.get()`.

Attributes:
    R_SINGLE_FRAME (:class:`re.Pattern`): Find single frame number.
    R_FRAME_RANGE (:class:`re.Pattern`): Find frame range.
    R_FRAME_NUMBER (:class:`re.Pattern`): Find frame number in string.
    R_LAYER_TOKEN (:class:`re.Pattern`): Find layer token in image prefixes.
    R_AOV_TOKEN (:class:`re.Pattern`): Find AOV token in image prefixes.
    R_SUBSTITUTE_AOV_TOKEN (:class:`re.Pattern`): Find and substitute AOV token
        in image prefixes.
    R_REMOVE_AOV_TOKEN (:class:`re.Pattern`): Find and remove AOV token in
        image prefixes.
    R_CLEAN_FRAME_TOKEN (:class:`re.Pattern`): Find and remove unfilled
        Renderman frame token in image prefix.
    R_CLEAN_EXT_TOKEN (:class:`re.Pattern`): Find and remove unfilled Renderman
        extension token in image prefix.
    R_SUBSTITUTE_LAYER_TOKEN (:class:`re.Pattern`): Find and substitute render
        layer token in image prefixes.
    R_SUBSTITUTE_SCENE_TOKEN (:class:`re.Pattern`): Find and substitute scene
        token in image prefixes.
    R_SUBSTITUTE_CAMERA_TOKEN (:class:`re.Pattern`): Find and substitute camera
        token in image prefixes.
    RENDERER_NAMES (dict): Renderer names mapping between reported name and
        *human readable* name.
    IMAGE_PREFIXES (dict): Mapping between renderers and their respective
        image prefix attribute names.

Todo:
    Determine `multipart` from render instance.

"""

import types
import re
import os
from abc import ABCMeta, abstractmethod

import six
import attr

import openpype.hosts.maya.api.lib as lib

from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup


R_SINGLE_FRAME = re.compile(r"^(-?)\d+$")
R_FRAME_RANGE = re.compile(r"^(?P<sf>(-?)\d+)-(?P<ef>(-?)\d+)$")
R_FRAME_NUMBER = re.compile(r".+\.(?P<frame>[0-9]+)\..+")
R_LAYER_TOKEN = re.compile(
    r".*((?:%l)|(?:<layer>)|(?:<renderlayer>)).*", re.IGNORECASE
)
R_AOV_TOKEN = re.compile(r".*%a.*|.*<aov>.*|.*<renderpass>.*", re.IGNORECASE)
R_SUBSTITUTE_AOV_TOKEN = re.compile(r"%a|<aov>|<renderpass>", re.IGNORECASE)
R_REMOVE_AOV_TOKEN = re.compile(
    r"_%a|\.%a|_<aov>|\.<aov>|_<renderpass>|\.<renderpass>", re.IGNORECASE)
# to remove unused renderman tokens
R_CLEAN_FRAME_TOKEN = re.compile(r"\.?<f\d>\.?", re.IGNORECASE)
R_CLEAN_EXT_TOKEN = re.compile(r"\.?<ext>\.?", re.IGNORECASE)

R_SUBSTITUTE_LAYER_TOKEN = re.compile(
    r"%l|<layer>|<renderlayer>", re.IGNORECASE
)
R_SUBSTITUTE_CAMERA_TOKEN = re.compile(r"%c|<camera>", re.IGNORECASE)
R_SUBSTITUTE_SCENE_TOKEN = re.compile(r"%s|<scene>", re.IGNORECASE)

RENDERER_NAMES = {
    "mentalray": "MentalRay",
    "vray": "V-Ray",
    "arnold": "Arnold",
    "renderman": "Renderman",
    "redshift": "Redshift",
}

# not sure about the renderman image prefix
IMAGE_PREFIXES = {
    "mentalray": "defaultRenderGlobals.imageFilePrefix",
    "vray": "vraySettings.fileNamePrefix",
    "arnold": "defaultRenderGlobals.imageFilePrefix",
    "renderman": "rmanGlobals.imageFileFormat",
    "redshift": "defaultRenderGlobals.imageFilePrefix",
}

@attr.s
class LayerMetadata(object):
    """Data class for Render Layer metadata."""
    frameStart = attr.ib()
    frameEnd = attr.ib()
    cameras = attr.ib()
    sceneName = attr.ib()
    layerName = attr.ib()
    renderer = attr.ib()
    defaultExt = attr.ib()
    filePrefix = attr.ib()
    enabledAOVs = attr.ib()
    frameStep = attr.ib(default=1)
    padding = attr.ib(default=4)


class ExpectedFiles:
    """Class grouping functionality for all supported renderers.

    Attributes:
        multipart (bool): Flag if multipart exrs are used.

    """

    multipart = False

    def __init__(self, render_instance):
        """Constructor."""
        self._render_instance = render_instance

    def get(self, renderer, layer):
        """Get expected files for given renderer and render layer.

        Args:
            renderer (str): Name of renderer
            layer (str): Name of render layer

        Returns:
            dict: Expected rendered files by AOV

        Raises:
            :exc:`UnsupportedRendererException`: If the requested renderer
                is not supported. It needs to be implemented by extending
                :class:`AExpectedFiles` and added to this method's ``if``
                statement.

        """
        renderSetup.instance().switchToLayerUsingLegacyName(layer)

        if renderer.lower() == "arnold":
            return self._get_files(ExpectedFilesArnold(layer,
                                                       self._render_instance))
        if renderer.lower() == "vray":
            return self._get_files(ExpectedFilesVray(
                layer, self._render_instance))
        if renderer.lower() == "redshift":
            return self._get_files(ExpectedFilesRedshift(
                layer, self._render_instance))
        if renderer.lower() == "mentalray":
            return self._get_files(ExpectedFilesMentalray(
                layer, self._render_instance))
        if renderer.lower() == "renderman":
            return self._get_files(ExpectedFilesRenderman(
                layer, self._render_instance))

        raise UnsupportedRendererException(
            "unsupported {}".format(renderer)
        )

    def _get_files(self, renderer):
        # type: (AExpectedFiles) -> list
        files = renderer.get_files()
        self.multipart = renderer.multipart
        return files


@six.add_metaclass(ABCMeta)
class AExpectedFiles:
    """Abstract class with common code for all renderers.

    Attributes:
        renderer (str): name of renderer.
        layer (str): name of render layer.
        multipart (bool): flag for multipart exrs.

    """

    renderer = None
    layer = None
    multipart = False

    def __init__(self, layer, render_instance):
        """Constructor."""
        self.layer = layer
        self.render_instance = render_instance

    @abstractmethod
    def get_aovs(self):
        """To be implemented by renderer class."""

    @staticmethod
    def sanitize_camera_name(camera):
        """Sanitize camera name.

        Remove Maya illegal characters from camera name.

        Args:
            camera (str): Maya camera name.

        Returns:
            (str): sanitized camera name

        Example:
            >>> AExpectedFiles.sanitize_camera_name('test:camera_01')
            test_camera_01

        """
        return re.sub('[^0-9a-zA-Z_]+', '_', camera)

    def get_renderer_prefix(self):
        """Return prefix for specific renderer.

        This is the same for most renderers and can be overridden if needed.

        Returns:
            str: String with image prefix containing tokens

        Raises:
            :exc:`UnsupportedRendererException`: If we requested an image
                prefix for a renderer we know nothing about.
                See :data:`IMAGE_PREFIXES` for the mapping of renderers and
                image prefixes.

        """
        try:
            file_prefix = cmds.getAttr(IMAGE_PREFIXES[self.renderer])
        except KeyError:
            raise UnsupportedRendererException(
                "Unsupported renderer {}".format(self.renderer)
            )
        return file_prefix

    def _get_layer_data(self):
        # type: () -> LayerMetadata
        #                      ______________________________________________
        # ____________________/ ____________________________________________/
        # 1 - get scene name  /__________________/
        # ____________________/
        _, scene_basename = os.path.split(cmds.file(q=True, loc=True))
        scene_name, _ = os.path.splitext(scene_basename)

        file_prefix = self.get_renderer_prefix()

        if not file_prefix:
            raise RuntimeError("Image prefix not set")

        layer_name = self.layer
        if self.layer.startswith("rs_"):
            layer_name = self.layer[3:]

        return LayerMetadata(
            frameStart=int(self.get_render_attribute("startFrame")),
            frameEnd=int(self.get_render_attribute("endFrame")),
            frameStep=int(self.get_render_attribute("byFrameStep")),
            padding=int(self.get_render_attribute("extensionPadding")),
            # if we have <camera> token in prefix path we'll expect output for
            # every renderable camera in layer.
            cameras=self.get_renderable_cameras(),
            sceneName=scene_name,
            layerName=layer_name,
            renderer=self.renderer,
            defaultExt=cmds.getAttr("defaultRenderGlobals.imfPluginKey"),
            filePrefix=file_prefix,
            enabledAOVs=self.get_aovs()
        )

    def _generate_single_file_sequence(
            self, layer_data, force_aov_name=None):
        # type: (LayerMetadata, str) -> list
        expected_files = []
        for cam in layer_data.cameras:
            file_prefix = layer_data.filePrefix
            mappings = (
                (R_SUBSTITUTE_SCENE_TOKEN, layer_data.sceneName),
                (R_SUBSTITUTE_LAYER_TOKEN, layer_data.layerName),
                (R_SUBSTITUTE_CAMERA_TOKEN, self.sanitize_camera_name(cam)),
                # this is required to remove unfilled aov token, for example
                # in Redshift
                (R_REMOVE_AOV_TOKEN, "") if not force_aov_name \
                else (R_SUBSTITUTE_AOV_TOKEN, force_aov_name),

                (R_CLEAN_FRAME_TOKEN, ""),
                (R_CLEAN_EXT_TOKEN, ""),
            )

            for regex, value in mappings:
                file_prefix = re.sub(regex, value, file_prefix)

            for frame in range(
                    int(layer_data.frameStart),
                    int(layer_data.frameEnd) + 1,
                    int(layer_data.frameStep),
            ):
                expected_files.append(
                    "{}.{}.{}".format(
                        file_prefix,
                        str(frame).rjust(layer_data.padding, "0"),
                        layer_data.defaultExt,
                    )
                )
        return expected_files

    def _generate_aov_file_sequences(self, layer_data):
        # type: (LayerMetadata) -> list
        expected_files = []
        aov_file_list = {}
        for aov in layer_data.enabledAOVs:
            for cam in layer_data.cameras:
                file_prefix = layer_data.filePrefix

                mappings = (
                    (R_SUBSTITUTE_SCENE_TOKEN, layer_data.sceneName),
                    (R_SUBSTITUTE_LAYER_TOKEN, layer_data.layerName),
                    (R_SUBSTITUTE_CAMERA_TOKEN,
                     self.sanitize_camera_name(cam)),
                    (R_SUBSTITUTE_AOV_TOKEN, aov[0]),
                    (R_CLEAN_FRAME_TOKEN, ""),
                    (R_CLEAN_EXT_TOKEN, ""),
                )

                for regex, value in mappings:
                    file_prefix = re.sub(regex, value, file_prefix)

                aov_files = []
                for frame in range(
                        int(layer_data.frameStart),
                        int(layer_data.frameEnd) + 1,
                        int(layer_data.frameStep),
                ):
                    aov_files.append(
                        "{}.{}.{}".format(
                            file_prefix,
                            str(frame).rjust(layer_data.padding, "0"),
                            aov[1],
                        )
                    )

                # if we have more than one renderable camera, append
                # camera name to AOV to allow per camera AOVs.
                aov_name = aov[0]
                if len(layer_data.cameras) > 1:
                    aov_name = "{}_{}".format(aov[0],
                                              self.sanitize_camera_name(cam))

                aov_file_list[aov_name] = aov_files
                file_prefix = layer_data.filePrefix

        expected_files.append(aov_file_list)
        return expected_files

    def get_files(self):
        """Return list of expected files.

        It will translate render token strings ('<RenderPass>', etc.) to
        their values. This task is tricky as every renderer deals with this
        differently. It depends on the `get_aovs()` abstract method
        implemented for every supported renderer.

        """
        layer_data = self._get_layer_data()

        expected_files = []
        if layer_data.enabledAOVs:
            return self._generate_aov_file_sequences(layer_data)
        else:
            return self._generate_single_file_sequence(layer_data)

    def get_renderable_cameras(self):
        # type: () -> list
        """Get all renderable cameras.

        Returns:
            list: list of renderable cameras.

        """
        cam_parents = [
            cmds.listRelatives(x, ap=True)[-1] for x in cmds.ls(cameras=True)
        ]

        return [
            cam
            for cam in cam_parents
            if self.maya_is_true(cmds.getAttr("{}.renderable".format(cam)))
        ]

    @staticmethod
    def maya_is_true(attr_val):
        """Whether a Maya attr evaluates to True.

        When querying an attribute value from an ambiguous object the
        Maya API will return a list of values, which needs to be handled
        to evaluate properly.

        Args:
            attr_val (mixed): Maya attribute to be evaluated as bool.

        Returns:
            bool: cast Maya attribute to Pythons boolean value.

        """
        if isinstance(attr_val, types.BooleanType):
            return attr_val
        if isinstance(attr_val, (types.ListType, types.GeneratorType)):
            return any(attr_val)

        return bool(attr_val)

    @staticmethod
    def get_layer_overrides(attribute):
        """Get overrides for attribute on current render layer.

        Args:
            attribute (str): Maya attribute name.

        Returns:
            Value of attribute override.

        """
        connections = cmds.listConnections(attribute, plugs=True)
        if connections:
            for connection in connections:
                if connection:
                    # node_name = connection.split(".")[0]

                    attr_name = "%s.value" % ".".join(
                        connection.split(".")[:-1]
                    )
                    yield cmds.getAttr(attr_name)

    def get_render_attribute(self, attribute):
        """Get attribute from render options.

        Args:
            attribute (str): name of attribute to be looked up.

        Returns:
            Attribute value

        """
        return lib.get_attr_in_layer(
            "defaultRenderGlobals.{}".format(attribute), layer=self.layer
        )

class ExpectedFilesArnold(AExpectedFiles):
    """Expected files for Arnold renderer.

    Attributes:
        aiDriverExtension (dict): Arnold AOV driver extension mapping.
            Is there a better way?
        renderer (str): name of renderer.

    """

    aiDriverExtension = {
        "jpeg": "jpg",
        "exr": "exr",
        "deepexr": "exr",
        "png": "png",
        "tiff": "tif",
        "mtoa_shaders": "ass",  # TODO: research what those last two should be
        "maya": "",
    }

    def __init__(self, layer, render_instance):
        """Constructor."""
        super(ExpectedFilesArnold, self).__init__(layer, render_instance)
        self.renderer = "arnold"

    def get_aovs(self):
        """Get all AOVs.

        See Also:
            :func:`AExpectedFiles.get_aovs()`

        Raises:
            :class:`AOVError`: If AOV cannot be determined.

        """
        enabled_aovs = []
        try:
            if not (
                cmds.getAttr("defaultArnoldRenderOptions.aovMode")
                and not cmds.getAttr("defaultArnoldDriver.mergeAOVs")  # noqa: W503, E501
            ):
                # AOVs are merged into a multi-channel file
                self.multipart = True
                return enabled_aovs
        except ValueError:
            # this occurs when the Render Settings window was not opened yet.
            # In that case there are no Arnold options created, so the query
            # for AOVs will fail. We terminate here as there are no AOVs
            # specified then. This state will most probably fail later on
            # some Validator anyway.
            return enabled_aovs

        # AOVs are set to be rendered separately. We should expect
        # <RenderPass> token in path.

        # handle aovs from references
        use_ref_aovs = self.render_instance.data.get(
            "useReferencedAovs", False) or False

        ai_aovs = cmds.ls(type="aiAOV")
        if not use_ref_aovs:
            ref_aovs = cmds.ls(type="aiAOV", referencedNodes=True)
            ai_aovs = list(set(ai_aovs) - set(ref_aovs))

        for aov in ai_aovs:
            enabled = self.maya_is_true(cmds.getAttr("{}.enabled".format(aov)))
            ai_driver = cmds.listConnections("{}.outputs".format(aov))[0]
            ai_translator = cmds.getAttr("{}.aiTranslator".format(ai_driver))
            try:
                aov_ext = self.aiDriverExtension[ai_translator]
            except KeyError:
                msg = (
                    "Unrecognized arnold driver format for AOV - {}"
                ).format(cmds.getAttr("{}.name".format(aov)))
                raise AOVError(msg)

            for override in self.get_layer_overrides(
                    "{}.enabled".format(aov)
            ):
                enabled = self.maya_is_true(override)
            if enabled:
                # If aov RGBA is selected, arnold will translate it to `beauty`
                aov_name = cmds.getAttr("%s.name" % aov)
                if aov_name == "RGBA":
                    aov_name = "beauty"
                enabled_aovs.append((aov_name, aov_ext))
        # Append 'beauty' as this is arnolds
        # default. If <RenderPass> token is specified and no AOVs are
        # defined, this will be used.
        enabled_aovs.append(
            (u"beauty", cmds.getAttr("defaultRenderGlobals.imfPluginKey"))
        )
        return enabled_aovs


class ExpectedFilesVray(AExpectedFiles):
    """Expected files for V-Ray renderer."""

    def __init__(self, layer, render_instance):
        """Constructor."""
        super(ExpectedFilesVray, self).__init__(layer, render_instance)
        self.renderer = "vray"

    def get_renderer_prefix(self):
        """Get image prefix for V-Ray.

        This overrides :func:`AExpectedFiles.get_renderer_prefix()` as
        we must add `<aov>` token manually.

        See also:
            :func:`AExpectedFiles.get_renderer_prefix()`

        """
        prefix = super(ExpectedFilesVray, self).get_renderer_prefix()
        prefix = "{}_<aov>".format(prefix)
        return prefix

    def _get_layer_data(self):
        # type: () -> LayerMetadata
        """Override to get vray specific extension."""
        layer_data = super(ExpectedFilesVray, self)._get_layer_data()
        default_ext = cmds.getAttr("vraySettings.imageFormatStr")
        if default_ext in ["exr (multichannel)", "exr (deep)"]:
            default_ext = "exr"
        layer_data.defaultExt = default_ext
        layer_data.padding = cmds.getAttr("vraySettings.fileNamePadding")
        return layer_data

    def get_files(self):
        """Get expected files.

        This overrides :func:`AExpectedFiles.get_files()` as we need to add
        one sequence for plain beauty if AOVs are enabled, because V-Ray
        outputs beauty without 'beauty' in the filename.

        """
        expected_files = super(ExpectedFilesVray, self).get_files()

        layer_data = self._get_layer_data()
        # remove 'beauty' from filenames as vray doesn't output it
        update = {}
        if layer_data.enabledAOVs:
            for aov, seqs in expected_files[0].items():
                if aov.startswith("beauty"):
                    new_list = []
                    for seq in seqs:
                        new_list.append(seq.replace("_beauty", ""))
                    update[aov] = new_list

            expected_files[0].update(update)
        return expected_files

    def get_aovs(self):
        """Get all AOVs.

        See Also:
            :func:`AExpectedFiles.get_aovs()`

        """
        enabled_aovs = []

        try:
            # really? do we set it in vray just by selecting multichannel exr?
            if (
                cmds.getAttr("vraySettings.imageFormatStr")
                == "exr (multichannel)"  # noqa: W503
            ):
                # AOVs are merged into a multi-channel file
                self.multipart = True
                return enabled_aovs
        except ValueError:
            # this occurs when the Render Settings window was not opened yet.
            # In that case there are no V-Ray options created, so the query
            # for AOVs will fail. We terminate here as there are no AOVs
            # specified then. This state will most probably fail later on
            # some Validator anyway.
            return enabled_aovs

        default_ext = cmds.getAttr("vraySettings.imageFormatStr")
        if default_ext in ["exr (multichannel)", "exr (deep)"]:
            default_ext = "exr"

        # add beauty as default
        enabled_aovs.append(
            (u"beauty", default_ext)
        )

        # handle aovs from references
        use_ref_aovs = self.render_instance.data.get(
            "useReferencedAovs", False) or False

        # this will have list of all aovs no matter if they are coming from
        # reference or not.
        vr_aovs = cmds.ls(
            type=["VRayRenderElement", "VRayRenderElementSet"]) or []
        if not use_ref_aovs:
            ref_aovs = cmds.ls(
                type=["VRayRenderElement", "VRayRenderElementSet"],
                referencedNodes=True) or []
            # get difference
            vr_aovs = list(set(vr_aovs) - set(ref_aovs))

        for aov in vr_aovs:
            enabled = self.maya_is_true(cmds.getAttr("{}.enabled".format(aov)))
            for override in self.get_layer_overrides(
                    "{}.enabled".format(aov)
            ):
                enabled = self.maya_is_true(override)

            if enabled:
                enabled_aovs.append(
                    (self._get_vray_aov_name(aov), default_ext))

        return enabled_aovs

    @staticmethod
    def _get_vray_aov_name(node):
        """Get AOV name from V-Ray.

        Args:
            node (str): aov node name.

        Returns:
            str: aov name.

        """
        vray_name = None
        vray_explicit_name = None
        vray_file_name = None
        for node_attr in cmds.listAttr(node):
            if node_attr.startswith("vray_filename"):
                vray_file_name = cmds.getAttr("{}.{}".format(node, node_attr))
            elif node_attr.startswith("vray_name"):
                vray_name = cmds.getAttr("{}.{}".format(node, node_attr))
            elif node_attr.startswith("vray_explicit_name"):
                vray_explicit_name = cmds.getAttr(
                    "{}.{}".format(node, node_attr))

            if vray_file_name is not None and vray_file_name != "":
                final_name = vray_file_name
            elif vray_explicit_name is not None and vray_explicit_name != "":
                final_name = vray_explicit_name
            elif vray_name is not None and vray_name != "":
                final_name = vray_name
            else:
                continue
            # special case for Material Select elements - these are named
            # based on the material they are connected to.
            if "vray_mtl_mtlselect" in cmds.listAttr(node):
                connections = cmds.listConnections(
                    "{}.vray_mtl_mtlselect".format(node))
                if connections:
                    final_name += '_{}'.format(str(connections[0]))

            return final_name

class ExpectedFilesRedshift(AExpectedFiles):
    """Expected files for Redshift renderer.

    Attributes:
        unmerged_aovs (list): Names of AOVs that are not merged into the
            resulting exr and that we need specified in the expectedFiles
            output.

    """

    unmerged_aovs = ["Cryptomatte"]

    def __init__(self, layer, render_instance):
        """Constructor."""
        super(ExpectedFilesRedshift, self).__init__(layer, render_instance)
        self.renderer = "redshift"

    def get_renderer_prefix(self):
        """Get image prefix for Redshift.

        This overrides :func:`AExpectedFiles.get_renderer_prefix()` as
        we must add `<aov>` token manually.

        See also:
            :func:`AExpectedFiles.get_renderer_prefix()`

        """
        prefix = super(ExpectedFilesRedshift, self).get_renderer_prefix()
        prefix = "{}.<aov>".format(prefix)
        return prefix

    def get_files(self):
        """Get expected files.

        This overrides :func:`AExpectedFiles.get_files()` as we need to add
        one sequence for plain beauty if AOVs are enabled, because the
        renderer outputs beauty without 'beauty' in the filename.

        """
        expected_files = super(ExpectedFilesRedshift, self).get_files()
        layer_data = self._get_layer_data()

        # Redshift doesn't merge the Cryptomatte AOV into the final exr.
        # We need to check for that condition and add it to the list of
        # expected files.
        for aov in layer_data.enabledAOVs:
            if aov[0].lower() == "cryptomatte":
                aov_name = aov[0]
                expected_files.append(
                    {aov_name: self._generate_single_file_sequence(layer_data)}
                )

        if layer_data.enabledAOVs:
            # because if Beauty is added manually, it will be rendered as
            # 'Beauty_other' in file name and "standard" beauty will have
            # 'Beauty' in its name. When disabled, standard output will be
            # without `Beauty`.
            if expected_files[0].get(u"Beauty"):
                expected_files[0][u"Beauty_other"] = expected_files[0].pop(
                    u"Beauty")
                new_list = [
                    seq.replace(".Beauty", ".Beauty_other")
                    for seq in expected_files[0][u"Beauty_other"]
                ]

                expected_files[0][u"Beauty_other"] = new_list
                expected_files[0][u"Beauty"] = self._generate_single_file_sequence(  # noqa: E501
                    layer_data, force_aov_name="Beauty"
                )
            else:
                expected_files[0][u"Beauty"] = self._generate_single_file_sequence(  # noqa: E501
                    layer_data
                )

        return expected_files

    def get_aovs(self):
        """Get all AOVs.

        See Also:
            :func:`AExpectedFiles.get_aovs()`

        """
        enabled_aovs = []

        try:
            if self.maya_is_true(
                    cmds.getAttr("redshiftOptions.exrForceMultilayer")
            ):
                # AOVs are merged into a multi-channel file
                self.multipart = True
                return enabled_aovs
        except ValueError:
            # this occurs when the Render Settings window was not opened yet.
            # In that case there are no Redshift options created, so the query
            # for AOVs will fail. We terminate here as there are no AOVs
            # specified then. This state will most probably fail later on
            # some Validator anyway.
            return enabled_aovs

        default_ext = cmds.getAttr(
            "redshiftOptions.imageFormat", asString=True)
        rs_aovs = cmds.ls(type="RedshiftAOV", referencedNodes=False)

        for aov in rs_aovs:
            enabled = self.maya_is_true(cmds.getAttr("{}.enabled".format(aov)))
            for override in self.get_layer_overrides(
                    "{}.enabled".format(aov)
            ):
                enabled = self.maya_is_true(override)

            if enabled:
                # If AOVs are merged into multipart exr, append AOV only if it
                # is in the list of AOVs that renderer cannot (or will not)
                # merge into final exr.
                if self.maya_is_true(
                        cmds.getAttr("redshiftOptions.exrForceMultilayer")
                ):
                    if cmds.getAttr("%s.name" % aov) in self.unmerged_aovs:
                        enabled_aovs.append(
                            (cmds.getAttr("%s.name" % aov), default_ext)
                        )
                else:
                    enabled_aovs.append(
                        (cmds.getAttr("%s.name" % aov), default_ext)
                    )

        if self.maya_is_true(
                cmds.getAttr("redshiftOptions.exrForceMultilayer")
        ):
            # AOVs are merged into a multi-channel file
            self.multipart = True

        return enabled_aovs


class ExpectedFilesRenderman(AExpectedFiles):
    """Expected files for Renderman renderer.

    Warning:
        This is very rudimentary and needs more love and testing.
    """

    def __init__(self, layer, render_instance):
        """Constructor."""
        super(ExpectedFilesRenderman, self).__init__(layer, render_instance)
        self.renderer = "renderman"

    def get_aovs(self):
        """Get all AOVs.

        See Also:
            :func:`AExpectedFiles.get_aovs()`

        """
        enabled_aovs = []

        default_ext = "exr"
        displays = cmds.listConnections("rmanGlobals.displays")
        for aov in displays:
            aov_name = str(aov)
            if aov_name == "rmanDefaultDisplay":
                aov_name = "beauty"

            enabled = self.maya_is_true(cmds.getAttr("{}.enable".format(aov)))
            for override in self.get_layer_overrides(
                    "{}.enable".format(aov)
            ):
                enabled = self.maya_is_true(override)

            if enabled:
                enabled_aovs.append((aov_name, default_ext))

        return enabled_aovs

    def get_files(self):
        """Get expected files.

        This overrides :func:`AExpectedFiles.get_files()` as we need to add
        one sequence for plain beauty if AOVs are enabled, because the
        renderer outputs beauty without 'beauty' in the filename.

        In Renderman we hack it by prepending a path. This path would
        normally be translated from `rmanGlobals.imageOutputDir`. We skip
        this and hardcode the prepended path we expect. There is no place
        for the user to mess around with this setting anyway and it is
        enforced in the render settings validator.
        """
        layer_data = self._get_layer_data()
        new_aovs = {}

        expected_files = super(ExpectedFilesRenderman, self).get_files()
        # we always get beauty
        for aov, files in expected_files[0].items():
            new_files = []
            for file in files:
                new_file = "{}/{}/{}".format(
                    layer_data.sceneName, layer_data.layerName, file
                )
                new_files.append(new_file)
            new_aovs[aov] = new_files

        return [new_aovs]


class ExpectedFilesMentalray(AExpectedFiles):
    """Skeleton unimplemented class for Mentalray renderer."""

    def __init__(self, layer, render_instance):
        """Constructor.

        Raises:
            :exc:`UnimplementedRendererException`: as it is not implemented.

        """
        super(ExpectedFilesMentalray, self).__init__(layer, render_instance)
        raise UnimplementedRendererException("Mentalray not implemented")

    def get_aovs(self):
        """Get all AOVs.

        See Also:
            :func:`AExpectedFiles.get_aovs()`

        """
        return []


class AOVError(Exception):
    """Custom exception for determining AOVs."""


class UnsupportedRendererException(Exception):
    """Custom exception.

    Raised when requesting data from an unsupported renderer.
    """


class UnimplementedRendererException(Exception):
    """Custom exception.

    Raised when requesting data from a renderer that is not implemented yet.
    """
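As the module docstring above notes, support for another renderer is added by subclassing AExpectedFiles and wiring it into ExpectedFiles.get(). A minimal sketch of such a subclass follows; the renderer name and its behaviour are hypothetical and not part of this commit:

class ExpectedFilesExampleRenderer(AExpectedFiles):
    """Sketch of a hypothetical renderer implementation."""

    def __init__(self, layer, render_instance):
        super(ExpectedFilesExampleRenderer, self).__init__(
            layer, render_instance)
        # must also be a key in IMAGE_PREFIXES so get_renderer_prefix() works
        self.renderer = "examplerenderer"

    def get_aovs(self):
        # return (aov_name, extension) tuples for every enabled AOV;
        # an empty list means only a single beauty sequence is expected
        return []

ExpectedFiles.get() would then need one more `if renderer.lower() == "examplerenderer":` branch returning `self._get_files(ExpectedFilesExampleRenderer(layer, self._render_instance))`.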
@@ -2252,10 +2252,8 @@ def get_attr_in_layer(attr, layer):
    try:
        if cmds.mayaHasRenderSetup():
            log.debug("lib.get_attr_in_layer is not "
                      "optimized for render setup")
            with renderlayer(layer):
                return cmds.getAttr(attr)
        from . import lib_rendersetup
        return lib_rendersetup.get_attr_in_layer(attr, layer)
    except AttributeError:
        pass
1039  openpype/hosts/maya/api/lib_renderproducts.py  (new file)
      File diff suppressed because it is too large
 343  openpype/hosts/maya/api/lib_rendersetup.py  (new file)

@@ -0,0 +1,343 @@
# -*- coding: utf-8 -*-
"""Library for handling Render Setup in Maya."""
from maya import cmds
import maya.api.OpenMaya as om
import logging

import maya.app.renderSetup.model.utils as utils
from maya.app.renderSetup.model import (
    renderSetup
)
from maya.app.renderSetup.model.override import (
    AbsOverride,
    RelOverride,
    UniqueOverride
)

ExactMatch = 0
ParentMatch = 1
ChildMatch = 2

DefaultRenderLayer = "defaultRenderLayer"

log = logging.getLogger(__name__)


def get_rendersetup_layer(layer):
    """Return render setup layer name.

    This also converts names from legacy renderLayer node name to render setup
    name.

    Note: `defaultRenderLayer` is not a renderSetupLayer node but it is,
        however, a valid layer name for Render Setup - so we return it as is.

    Example:
        >>> for legacy_layer in cmds.ls(type="renderLayer"):
        >>>     layer = get_rendersetup_layer(legacy_layer)

    Returns:
        str or None: Returns renderSetupLayer node name if `layer` is a valid
            layer name in legacy renderlayers or render setup layers.
            Returns None if the layer can't be found or Render Setup is
            currently disabled.

    """
    if layer == DefaultRenderLayer:
        # defaultRenderLayer doesn't have a `renderSetupLayer`
        return layer

    if not cmds.mayaHasRenderSetup():
        return None

    if not cmds.objExists(layer):
        return None

    if cmds.nodeType(layer) == "renderSetupLayer":
        return layer

    # By default Render Setup renames the legacy renderlayer
    # to `rs_<layername>` but let's not rely on that as the
    # layer node can be renamed manually
    connections = cmds.listConnections(layer + ".message",
                                       type="renderSetupLayer",
                                       exactType=True,
                                       source=False,
                                       destination=True,
                                       plugs=True) or []
    return next((conn.split(".", 1)[0] for conn in connections
                 if conn.endswith(".legacyRenderLayer")), None)


def get_attr_in_layer(node_attr, layer):
    """Return attribute value in Render Setup layer.

    This will only work for attributes which can be
    retrieved with `maya.cmds.getAttr` and for which
    Relative and Absolute overrides are applicable.

    Examples:
        >>> get_attr_in_layer("defaultResolution.width", layer="layer1")
        >>> get_attr_in_layer("defaultRenderGlobals.startFrame", layer="layer")
        >>> get_attr_in_layer("transform.translate", layer="layer3")

    Args:
        node_attr (str): attribute name as 'node.attribute'
        layer (str): layer name

    Returns:
        object: attribute value in layer

    """

    # Delay pymel import to here because it's slow to load
    import pymel.core as pm

    def _layer_needs_update(layer):
        """Return whether layer needs updating."""
        # Use `getattr` as e.g. DefaultRenderLayer does not have the attribute
        return getattr(layer, "needsMembershipUpdate", False) or \
            getattr(layer, "needsApplyUpdate", False)

    def get_default_layer_value(node_attr_):
        """Return attribute value in defaultRenderLayer"""
        inputs = cmds.listConnections(node_attr_,
                                      source=True,
                                      destination=False,
                                      # We want to skip conversion nodes since
                                      # an override to `endFrame` could have
                                      # a `unitToTimeConversion` node
                                      # in-between
                                      skipConversionNodes=True,
                                      type="applyOverride") or []
        if inputs:
            _override = inputs[0]
            history_overrides = cmds.ls(cmds.listHistory(_override,
                                                         pruneDagObjects=True),
                                        type="applyOverride")
            node = history_overrides[-1] if history_overrides else _override
            node_attr_ = node + ".original"

        return pm.getAttr(node_attr_, asString=True)

    layer = get_rendersetup_layer(layer)
    rs = renderSetup.instance()
    current_layer = rs.getVisibleRenderLayer()
    if current_layer.name() == layer:

        # Ensure layer is up-to-date
        if _layer_needs_update(current_layer):
            try:
                rs.switchToLayer(current_layer)
            except RuntimeError:
                # Some cases can cause errors on switching
                # the first time with Render Setup layers
                # e.g. different overrides to compounds
                # and its children plugs. So we just force
                # it another time. If it then still fails
                # we will let it error out.
                rs.switchToLayer(current_layer)

        return pm.getAttr(node_attr, asString=True)

    overrides = get_attr_overrides(node_attr, layer)
    default_layer_value = get_default_layer_value(node_attr)
    if not overrides:
        return default_layer_value

    value = default_layer_value
    for match, layer_override, index in overrides:
        if isinstance(layer_override, AbsOverride):
            # Absolute override
            value = pm.getAttr(layer_override.name() + ".attrValue")
            if match == ExactMatch:
                value = value
            if match == ParentMatch:
                value = value[index]
            if match == ChildMatch:
                value[index] = value

        elif isinstance(layer_override, RelOverride):
            # Relative override
            # Value = Original * Multiply + Offset
            multiply = pm.getAttr(layer_override.name() + ".multiply")
            offset = pm.getAttr(layer_override.name() + ".offset")

            if match == ExactMatch:
                value = value * multiply + offset
            if match == ParentMatch:
                value = value * multiply[index] + offset[index]
            if match == ChildMatch:
                value[index] = value[index] * multiply + offset

        else:
            raise TypeError("Unsupported override: %s" % layer_override)

    return value


def get_attr_overrides(node_attr, layer,
                       skip_disabled=True,
                       skip_local_render=True,
                       stop_at_absolute_override=True):
    """Return all Overrides applicable to the attribute.

    Overrides are returned as a 3-tuple:
        (Match, Override, Index)

    Match:
        This is any of ExactMatch, ParentMatch, ChildMatch
        and defines whether the override is exactly on the
        plug, on the parent or on a child plug.

    Override:
        This is the RenderSetup Override instance.

    Index:
        This is the Plug index under the parent or for
        the child that matches. The ExactMatch index will
        always be None. For ParentMatch the index is which
        index the plug is under the parent plug. For ChildMatch
        the index is which child index matches the plug.

    Args:
        node_attr (str): attribute name as 'node.attribute'
        layer (str): layer name
        skip_disabled (bool): exclude disabled overrides
        skip_local_render (bool): exclude overrides marked
            as local render.
        stop_at_absolute_override: exclude overrides prior
            to the last absolute override as they have
            no influence on the resulting value.

    Returns:
        list: Ordered Overrides in order of strength

    """

    def get_mplug_children(plug):
        """Return children MPlugs of compound MPlug"""
        children = []
        if plug.isCompound:
            for i in range(plug.numChildren()):
                children.append(plug.child(i))
        return children

    def get_mplug_names(mplug):
        """Return long and short name of MPlug"""
        long_name = mplug.partialName(useLongNames=True)
        short_name = mplug.partialName(useLongNames=False)
        return {long_name, short_name}

    def iter_override_targets(_override):
        try:
            for target in _override._targets():
                yield target
        except AssertionError:
            # Workaround: There is a bug where the private `_targets()` method
            # fails on some attribute plugs. For example overrides
            # to the defaultRenderGlobals.endFrame
            # (Tested in Maya 2020.2)
            log.debug("Workaround for %s" % _override)
            from maya.app.renderSetup.common.utils import findPlug

            attr = _override.attributeName()
            if isinstance(_override, UniqueOverride):
                node = _override.targetNodeName()
                yield findPlug(node, attr)
            else:
                nodes = _override.parent().selector().nodes()
                for node in nodes:
                    if cmds.attributeQuery(attr, node=node, exists=True):
                        yield findPlug(node, attr)

    # Get the MPlug for the node.attr
    sel = om.MSelectionList()
    sel.add(node_attr)
    plug = sel.getPlug(0)

    layer = get_rendersetup_layer(layer)
    if layer == DefaultRenderLayer:
        # DefaultRenderLayer will never have overrides
        # since it's the default layer
        return []

    rs_layer = renderSetup.instance().getRenderLayer(layer)
    if rs_layer is None:
        # Renderlayer does not exist
        return

    # Get any parent or children plugs as we also
    # want to include them in the attribute match
    # for overrides
    parent = plug.parent() if plug.isChild else None
    parent_index = None
    if parent:
        parent_index = get_mplug_children(parent).index(plug)

    children = get_mplug_children(plug)

    # Create lookup for the attribute by both long
    # and short names
    attr_names = get_mplug_names(plug)
    for child in children:
        attr_names.update(get_mplug_names(child))
    if parent:
        attr_names.update(get_mplug_names(parent))

    # Get all overrides of the layer
    # and find those that are relevant to the attribute
    plug_overrides = []

    # Iterate over the overrides in reverse so we get the last
    # overrides first and can "break" whenever an absolute
    # override is reached
    layer_overrides = list(utils.getOverridesRecursive(rs_layer))
    for layer_override in reversed(layer_overrides):

        if skip_disabled and not layer_override.isEnabled():
            # Ignore disabled overrides
            continue

        if skip_local_render and layer_override.isLocalRender():
            continue

        # The targets list can be very large so we'll do
        # a quick filter by attribute name to detect whether
        # it matches the attribute name, or its parent or child
        if layer_override.attributeName() not in attr_names:
            continue

        override_match = None
        for override_plug in iter_override_targets(layer_override):

            override_match = None
            if plug == override_plug:
                override_match = (ExactMatch, layer_override, None)

            elif parent and override_plug == parent:
                override_match = (ParentMatch, layer_override, parent_index)

            elif children and override_plug in children:
                child_index = children.index(override_plug)
                override_match = (ChildMatch, layer_override, child_index)

            if override_match:
                plug_overrides.append(override_match)
                break

        if (
            override_match and
            stop_at_absolute_override and
            isinstance(layer_override, AbsOverride) and
            # When the override is only on a child plug then it doesn't
            # override the entire value so we do not stop at this override
            not override_match[0] == ChildMatch
        ):
            # If the override is an absolute override, then BREAK out
            # of the parent loop - we don't need to look any further as
            # this is the absolute override
            break

    return reversed(plug_overrides)
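A minimal usage sketch for the helpers above; the layer name is a placeholder and a Maya session with Render Setup is assumed:

from openpype.hosts.maya.api import lib_rendersetup

# value of an attribute as it would evaluate in a given render setup layer
width = lib_rendersetup.get_attr_in_layer(
    "defaultResolution.width", layer="rs_beauty")

# inspect which overrides contributed to that value, weakest to strongest
for match, override, index in lib_rendersetup.get_attr_overrides(
        "defaultResolution.width", "rs_beauty"):
    print(match, override.name(), index)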
@@ -99,14 +99,24 @@ class ReferenceLoader(api.Loader):
            nodes = self[:]
            if not nodes:
                return

            loaded_containers.append(containerise(
                name=name,
                namespace=namespace,
                nodes=nodes,
                context=context,
                loader=self.__class__.__name__
            ))
            # FIXME: there is probably a better way to do this for looks.
            if "look" in self.families:
                loaded_containers.append(containerise(
                    name=name,
                    namespace=namespace,
                    nodes=nodes,
                    context=context,
                    loader=self.__class__.__name__
                ))
            else:
                ref_node = self._get_reference_node(nodes)
                loaded_containers.append(containerise(
                    name=name,
                    namespace=namespace,
                    nodes=[ref_node],
                    context=context,
                    loader=self.__class__.__name__
                ))

            c += 1
            namespace = None

@@ -235,9 +245,6 @@ class ReferenceLoader(api.Loader):
                self.log.info("Setting %s.verticesOnlySet to False", node)
                cmds.setAttr("{}.verticesOnlySet".format(node), False)

        # Add new nodes of the reference to the container
        cmds.sets(content, forceElement=node)

        # Remove any placeHolderList attribute entries from the set that
        # are remaining from nodes being removed from the referenced file.
        members = cmds.sets(node, query=True)

@@ -4,6 +4,8 @@ import os
import json
import appdirs
import requests
import six
import sys

from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup

@@ -12,7 +14,13 @@ from openpype.hosts.maya.api import (
    lib,
    plugin
)
from openpype.api import (get_system_settings, get_asset)
from openpype.api import (
    get_system_settings,
    get_project_settings,
    get_asset)
from openpype.modules import ModulesManager

from avalon.api import Session


class CreateRender(plugin.Creator):
@@ -83,6 +91,32 @@ class CreateRender(plugin.Creator):
    def __init__(self, *args, **kwargs):
        """Constructor."""
        super(CreateRender, self).__init__(*args, **kwargs)
        deadline_settings = get_system_settings()["modules"]["deadline"]
        if not deadline_settings["enabled"]:
            self.deadline_servers = {}
            return
        project_settings = get_project_settings(Session["AVALON_PROJECT"])
        try:
            default_servers = deadline_settings["deadline_urls"]
            project_servers = (
                project_settings["deadline"]
                ["deadline_servers"]
            )
            self.deadline_servers = {
                k: default_servers[k]
                for k in project_servers
                if k in default_servers
            }

            if not self.deadline_servers:
                self.deadline_servers = default_servers

        except AttributeError:
            # Handle the situation where we had only one URL for Deadline.
            manager = ModulesManager()
            deadline_module = manager.modules_by_name["deadline"]
            # get default deadline webservice url from deadline module
            self.deadline_servers = deadline_module.deadline_urls

    def process(self):
        """Entry point."""

@@ -94,10 +128,10 @@ class CreateRender(plugin.Creator):
        use_selection = self.options.get("useSelection")
        with lib.undo_chunk():
            self._create_render_settings()
            instance = super(CreateRender, self).process()
            self.instance = super(CreateRender, self).process()
            # create namespace with instance
            index = 1
            namespace_name = "_{}".format(str(instance))
            namespace_name = "_{}".format(str(self.instance))
            try:
                cmds.namespace(rm=namespace_name)
            except RuntimeError:

@@ -105,12 +139,20 @@ class CreateRender(plugin.Creator):
                pass

            while cmds.namespace(exists=namespace_name):
                namespace_name = "_{}{}".format(str(instance), index)
                namespace_name = "_{}{}".format(str(self.instance), index)
                index += 1

            namespace = cmds.namespace(add=namespace_name)

            cmds.setAttr("{}.machineList".format(instance), lock=True)
            # add Deadline server selection list
            if self.deadline_servers:
                cmds.scriptJob(
                    attributeChange=[
                        "{}.deadlineServers".format(self.instance),
                        self._deadline_webservice_changed
                    ])

            cmds.setAttr("{}.machineList".format(self.instance), lock=True)
            self._rs = renderSetup.instance()
            layers = self._rs.getRenderLayers()
            if use_selection:

@@ -122,7 +164,7 @@ class CreateRender(plugin.Creator):
                render_set = cmds.sets(
                    n="{}:{}".format(namespace, layer.name()))
                sets.append(render_set)
            cmds.sets(sets, forceElement=instance)
            cmds.sets(sets, forceElement=self.instance)

            # if no render layers are present, create default one with
            # asterisk selector

@@ -138,62 +180,61 @@ class CreateRender(plugin.Creator):
                renderer = 'renderman'

            self._set_default_renderer_settings(renderer)
        return self.instance

    def _deadline_webservice_changed(self):
        """Refresh Deadline server dependent options."""
        # get selected server
        from maya import cmds
        webservice = self.deadline_servers[
            self.server_aliases[
                cmds.getAttr("{}.deadlineServers".format(self.instance))
            ]
        ]
        pools = self._get_deadline_pools(webservice)
        cmds.deleteAttr("{}.primaryPool".format(self.instance))
        cmds.deleteAttr("{}.secondaryPool".format(self.instance))
        cmds.addAttr(self.instance, longName="primaryPool",
                     attributeType="enum",
                     enumName=":".join(pools))
        cmds.addAttr(self.instance, longName="secondaryPool",
                     attributeType="enum",
                     enumName=":".join(["-"] + pools))

    def _get_deadline_pools(self, webservice):
        # type: (str) -> list
        """Get pools from Deadline.

        Args:
            webservice (str): Server url.

        Returns:
            list: Pools.

        Throws:
            RuntimeError: If the Deadline webservice is unreachable.

        """
        argument = "{}/api/pools?NamesOnly=true".format(webservice)
        try:
            response = self._requests_get(argument)
        except requests.exceptions.ConnectionError as exc:
            msg = 'Cannot connect to deadline web service'
            self.log.error(msg)
            six.reraise(
                RuntimeError,
                RuntimeError('{} - {}'.format(msg, exc)),
                sys.exc_info()[2])
        if not response.ok:
            self.log.warning("No pools retrieved")
            return []

        return response.json()

    def _create_render_settings(self):
        """Create instance settings."""
        # get pools
        pools = []

        system_settings = get_system_settings()["modules"]

        deadline_enabled = system_settings["deadline"]["enabled"]
        muster_enabled = system_settings["muster"]["enabled"]
        deadline_url = system_settings["deadline"]["DEADLINE_REST_URL"]
        muster_url = system_settings["muster"]["MUSTER_REST_URL"]

        if deadline_enabled and muster_enabled:
            self.log.error(
                "Both Deadline and Muster are enabled. Cannot support both."
            )
            raise RuntimeError("Both Deadline and Muster are enabled")

        if deadline_enabled:
            argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
            try:
                response = self._requests_get(argument)
            except requests.exceptions.ConnectionError as e:
                msg = 'Cannot connect to deadline web service'
                self.log.error(msg)
                raise RuntimeError('{} - {}'.format(msg, e))
            if not response.ok:
                self.log.warning("No pools retrieved")
            else:
                pools = response.json()
                self.data["primaryPool"] = pools
                # We add a string "-" to allow the user to not
                # set any secondary pools
                self.data["secondaryPool"] = ["-"] + pools

        if muster_enabled:
            self.log.info(">>> Loading Muster credentials ...")
            self._load_credentials()
            self.log.info(">>> Getting pools ...")
            try:
                pools = self._get_muster_pools()
            except requests.exceptions.HTTPError as e:
                if e.startswith("401"):
                    self.log.warning("access token expired")
                    self._show_login()
                    raise RuntimeError("Access token expired")
            except requests.exceptions.ConnectionError:
                self.log.error("Cannot connect to Muster API endpoint.")
                raise RuntimeError("Cannot connect to {}".format(muster_url))
            pool_names = []
            for pool in pools:
                self.log.info("  - pool: {}".format(pool["name"]))
                pool_names.append(pool["name"])

            self.data["primaryPool"] = pool_names
        pool_names = []

        self.server_aliases = self.deadline_servers.keys()
        self.data["deadlineServers"] = self.server_aliases
        self.data["suspendPublishJob"] = False
        self.data["review"] = True
        self.data["extendFrames"] = False
@ -212,6 +253,54 @@ class CreateRender(plugin.Creator):
|
|||
# Disable for now as this feature is not working yet
|
||||
# self.data["assScene"] = False
|
||||
|
||||
system_settings = get_system_settings()["modules"]
|
||||
|
||||
deadline_enabled = system_settings["deadline"]["enabled"]
|
||||
muster_enabled = system_settings["muster"]["enabled"]
|
||||
muster_url = system_settings["muster"]["MUSTER_REST_URL"]
|
||||
|
||||
if deadline_enabled and muster_enabled:
|
||||
self.log.error(
|
||||
"Both Deadline and Muster are enabled. " "Cannot support both."
|
||||
)
|
||||
raise RuntimeError("Both Deadline and Muster are enabled")
|
||||
|
||||
if deadline_enabled:
|
||||
# if default server is not between selected, use first one for
|
||||
# initial list of pools.
|
||||
try:
|
||||
deadline_url = self.deadline_servers["default"]
|
||||
except KeyError:
|
||||
deadline_url = [
|
||||
self.deadline_servers[k]
|
||||
for k in self.deadline_servers.keys()
|
||||
][0]
|
||||
|
||||
pool_names = self._get_deadline_pools(deadline_url)
|
||||
|
||||
if muster_enabled:
|
||||
self.log.info(">>> Loading Muster credentials ...")
|
||||
self._load_credentials()
|
||||
self.log.info(">>> Getting pools ...")
|
||||
pools = []
|
||||
try:
|
||||
pools = self._get_muster_pools()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if e.startswith("401"):
|
||||
self.log.warning("access token expired")
|
||||
self._show_login()
|
||||
raise RuntimeError("Access token expired")
|
||||
except requests.exceptions.ConnectionError:
|
||||
self.log.error("Cannot connect to Muster API endpoint.")
|
||||
raise RuntimeError("Cannot connect to {}".format(muster_url))
|
||||
for pool in pools:
|
||||
self.log.info(" - pool: {}".format(pool["name"]))
|
||||
pool_names.append(pool["name"])
|
||||
|
||||
self.data["primaryPool"] = pool_names
|
||||
# We add a string "-" to allow the user to not
|
||||
# set any secondary pools
|
||||
self.data["secondaryPool"] = ["-"] + pool_names
|
||||
self.options = {"useSelection": False} # Force no content
|
||||
|
||||
def _load_credentials(self):
|
||||
|
|
@ -293,9 +382,7 @@ class CreateRender(plugin.Creator):
|
|||
|
||||
"""
|
||||
if "verify" not in kwargs:
|
||||
kwargs["verify"] = (
|
||||
False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True
|
||||
) # noqa
|
||||
kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True)
|
||||
return requests.post(*args, **kwargs)
|
||||
|
||||
def _requests_get(self, *args, **kwargs):
|
||||
|
|
@ -312,9 +399,7 @@ class CreateRender(plugin.Creator):
|
|||
|
||||
"""
|
||||
if "verify" not in kwargs:
|
||||
kwargs["verify"] = (
|
||||
False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True
|
||||
) # noqa
|
||||
kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True)
|
||||
return requests.get(*args, **kwargs)
|
||||
|
||||
def _set_default_renderer_settings(self, renderer):
|
||||
|
|
@ -332,14 +417,10 @@ class CreateRender(plugin.Creator):
|
|||
|
||||
if renderer == "arnold":
|
||||
# set format to exr
|
||||
|
||||
cmds.setAttr(
|
||||
"defaultArnoldDriver.ai_translator", "exr", type="string")
|
||||
# enable animation
|
||||
cmds.setAttr("defaultRenderGlobals.outFormatControl", 0)
|
||||
cmds.setAttr("defaultRenderGlobals.animation", 1)
|
||||
cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1)
|
||||
cmds.setAttr("defaultRenderGlobals.extensionPadding", 4)
|
||||
|
||||
self._set_global_output_settings()
|
||||
# resolution
|
||||
cmds.setAttr(
|
||||
"defaultResolution.width",
|
||||
|
|
@ -349,43 +430,12 @@ class CreateRender(plugin.Creator):
|
|||
asset["data"].get("resolutionHeight"))
|
||||
|
||||
if renderer == "vray":
|
||||
vray_settings = cmds.ls(type="VRaySettingsNode")
|
||||
if not vray_settings:
|
||||
node = cmds.createNode("VRaySettingsNode")
|
||||
else:
|
||||
node = vray_settings[0]
|
||||
|
||||
# set underscore as element separator instead of default `.`
|
||||
cmds.setAttr(
|
||||
"{}.fileNameRenderElementSeparator".format(
|
||||
node),
|
||||
"_"
|
||||
)
|
||||
# set format to exr
|
||||
cmds.setAttr(
|
||||
"{}.imageFormatStr".format(node), 5)
|
||||
|
||||
# animType
|
||||
cmds.setAttr(
|
||||
"{}.animType".format(node), 1)
|
||||
|
||||
# resolution
|
||||
cmds.setAttr(
|
||||
"{}.width".format(node),
|
||||
asset["data"].get("resolutionWidth"))
|
||||
cmds.setAttr(
|
||||
"{}.height".format(node),
|
||||
asset["data"].get("resolutionHeight"))
|
||||
|
||||
self._set_vray_settings(asset)
|
||||
if renderer == "redshift":
|
||||
redshift_settings = cmds.ls(type="RedshiftOptions")
|
||||
if not redshift_settings:
|
||||
node = cmds.createNode("RedshiftOptions")
|
||||
else:
|
||||
node = redshift_settings[0]
|
||||
_ = self._set_renderer_option(
|
||||
"RedshiftOptions", "{}.imageFormat", 1
|
||||
)
|
||||
|
||||
# set exr
|
||||
cmds.setAttr("{}.imageFormat".format(node), 1)
|
||||
# resolution
|
||||
cmds.setAttr(
|
||||
"defaultResolution.width",
|
||||
|
|
@ -394,8 +444,56 @@ class CreateRender(plugin.Creator):
|
|||
"defaultResolution.height",
|
||||
asset["data"].get("resolutionHeight"))
|
||||
|
||||
# enable animation
|
||||
cmds.setAttr("defaultRenderGlobals.outFormatControl", 0)
|
||||
cmds.setAttr("defaultRenderGlobals.animation", 1)
|
||||
cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1)
|
||||
cmds.setAttr("defaultRenderGlobals.extensionPadding", 4)
|
||||
self._set_global_output_settings()
|
||||
|
||||
@staticmethod
|
||||
def _set_renderer_option(renderer_node, arg=None, value=None):
|
||||
# type: (str, str, str) -> str
|
||||
"""Set option on renderer node.
|
||||
|
||||
If the renderer settings node doesn't exist, it is created first.
|
||||
|
||||
Args:
|
||||
renderer_node (str): Renderer name.
|
||||
arg (str, optional): Argument name.
|
||||
value (str, optional): Argument value.
|
||||
|
||||
Returns:
|
||||
str: Renderer settings node.
|
||||
|
||||
"""
|
||||
settings = cmds.ls(type=renderer_node)
|
||||
result = settings[0] if settings else cmds.createNode(renderer_node)
|
||||
cmds.setAttr(arg.format(result), value)
|
||||
return result
|
||||
|
||||
def _set_vray_settings(self, asset):
|
||||
# type: (dict) -> None
|
||||
"""Sets important settings for Vray."""
|
||||
node = self._set_renderer_option(
|
||||
"VRaySettingsNode", "{}.fileNameRenderElementSeparator", "_"
|
||||
)
|
||||
|
||||
# set format to exr
|
||||
cmds.setAttr(
|
||||
"{}.imageFormatStr".format(node), 5)
|
||||
|
||||
# animType
|
||||
cmds.setAttr(
|
||||
"{}.animType".format(node), 1)
|
||||
|
||||
# resolution
|
||||
cmds.setAttr(
|
||||
"{}.width".format(node),
|
||||
asset["data"].get("resolutionWidth"))
|
||||
cmds.setAttr(
|
||||
"{}.height".format(node),
|
||||
asset["data"].get("resolutionHeight"))
|
||||
|
||||
@staticmethod
|
||||
def _set_global_output_settings():
|
||||
# enable animation
|
||||
cmds.setAttr("defaultRenderGlobals.outFormatControl", 0)
|
||||
cmds.setAttr("defaultRenderGlobals.animation", 1)
|
||||
cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1)
|
||||
cmds.setAttr("defaultRenderGlobals.extensionPadding", 4)
|
||||
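For reference, a short sketch of how the new `_set_renderer_option` helper above is meant to be called from within the creator; both calls mirror ones already present in this hunk.

# Ensure a RedshiftOptions node exists and switch its output to EXR
# (imageFormat 1); the attribute template is filled with the node name.
node = self._set_renderer_option("RedshiftOptions", "{}.imageFormat", 1)

# The V-Ray settings path reuses the same helper:
node = self._set_renderer_option(
    "VRaySettingsNode", "{}.fileNameRenderElementSeparator", "_")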
@ -49,7 +49,7 @@ import maya.app.renderSetup.model.renderSetup as renderSetup
|
|||
import pyblish.api
|
||||
|
||||
from avalon import maya, api
|
||||
from openpype.hosts.maya.api.expected_files import ExpectedFiles
|
||||
from openpype.hosts.maya.api.lib_renderproducts import get as get_layer_render_products # noqa: E501
|
||||
from openpype.hosts.maya.api import lib
|
||||
|
||||
|
||||
|
|
@ -64,6 +64,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
def process(self, context):
|
||||
"""Entry point to collector."""
|
||||
render_instance = None
|
||||
deadline_url = None
|
||||
|
||||
for instance in context:
|
||||
if "rendering" in instance.data["families"]:
|
||||
render_instance = instance
|
||||
|
|
@ -86,6 +88,15 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
asset = api.Session["AVALON_ASSET"]
|
||||
workspace = context.data["workspaceDir"]
|
||||
|
||||
deadline_settings = (
|
||||
context.data
|
||||
["system_settings"]
|
||||
["modules"]
|
||||
["deadline"]
|
||||
)
|
||||
|
||||
if deadline_settings["enabled"]:
|
||||
deadline_url = render_instance.data.get("deadlineUrl")
|
||||
self._rs = renderSetup.instance()
|
||||
current_layer = self._rs.getVisibleRenderLayer()
|
||||
maya_render_layers = {
|
||||
|
|
@ -157,10 +168,21 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
|
||||
# return all expected files for all cameras and aovs in given
|
||||
# frame range
|
||||
ef = ExpectedFiles(render_instance)
|
||||
exp_files = ef.get(renderer, layer_name)
|
||||
self.log.info("multipart: {}".format(ef.multipart))
|
||||
layer_render_products = get_layer_render_products(
|
||||
layer_name, render_instance)
|
||||
render_products = layer_render_products.layer_data.products
|
||||
assert render_products, "no render products generated"
|
||||
exp_files = []
|
||||
for product in render_products:
|
||||
for camera in layer_render_products.layer_data.cameras:
|
||||
exp_files.append(
|
||||
{product.productName: layer_render_products.get_files(
|
||||
product, camera)})
|
||||
|
||||
self.log.info("multipart: {}".format(
|
||||
layer_render_products.multipart))
|
||||
assert exp_files, "no file names were generated, this is bug"
|
||||
self.log.info(exp_files)
|
||||
|
||||
# if we want to attach render to subset, check if we have AOV's
|
||||
# in expectedFiles. If so, raise error as we cannot attach AOV
|
||||
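To make the shape of `exp_files` explicit: the loop above appends one dictionary per render product and camera, keyed by the product (AOV) name and holding the expected file names. A minimal illustration with hypothetical names:

# Hypothetical result for a layer with two products and a single camera:
exp_files = [
    {"beauty": ["sq01_sh010_beauty.1001.exr", "sq01_sh010_beauty.1002.exr"]},
    {"N": ["sq01_sh010_N.1001.exr", "sq01_sh010_N.1002.exr"]},
]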
@ -175,24 +197,15 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
full_exp_files = []
|
||||
aov_dict = {}
|
||||
|
||||
# we either get AOVs or just list of files. List of files can
|
||||
# mean two things - there are no AOVs enabled or multipass EXR
|
||||
# is produced. In either case we treat those as `beauty`.
|
||||
if isinstance(exp_files[0], dict):
|
||||
for aov, files in exp_files[0].items():
|
||||
full_paths = []
|
||||
for e in files:
|
||||
full_path = os.path.join(workspace, "renders", e)
|
||||
full_path = full_path.replace("\\", "/")
|
||||
full_paths.append(full_path)
|
||||
aov_dict[aov] = full_paths
|
||||
else:
|
||||
# replace relative paths with absolute. Render products are
|
||||
# returned as list of dictionaries.
|
||||
for aov in exp_files:
|
||||
full_paths = []
|
||||
for e in exp_files:
|
||||
full_path = os.path.join(workspace, "renders", e)
|
||||
for file in aov[aov.keys()[0]]:
|
||||
full_path = os.path.join(workspace, "renders", file)
|
||||
full_path = full_path.replace("\\", "/")
|
||||
full_paths.append(full_path)
|
||||
aov_dict["beauty"] = full_paths
|
||||
aov_dict[aov.keys()[0]] = full_paths
|
||||
|
||||
frame_start_render = int(self.get_render_attribute(
|
||||
"startFrame", layer=layer_name))
|
||||
|
|
@ -224,7 +237,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
"subset": expected_layer_name,
|
||||
"attachTo": attach_to,
|
||||
"setMembers": layer_name,
|
||||
"multipartExr": ef.multipart,
|
||||
"multipartExr": layer_render_products.multipart,
|
||||
"review": render_instance.data.get("review") or False,
|
||||
"publish": True,
|
||||
|
||||
|
|
@ -263,6 +276,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
"vrayUseReferencedAovs") or False
|
||||
}
|
||||
|
||||
if deadline_url:
|
||||
data["deadlineUrl"] = deadline_url
|
||||
|
||||
if self.sync_workfile_version:
|
||||
data["version"] = context.data["version"]
|
||||
|
||||
|
|
@ -306,10 +322,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
instance.data.update(data)
|
||||
self.log.debug("data: {}".format(json.dumps(data, indent=4)))
|
||||
|
||||
# Restore current layer.
|
||||
self.log.info("Restoring to {}".format(current_layer.name()))
|
||||
self._rs.switchToLayer(current_layer)
|
||||
|
||||
def parse_options(self, render_globals):
|
||||
"""Get all overrides with a value, skip those without.
|
||||
|
||||
|
|
@ -392,11 +404,13 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
|
|||
rset = self.maya_layers[layer].renderSettingsCollectionInstance()
|
||||
return rset.getOverrides()
|
||||
|
||||
def get_render_attribute(self, attr, layer):
|
||||
@staticmethod
|
||||
def get_render_attribute(attr, layer):
|
||||
"""Get attribute from render options.
|
||||
|
||||
Args:
|
||||
attr (str): name of attribute to be looked up.
|
||||
attr (str): name of attribute to be looked up
|
||||
layer (str): name of render layer
|
||||
|
||||
Returns:
|
||||
Attribute value
|
||||
@ -415,13 +415,11 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
"""Plugin entry point."""
|
||||
self._instance = instance
|
||||
context = instance.context
|
||||
self._deadline_url = (
|
||||
context.data["system_settings"]
|
||||
["modules"]
|
||||
["deadline"]
|
||||
["DEADLINE_REST_URL"]
|
||||
)
|
||||
assert self._deadline_url, "Requires DEADLINE_REST_URL"
|
||||
self._deadline_url = context.data.get("defaultDeadline")
|
||||
self._deadline_url = instance.data.get(
|
||||
"deadlineUrl", self._deadline_url)
|
||||
|
||||
assert self._deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
file_path = None
|
||||
if self.use_published:
|
||||
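The submit plugins now resolve the webservice URL in two steps, which the lines above implement: start from the context-wide default collected by the Deadline module and let a per-instance override win. A compact sketch of that precedence, with hypothetical values:

context_data = {"defaultDeadline": "http://127.0.0.1:8082"}
instance_data = {"deadlineUrl": "http://10.0.0.5:8082"}   # optional override

deadline_url = instance_data.get("deadlineUrl", context_data["defaultDeadline"])
assert deadline_url, "Requires Deadline Webservice URL"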
|
|
|
|||
|
|
@ -72,6 +72,8 @@ class PypeStreamHandler(logging.StreamHandler):
|
|||
msg = self.format(record)
|
||||
msg = Terminal.log(msg)
|
||||
stream = self.stream
|
||||
if stream is None:
|
||||
return
|
||||
fs = "%s\n"
|
||||
# if no unicode support...
|
||||
if not USE_UNICODE:
|
||||
|
|
|
|||
|
|
@ -38,6 +38,7 @@ from .muster import MusterModule
|
|||
from .deadline import DeadlineModule
|
||||
from .project_manager_action import ProjectManagerAction
|
||||
from .standalonepublish_action import StandAlonePublishAction
|
||||
from .python_console_interpreter import PythonInterpreterAction
|
||||
from .sync_server import SyncServerModule
|
||||
from .slack import SlackIntegrationModule
|
||||
|
||||
|
|
@ -77,6 +78,7 @@ __all__ = (
|
|||
"DeadlineModule",
|
||||
"ProjectManagerAction",
|
||||
"StandAlonePublishAction",
|
||||
"PythonInterpreterAction",
|
||||
|
||||
"SyncServerModule",
|
||||
|
||||
|
|
|
|||
|
|
@@ -6,17 +6,25 @@ from openpype.modules import (
 class DeadlineModule(PypeModule, IPluginPaths):
     name = "deadline"
 
+    def __init__(self, manager, settings):
+        self.deadline_urls = {}
+        super(DeadlineModule, self).__init__(manager, settings)
+
     def initialize(self, modules_settings):
         # This module is always enabled
         deadline_settings = modules_settings[self.name]
         self.enabled = deadline_settings["enabled"]
-        self.deadline_url = deadline_settings["DEADLINE_REST_URL"]
+        deadline_url = deadline_settings.get("DEADLINE_REST_URL")
+        if deadline_url:
+            self.deadline_urls = {"default": deadline_url}
+        else:
+            self.deadline_urls = deadline_settings.get("deadline_urls")  # noqa: E501
 
-    def get_global_environments(self):
-        """Deadline global environments for OpenPype implementation."""
-        return {
-            "DEADLINE_REST_URL": self.deadline_url
-        }
+        if not self.deadline_urls:
+            self.enabled = False
+            self.log.warning(("default Deadline Webservice URL "
+                              "not specified. Disabling module."))
+            return
 
     def connect_with_modules(self, *_a, **_kw):
         return
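For context, the `modules/deadline` settings this initializer reads now look roughly as follows (the multi-URL form is the new default; the single-URL key is only honoured as a fallback). The URLs below match the defaults shipped later in this commit:

# New multi-server form:
{"enabled": True, "deadline_urls": {"default": "http://127.0.0.1:8082"}}

# Legacy single-server form, still accepted and mapped to {"default": ...}:
{"enabled": True, "DEADLINE_REST_URL": "http://localhost:8082"}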
@ -0,0 +1,71 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Collect Deadline servers from instance.
|
||||
|
||||
This resolves the index into the list of servers stored in the `deadlineServers`
instance attribute, or falls back to the default server if that attribute
doesn't exist.
|
||||
|
||||
"""
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
|
||||
"""Collect Deadline Webservice URL from instance."""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
label = "Deadline Webservice from the Instance"
|
||||
families = ["rendering"]
|
||||
|
||||
def process(self, instance):
|
||||
instance.data["deadlineUrl"] = self._collect_deadline_url(instance)
|
||||
self.log.info(
|
||||
"Using {} for submission.".format(instance.data["deadlineUrl"]))
|
||||
|
||||
@staticmethod
|
||||
def _collect_deadline_url(render_instance):
|
||||
# type: (pyblish.api.Instance) -> str
|
||||
"""Get Deadline Webservice URL from render instance.
|
||||
|
||||
This will get all configured Deadline Webservice URLs and create a
subset of them based upon the project configuration. It then takes
`deadlineServers` from the render instance, which is essentially an
`int` index into that list.
|
||||
|
||||
Args:
|
||||
render_instance (pyblish.api.Instance): Render instance created
|
||||
by Creator in Maya.
|
||||
|
||||
Returns:
|
||||
str: Selected Deadline Webservice URL.
|
||||
|
||||
"""
|
||||
|
||||
deadline_settings = (
|
||||
render_instance.context.data
|
||||
["system_settings"]
|
||||
["modules"]
|
||||
["deadline"]
|
||||
)
|
||||
|
||||
try:
|
||||
default_servers = deadline_settings["deadline_urls"]
|
||||
project_servers = (
|
||||
render_instance.context.data
|
||||
["project_settings"]
|
||||
["deadline"]
|
||||
["deadline_servers"]
|
||||
)
|
||||
deadline_servers = {
|
||||
k: default_servers[k]
|
||||
for k in project_servers
|
||||
if k in default_servers
|
||||
}
|
||||
|
||||
except AttributeError:
|
||||
# Handle the situation where only one URL was configured for Deadline.
|
||||
return render_instance.context.data["defaultDeadline"]
|
||||
|
||||
return deadline_servers[
|
||||
list(deadline_servers.keys())[
|
||||
int(render_instance.data.get("deadlineServers"))
|
||||
]
|
||||
]
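To make the selection logic above concrete, here is a minimal stand-alone sketch with illustrative settings (server names and URLs are hypothetical); it mirrors the dictionary intersection and index lookup performed in `_collect_deadline_url`:

# Hypothetical configured servers (system settings) and the subset
# enabled for the project (project settings).
default_servers = {
    "default": "http://127.0.0.1:8082",
    "renderfarm_b": "http://10.0.0.5:8082",
}
project_servers = ["renderfarm_b"]

# Same intersection as in the collector above.
deadline_servers = {
    name: default_servers[name]
    for name in project_servers
    if name in default_servers
}

# `deadlineServers` on the instance is an index into that subset.
deadline_servers_index = 0
url = deadline_servers[list(deadline_servers.keys())[deadline_servers_index]]
print(url)  # http://10.0.0.5:8082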
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+"""Collect default Deadline server."""
+import pyblish.api
+
+
+class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
+    """Collect default Deadline Webservice URL."""
+
+    order = pyblish.api.CollectorOrder + 0.01
+    label = "Default Deadline Webservice"
+
+    def process(self, context):
+        try:
+            deadline_module = context.data.get("openPypeModules")["deadline"]
+        except AttributeError:
+            self.log.error("Cannot get OpenPype Deadline module.")
+            raise AssertionError("OpenPype Deadline module not found.")
+
+        # get default deadline webservice url from deadline module
+        self.log.debug(deadline_module.deadline_urls)
+        context.data["defaultDeadline"] = deadline_module.deadline_urls["default"]  # noqa: E501
@ -264,12 +264,13 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
self._instance = instance
|
||||
self.payload_skeleton = copy.deepcopy(payload_skeleton_template)
|
||||
self._deadline_url = (
|
||||
context.data["system_settings"]
|
||||
["modules"]
|
||||
["deadline"]
|
||||
["DEADLINE_REST_URL"]
|
||||
)
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
self.deadline_url = instance.context.data.get("defaultDeadline")
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
self.deadline_url = instance.data.get("deadlineUrl")
|
||||
assert self.deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
self._job_info = (
|
||||
context.data["project_settings"].get(
|
||||
|
|
@ -287,8 +288,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
"pluginInfo", {})
|
||||
)
|
||||
|
||||
assert self._deadline_url, "Requires DEADLINE_REST_URL"
|
||||
|
||||
context = instance.context
|
||||
workspace = context.data["workspaceDir"]
|
||||
anatomy = context.data['anatomy']
|
||||
|
|
@ -683,7 +682,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
self.log.info(
|
||||
"Submitting tile job(s) [{}] ...".format(len(frame_payloads)))
|
||||
|
||||
url = "{}/api/jobs".format(self._deadline_url)
|
||||
url = "{}/api/jobs".format(self.deadline_url)
|
||||
tiles_count = instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501
|
||||
|
||||
for tile_job in frame_payloads:
|
||||
|
|
@ -767,7 +766,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
self.log.debug(json.dumps(payload, indent=4, sort_keys=True))
|
||||
|
||||
# E.g. http://192.168.0.1:8082/api/jobs
|
||||
url = "{}/api/jobs".format(self._deadline_url)
|
||||
url = "{}/api/jobs".format(self.deadline_url)
|
||||
response = self._requests_post(url, json=payload)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
|
@ -975,7 +974,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
payload = self._get_arnold_export_payload(data)
|
||||
self.log.info("Submitting ass export job.")
|
||||
|
||||
url = "{}/api/jobs".format(self._deadline_url)
|
||||
url = "{}/api/jobs".format(self.deadline_url)
|
||||
response = self._requests_post(url, json=payload)
|
||||
if not response.ok:
|
||||
self.log.error("Submition failed!")
|
||||
@ -42,13 +42,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
node = instance[0]
|
||||
context = instance.context
|
||||
|
||||
deadline_url = (
|
||||
context.data["system_settings"]
|
||||
["modules"]
|
||||
["deadline"]
|
||||
["DEADLINE_REST_URL"]
|
||||
)
|
||||
assert deadline_url, "Requires DEADLINE_REST_URL"
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
self.deadline_url = "{}/api/jobs".format(deadline_url)
|
||||
self._comment = context.data.get("comment", "")
|
||||
|
|
|
|||
|
|
@ -5,7 +5,6 @@ import os
|
|||
import json
|
||||
import re
|
||||
from copy import copy, deepcopy
|
||||
import sys
|
||||
import openpype.api
|
||||
|
||||
from avalon import api, io
|
||||
|
|
@ -615,14 +614,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
instance["families"] = families
|
||||
|
||||
def process(self, instance):
|
||||
# type: (pyblish.api.Instance) -> None
|
||||
"""Process plugin.
|
||||
|
||||
Detect the type of render-farm submission and, in the case of Deadline,
create and post a dependent job. It creates a JSON file with the metadata
needed for publishing in the render output directory.
|
||||
|
||||
:param instance: Instance data
|
||||
:type instance: dict
|
||||
Args:
|
||||
instance (pyblish.api.Instance): Instance data.
|
||||
|
||||
"""
|
||||
data = instance.data.copy()
|
||||
context = instance.context
|
||||
|
|
@ -908,13 +909,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
}
|
||||
|
||||
if submission_type == "deadline":
|
||||
self.deadline_url = (
|
||||
context.data["system_settings"]
|
||||
["modules"]
|
||||
["deadline"]
|
||||
["DEADLINE_REST_URL"]
|
||||
)
|
||||
assert self.deadline_url, "Requires DEADLINE_REST_URL"
|
||||
# get default deadline webservice url from deadline module
|
||||
self.deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
self.deadline_url = instance.data.get("deadlineUrl")
|
||||
assert self.deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
self._submit_deadline_post_job(instance, render_job, instances)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,11 +1,10 @@
|
|||
import pyblish.api
|
||||
|
||||
from avalon.vendor import requests
|
||||
from openpype.plugin import contextplugin_should_run
|
||||
import os
|
||||
|
||||
|
||||
class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
|
||||
class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
|
||||
"""Validate Deadline Web Service is running"""
|
||||
|
||||
label = "Validate Deadline Web Service"
|
||||
|
|
@ -13,18 +12,16 @@ class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
|
|||
hosts = ["maya", "nuke"]
|
||||
families = ["renderlayer"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
# Workaround bug pyblish-base#250
|
||||
if not contextplugin_should_run(self, context):
|
||||
return
|
||||
|
||||
deadline_url = (
|
||||
context.data["system_settings"]
|
||||
["modules"]
|
||||
["deadline"]
|
||||
["DEADLINE_REST_URL"]
|
||||
)
|
||||
def process(self, instance):
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
self.log.info(
|
||||
"We have deadline URL on instance {}".format(
|
||||
deadline_url))
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
# Check response
|
||||
response = self._requests_get(deadline_url)
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import pyblish.api
|
|||
|
||||
from avalon.vendor import requests
|
||||
|
||||
from openpype.api import get_system_settings
|
||||
from openpype.lib.abstract_submit_deadline import requests_get
|
||||
from openpype.lib.delivery import collect_frames
|
||||
|
||||
|
|
@ -22,6 +21,7 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
|
|||
allow_user_override = True
|
||||
|
||||
def process(self, instance):
|
||||
self.instance = instance
|
||||
frame_list = self._get_frame_list(instance.data["render_job_id"])
|
||||
|
||||
for repre in instance.data["representations"]:
|
||||
|
|
@ -129,13 +129,12 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
|
|||
Might be different than job info saved in metadata.json if user
|
||||
manually changes job pre/during rendering.
|
||||
"""
|
||||
deadline_url = (
|
||||
get_system_settings()
|
||||
["modules"]
|
||||
["deadline"]
|
||||
["DEADLINE_REST_URL"]
|
||||
)
|
||||
assert deadline_url, "Requires DEADLINE_REST_URL"
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = self.instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if self.instance.data.get("deadlineUrl"):
|
||||
deadline_url = self.instance.data.get("deadlineUrl")
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
|
||||
try:
|
||||
8
openpype/modules/python_console_interpreter/__init__.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
from .module import (
|
||||
PythonInterpreterAction
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"PythonInterpreterAction",
|
||||
)
|
||||
45
openpype/modules/python_console_interpreter/module.py
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
from .. import PypeModule, ITrayAction
|
||||
|
||||
|
||||
class PythonInterpreterAction(PypeModule, ITrayAction):
|
||||
label = "Console"
|
||||
name = "python_interpreter"
|
||||
admin_action = True
|
||||
|
||||
def initialize(self, modules_settings):
|
||||
self.enabled = True
|
||||
self._interpreter_window = None
|
||||
|
||||
def tray_init(self):
|
||||
self.create_interpreter_window()
|
||||
|
||||
def tray_exit(self):
|
||||
if self._interpreter_window is not None:
|
||||
self._interpreter_window.save_registry()
|
||||
|
||||
def connect_with_modules(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def create_interpreter_window(self):
|
||||
"""Initializa Settings Qt window."""
|
||||
if self._interpreter_window:
|
||||
return
|
||||
|
||||
from openpype.modules.python_console_interpreter.window import (
|
||||
PythonInterpreterWidget
|
||||
)
|
||||
|
||||
self._interpreter_window = PythonInterpreterWidget()
|
||||
|
||||
def on_action_trigger(self):
|
||||
self.show_interpreter_window()
|
||||
|
||||
def show_interpreter_window(self):
|
||||
self.create_interpreter_window()
|
||||
|
||||
if self._interpreter_window.isVisible():
|
||||
self._interpreter_window.activateWindow()
|
||||
self._interpreter_window.raise_()
|
||||
return
|
||||
|
||||
self._interpreter_window.show()
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
from .widgets import (
|
||||
PythonInterpreterWidget
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"PythonInterpreterWidget",
|
||||
)
|
||||
583
openpype/modules/python_console_interpreter/window/widgets.py
Normal file
|
|
@ -0,0 +1,583 @@
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
import collections
|
||||
from code import InteractiveInterpreter
|
||||
|
||||
import appdirs
|
||||
from Qt import QtCore, QtWidgets, QtGui
|
||||
|
||||
from openpype import resources
|
||||
from openpype.style import load_stylesheet
|
||||
from openpype.lib import JSONSettingRegistry
|
||||
|
||||
|
||||
openpype_art = """
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~. .. ~2p. .. .... . .
|
||||
.Ppo . .pPO3Op.. . O:. . . .
|
||||
.3Pp . oP3'. 'P33. . 4 .. . . . .. . . .
|
||||
.~OP 3PO. .Op3 : . .. _____ _____ _____
|
||||
.P3O . oP3oP3O3P' . . . . / /./ /./ /
|
||||
O3:. O3p~ . .:. . ./____/./____/ /____/
|
||||
'P . 3p3. oP3~. ..P:. . . .. . . .. . . .
|
||||
. ': . Po' .Opo'. .3O. . o[ by Pype Club ]]]==- - - . .
|
||||
. '_ .. . . _OP3.. . .https://openpype.io.. .
|
||||
~P3.OPPPO3OP~ . .. .
|
||||
. ' '. . .. . . . .. .
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class PythonInterpreterRegistry(JSONSettingRegistry):
|
||||
"""Class handling OpenPype general settings registry.
|
||||
|
||||
Attributes:
|
||||
vendor (str): Name used for path construction.
|
||||
product (str): Additional name used for path construction.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.vendor = "pypeclub"
|
||||
self.product = "openpype"
|
||||
name = "python_interpreter_tool"
|
||||
path = appdirs.user_data_dir(self.product, self.vendor)
|
||||
super(PythonInterpreterRegistry, self).__init__(name, path)
|
||||
|
||||
|
||||
class StdOEWrap:
|
||||
def __init__(self):
|
||||
self._origin_stdout_write = None
|
||||
self._origin_stderr_write = None
|
||||
self._listening = False
|
||||
self.lines = collections.deque()
|
||||
|
||||
if not sys.stdout:
|
||||
sys.stdout = open(os.devnull, "w")
|
||||
|
||||
if not sys.stderr:
|
||||
sys.stderr = open(os.devnull, "w")
|
||||
|
||||
if self._origin_stdout_write is None:
|
||||
self._origin_stdout_write = sys.stdout.write
|
||||
|
||||
if self._origin_stderr_write is None:
|
||||
self._origin_stderr_write = sys.stderr.write
|
||||
|
||||
self._listening = True
|
||||
sys.stdout.write = self._stdout_listener
|
||||
sys.stderr.write = self._stderr_listener
|
||||
|
||||
def stop_listen(self):
|
||||
self._listening = False
|
||||
|
||||
def _stdout_listener(self, text):
|
||||
if self._listening:
|
||||
self.lines.append(text)
|
||||
if self._origin_stdout_write is not None:
|
||||
self._origin_stdout_write(text)
|
||||
|
||||
def _stderr_listener(self, text):
|
||||
if self._listening:
|
||||
self.lines.append(text)
|
||||
if self._origin_stderr_write is not None:
|
||||
self._origin_stderr_write(text)
|
||||
|
||||
|
||||
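A minimal usage sketch of the `StdOEWrap` wrapper above, outside of the Qt widget: instantiating it swaps in listening `write` methods, so output keeps reaching the original streams while also being buffered in `lines` for the console widget to drain.

wrapper = StdOEWrap()
print("hello")                 # still printed, and buffered in wrapper.lines
wrapper.stop_listen()          # stops buffering, output continues normally
print(list(wrapper.lines))     # ['hello', '\n']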
class PythonCodeEditor(QtWidgets.QPlainTextEdit):
|
||||
execute_requested = QtCore.Signal()
|
||||
|
||||
def __init__(self, parent):
|
||||
super(PythonCodeEditor, self).__init__(parent)
|
||||
|
||||
self.setObjectName("PythonCodeEditor")
|
||||
|
||||
self._indent = 4
|
||||
|
||||
def _tab_shift_right(self):
|
||||
cursor = self.textCursor()
|
||||
selected_text = cursor.selectedText()
|
||||
if not selected_text:
|
||||
cursor.insertText(" " * self._indent)
|
||||
return
|
||||
|
||||
sel_start = cursor.selectionStart()
|
||||
sel_end = cursor.selectionEnd()
|
||||
cursor.setPosition(sel_end)
|
||||
end_line = cursor.blockNumber()
|
||||
cursor.setPosition(sel_start)
|
||||
while True:
|
||||
cursor.movePosition(QtGui.QTextCursor.StartOfLine)
|
||||
text = cursor.block().text()
|
||||
spaces = len(text) - len(text.lstrip(" "))
|
||||
new_spaces = spaces % self._indent
|
||||
if not new_spaces:
|
||||
new_spaces = self._indent
|
||||
|
||||
cursor.insertText(" " * new_spaces)
|
||||
if cursor.blockNumber() == end_line:
|
||||
break
|
||||
|
||||
cursor.movePosition(QtGui.QTextCursor.NextBlock)
|
||||
|
||||
def _tab_shift_left(self):
|
||||
tmp_cursor = self.textCursor()
|
||||
sel_start = tmp_cursor.selectionStart()
|
||||
sel_end = tmp_cursor.selectionEnd()
|
||||
|
||||
cursor = QtGui.QTextCursor(self.document())
|
||||
cursor.setPosition(sel_end)
|
||||
end_line = cursor.blockNumber()
|
||||
cursor.setPosition(sel_start)
|
||||
while True:
|
||||
cursor.movePosition(QtGui.QTextCursor.StartOfLine)
|
||||
text = cursor.block().text()
|
||||
spaces = len(text) - len(text.lstrip(" "))
|
||||
if spaces:
|
||||
spaces_to_remove = (spaces % self._indent) or self._indent
|
||||
if spaces_to_remove > spaces:
|
||||
spaces_to_remove = spaces
|
||||
|
||||
cursor.setPosition(
|
||||
cursor.position() + spaces_to_remove,
|
||||
QtGui.QTextCursor.KeepAnchor
|
||||
)
|
||||
cursor.removeSelectedText()
|
||||
|
||||
if cursor.blockNumber() == end_line:
|
||||
break
|
||||
|
||||
cursor.movePosition(QtGui.QTextCursor.NextBlock)
|
||||
|
||||
def keyPressEvent(self, event):
|
||||
if event.key() == QtCore.Qt.Key_Backtab:
|
||||
self._tab_shift_left()
|
||||
event.accept()
|
||||
return
|
||||
|
||||
if event.key() == QtCore.Qt.Key_Tab:
|
||||
if event.modifiers() == QtCore.Qt.NoModifier:
|
||||
self._tab_shift_right()
|
||||
event.accept()
|
||||
return
|
||||
|
||||
if (
|
||||
event.key() == QtCore.Qt.Key_Return
|
||||
and event.modifiers() == QtCore.Qt.ControlModifier
|
||||
):
|
||||
self.execute_requested.emit()
|
||||
event.accept()
|
||||
return
|
||||
|
||||
super(PythonCodeEditor, self).keyPressEvent(event)
|
||||
|
||||
|
||||
class PythonTabWidget(QtWidgets.QWidget):
|
||||
before_execute = QtCore.Signal(str)
|
||||
|
||||
def __init__(self, parent):
|
||||
super(PythonTabWidget, self).__init__(parent)
|
||||
|
||||
code_input = PythonCodeEditor(self)
|
||||
|
||||
self.setFocusProxy(code_input)
|
||||
|
||||
execute_btn = QtWidgets.QPushButton("Execute", self)
|
||||
execute_btn.setToolTip("Execute command (Ctrl + Enter)")
|
||||
|
||||
btns_layout = QtWidgets.QHBoxLayout()
|
||||
btns_layout.setContentsMargins(0, 0, 0, 0)
|
||||
btns_layout.addStretch(1)
|
||||
btns_layout.addWidget(execute_btn)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.addWidget(code_input, 1)
|
||||
layout.addLayout(btns_layout, 0)
|
||||
|
||||
execute_btn.clicked.connect(self._on_execute_clicked)
|
||||
code_input.execute_requested.connect(self.execute)
|
||||
|
||||
self._code_input = code_input
|
||||
self._interpreter = InteractiveInterpreter()
|
||||
|
||||
def _on_execute_clicked(self):
|
||||
self.execute()
|
||||
|
||||
def get_code(self):
|
||||
return self._code_input.toPlainText()
|
||||
|
||||
def set_code(self, code_text):
|
||||
self._code_input.setPlainText(code_text)
|
||||
|
||||
def execute(self):
|
||||
code_text = self._code_input.toPlainText()
|
||||
self.before_execute.emit(code_text)
|
||||
self._interpreter.runcode(code_text)
|
||||
|
||||
|
||||
class TabNameDialog(QtWidgets.QDialog):
|
||||
default_width = 330
|
||||
default_height = 85
|
||||
|
||||
def __init__(self, parent):
|
||||
super(TabNameDialog, self).__init__(parent)
|
||||
|
||||
self.setWindowTitle("Enter tab name")
|
||||
|
||||
name_label = QtWidgets.QLabel("Tab name:", self)
|
||||
name_input = QtWidgets.QLineEdit(self)
|
||||
|
||||
inputs_layout = QtWidgets.QHBoxLayout()
|
||||
inputs_layout.addWidget(name_label)
|
||||
inputs_layout.addWidget(name_input)
|
||||
|
||||
ok_btn = QtWidgets.QPushButton("Ok", self)
|
||||
cancel_btn = QtWidgets.QPushButton("Cancel", self)
|
||||
btns_layout = QtWidgets.QHBoxLayout()
|
||||
btns_layout.addStretch(1)
|
||||
btns_layout.addWidget(ok_btn)
|
||||
btns_layout.addWidget(cancel_btn)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.addLayout(inputs_layout)
|
||||
layout.addStretch(1)
|
||||
layout.addLayout(btns_layout)
|
||||
|
||||
ok_btn.clicked.connect(self._on_ok_clicked)
|
||||
cancel_btn.clicked.connect(self._on_cancel_clicked)
|
||||
|
||||
self._name_input = name_input
|
||||
self._ok_btn = ok_btn
|
||||
self._cancel_btn = cancel_btn
|
||||
|
||||
self._result = None
|
||||
|
||||
self.resize(self.default_width, self.default_height)
|
||||
|
||||
def set_tab_name(self, name):
|
||||
self._name_input.setText(name)
|
||||
|
||||
def result(self):
|
||||
return self._result
|
||||
|
||||
def showEvent(self, event):
|
||||
super(TabNameDialog, self).showEvent(event)
|
||||
btns_width = max(
|
||||
self._ok_btn.width(),
|
||||
self._cancel_btn.width()
|
||||
)
|
||||
|
||||
self._ok_btn.setMinimumWidth(btns_width)
|
||||
self._cancel_btn.setMinimumWidth(btns_width)
|
||||
|
||||
def _on_ok_clicked(self):
|
||||
self._result = self._name_input.text()
|
||||
self.accept()
|
||||
|
||||
def _on_cancel_clicked(self):
|
||||
self._result = None
|
||||
self.reject()
|
||||
|
||||
|
||||
class OutputTextWidget(QtWidgets.QTextEdit):
|
||||
v_max_offset = 4
|
||||
|
||||
def vertical_scroll_at_max(self):
|
||||
v_scroll = self.verticalScrollBar()
|
||||
return v_scroll.value() > v_scroll.maximum() - self.v_max_offset
|
||||
|
||||
def scroll_to_bottom(self):
|
||||
v_scroll = self.verticalScrollBar()
|
||||
return v_scroll.setValue(v_scroll.maximum())
|
||||
|
||||
|
||||
class EnhancedTabBar(QtWidgets.QTabBar):
|
||||
double_clicked = QtCore.Signal(QtCore.QPoint)
|
||||
right_clicked = QtCore.Signal(QtCore.QPoint)
|
||||
mid_clicked = QtCore.Signal(QtCore.QPoint)
|
||||
|
||||
def __init__(self, parent):
|
||||
super(EnhancedTabBar, self).__init__(parent)
|
||||
|
||||
self.setDrawBase(False)
|
||||
|
||||
def mouseDoubleClickEvent(self, event):
|
||||
self.double_clicked.emit(event.globalPos())
|
||||
event.accept()
|
||||
|
||||
def mouseReleaseEvent(self, event):
|
||||
if event.button() == QtCore.Qt.RightButton:
|
||||
self.right_clicked.emit(event.globalPos())
|
||||
event.accept()
|
||||
return
|
||||
|
||||
elif event.button() == QtCore.Qt.MidButton:
|
||||
self.mid_clicked.emit(event.globalPos())
|
||||
event.accept()
|
||||
|
||||
else:
|
||||
super(EnhancedTabBar, self).mouseReleaseEvent(event)
|
||||
|
||||
|
||||
class PythonInterpreterWidget(QtWidgets.QWidget):
|
||||
default_width = 1000
|
||||
default_height = 600
|
||||
|
||||
def __init__(self, parent=None):
|
||||
super(PythonInterpreterWidget, self).__init__(parent)
|
||||
|
||||
self.setWindowTitle("OpenPype Console")
|
||||
self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath()))
|
||||
|
||||
self.ansi_escape = re.compile(
|
||||
r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]"
|
||||
)
|
||||
|
||||
self._tabs = []
|
||||
|
||||
self._stdout_err_wrapper = StdOEWrap()
|
||||
|
||||
output_widget = OutputTextWidget(self)
|
||||
output_widget.setObjectName("PythonInterpreterOutput")
|
||||
output_widget.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
|
||||
output_widget.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction)
|
||||
|
||||
tab_widget = QtWidgets.QTabWidget(self)
|
||||
tab_bar = EnhancedTabBar(tab_widget)
|
||||
tab_widget.setTabBar(tab_bar)
|
||||
tab_widget.setTabsClosable(False)
|
||||
tab_widget.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
|
||||
|
||||
add_tab_btn = QtWidgets.QPushButton("+", tab_widget)
|
||||
tab_widget.setCornerWidget(add_tab_btn, QtCore.Qt.TopLeftCorner)
|
||||
|
||||
widgets_splitter = QtWidgets.QSplitter(self)
|
||||
widgets_splitter.setOrientation(QtCore.Qt.Vertical)
|
||||
widgets_splitter.addWidget(output_widget)
|
||||
widgets_splitter.addWidget(tab_widget)
|
||||
widgets_splitter.setStretchFactor(0, 1)
|
||||
widgets_splitter.setStretchFactor(1, 1)
|
||||
height = int(self.default_height / 2)
|
||||
widgets_splitter.setSizes([height, self.default_height - height])
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.addWidget(widgets_splitter)
|
||||
|
||||
line_check_timer = QtCore.QTimer()
|
||||
line_check_timer.setInterval(200)
|
||||
|
||||
line_check_timer.timeout.connect(self._on_timer_timeout)
|
||||
add_tab_btn.clicked.connect(self._on_add_clicked)
|
||||
tab_bar.right_clicked.connect(self._on_tab_right_click)
|
||||
tab_bar.double_clicked.connect(self._on_tab_double_click)
|
||||
tab_bar.mid_clicked.connect(self._on_tab_mid_click)
|
||||
tab_widget.tabCloseRequested.connect(self._on_tab_close_req)
|
||||
|
||||
self._widgets_splitter = widgets_splitter
|
||||
self._add_tab_btn = add_tab_btn
|
||||
self._output_widget = output_widget
|
||||
self._tab_widget = tab_widget
|
||||
self._line_check_timer = line_check_timer
|
||||
|
||||
self._append_lines([openpype_art])
|
||||
|
||||
self.setStyleSheet(load_stylesheet())
|
||||
|
||||
self.resize(self.default_width, self.default_height)
|
||||
|
||||
self._init_from_registry()
|
||||
|
||||
if self._tab_widget.count() < 1:
|
||||
self.add_tab("Python")
|
||||
|
||||
def _init_from_registry(self):
|
||||
setting_registry = PythonInterpreterRegistry()
|
||||
|
||||
try:
|
||||
width = setting_registry.get_item("width")
|
||||
height = setting_registry.get_item("height")
|
||||
if width is not None and height is not None:
|
||||
self.resize(width, height)
|
||||
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
try:
|
||||
sizes = setting_registry.get_item("splitter_sizes")
|
||||
if len(sizes) == len(self._widgets_splitter.sizes()):
|
||||
self._widgets_splitter.setSizes(sizes)
|
||||
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
try:
|
||||
tab_defs = setting_registry.get_item("tabs") or []
|
||||
for tab_def in tab_defs:
|
||||
widget = self.add_tab(tab_def["name"])
|
||||
widget.set_code(tab_def["code"])
|
||||
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def save_registry(self):
|
||||
setting_registry = PythonInterpreterRegistry()
|
||||
|
||||
setting_registry.set_item("width", self.width())
|
||||
setting_registry.set_item("height", self.height())
|
||||
|
||||
setting_registry.set_item(
|
||||
"splitter_sizes", self._widgets_splitter.sizes()
|
||||
)
|
||||
|
||||
tabs = []
|
||||
for tab_idx in range(self._tab_widget.count()):
|
||||
widget = self._tab_widget.widget(tab_idx)
|
||||
tab_code = widget.get_code()
|
||||
tab_name = self._tab_widget.tabText(tab_idx)
|
||||
tabs.append({
|
||||
"name": tab_name,
|
||||
"code": tab_code
|
||||
})
|
||||
|
||||
setting_registry.set_item("tabs", tabs)
|
||||
|
||||
def _on_tab_right_click(self, global_point):
|
||||
point = self._tab_widget.mapFromGlobal(global_point)
|
||||
tab_bar = self._tab_widget.tabBar()
|
||||
tab_idx = tab_bar.tabAt(point)
|
||||
last_index = tab_bar.count() - 1
|
||||
if tab_idx < 0 or tab_idx > last_index:
|
||||
return
|
||||
|
||||
menu = QtWidgets.QMenu(self._tab_widget)
|
||||
menu.addAction("Rename")
|
||||
result = menu.exec_(global_point)
|
||||
if result is None:
|
||||
return
|
||||
|
||||
if result.text() == "Rename":
|
||||
self._rename_tab_req(tab_idx)
|
||||
|
||||
def _rename_tab_req(self, tab_idx):
|
||||
dialog = TabNameDialog(self)
|
||||
dialog.set_tab_name(self._tab_widget.tabText(tab_idx))
|
||||
dialog.exec_()
|
||||
tab_name = dialog.result()
|
||||
if tab_name:
|
||||
self._tab_widget.setTabText(tab_idx, tab_name)
|
||||
|
||||
def _on_tab_mid_click(self, global_point):
|
||||
point = self._tab_widget.mapFromGlobal(global_point)
|
||||
tab_bar = self._tab_widget.tabBar()
|
||||
tab_idx = tab_bar.tabAt(point)
|
||||
last_index = tab_bar.count() - 1
|
||||
if tab_idx < 0 or tab_idx > last_index:
|
||||
return
|
||||
|
||||
self._on_tab_close_req(tab_idx)
|
||||
|
||||
def _on_tab_double_click(self, global_point):
|
||||
point = self._tab_widget.mapFromGlobal(global_point)
|
||||
tab_bar = self._tab_widget.tabBar()
|
||||
tab_idx = tab_bar.tabAt(point)
|
||||
last_index = tab_bar.count() - 1
|
||||
if tab_idx < 0 or tab_idx > last_index:
|
||||
return
|
||||
|
||||
self._rename_tab_req(tab_idx)
|
||||
|
||||
def _on_tab_close_req(self, tab_index):
|
||||
if self._tab_widget.count() == 1:
|
||||
return
|
||||
|
||||
widget = self._tab_widget.widget(tab_index)
|
||||
if widget in self._tabs:
|
||||
self._tabs.remove(widget)
|
||||
self._tab_widget.removeTab(tab_index)
|
||||
|
||||
if self._tab_widget.count() == 1:
|
||||
self._tab_widget.setTabsClosable(False)
|
||||
|
||||
def _append_lines(self, lines):
|
||||
at_max = self._output_widget.vertical_scroll_at_max()
|
||||
tmp_cursor = QtGui.QTextCursor(self._output_widget.document())
|
||||
tmp_cursor.movePosition(QtGui.QTextCursor.End)
|
||||
for line in lines:
|
||||
tmp_cursor.insertText(line)
|
||||
|
||||
if at_max:
|
||||
self._output_widget.scroll_to_bottom()
|
||||
|
||||
def _on_timer_timeout(self):
|
||||
if self._stdout_err_wrapper.lines:
|
||||
lines = []
|
||||
while self._stdout_err_wrapper.lines:
|
||||
line = self._stdout_err_wrapper.lines.popleft()
|
||||
lines.append(self.ansi_escape.sub("", line))
|
||||
self._append_lines(lines)
|
||||
|
||||
def _on_add_clicked(self):
|
||||
dialog = TabNameDialog(self)
|
||||
dialog.exec_()
|
||||
tab_name = dialog.result()
|
||||
if tab_name:
|
||||
self.add_tab(tab_name)
|
||||
|
||||
def _on_before_execute(self, code_text):
|
||||
at_max = self._output_widget.vertical_scroll_at_max()
|
||||
document = self._output_widget.document()
|
||||
tmp_cursor = QtGui.QTextCursor(document)
|
||||
tmp_cursor.movePosition(QtGui.QTextCursor.End)
|
||||
tmp_cursor.insertText("{}\nExecuting command:\n".format(20 * "-"))
|
||||
|
||||
code_block_format = QtGui.QTextFrameFormat()
|
||||
code_block_format.setBackground(QtGui.QColor(27, 27, 27))
|
||||
code_block_format.setPadding(4)
|
||||
|
||||
tmp_cursor.insertFrame(code_block_format)
|
||||
char_format = tmp_cursor.charFormat()
|
||||
char_format.setForeground(
|
||||
QtGui.QBrush(QtGui.QColor(114, 224, 198))
|
||||
)
|
||||
tmp_cursor.setCharFormat(char_format)
|
||||
tmp_cursor.insertText(code_text)
|
||||
|
||||
# Create new cursor
|
||||
tmp_cursor = QtGui.QTextCursor(document)
|
||||
tmp_cursor.movePosition(QtGui.QTextCursor.End)
|
||||
tmp_cursor.insertText("{}\n".format(20 * "-"))
|
||||
|
||||
if at_max:
|
||||
self._output_widget.scroll_to_bottom()
|
||||
|
||||
def add_tab(self, tab_name, index=None):
|
||||
widget = PythonTabWidget(self)
|
||||
widget.before_execute.connect(self._on_before_execute)
|
||||
if index is None:
|
||||
if self._tab_widget.count() > 0:
|
||||
index = self._tab_widget.currentIndex() + 1
|
||||
else:
|
||||
index = 0
|
||||
|
||||
self._tabs.append(widget)
|
||||
self._tab_widget.insertTab(index, widget, tab_name)
|
||||
self._tab_widget.setCurrentIndex(index)
|
||||
|
||||
if self._tab_widget.count() > 1:
|
||||
self._tab_widget.setTabsClosable(True)
|
||||
widget.setFocus()
|
||||
return widget
|
||||
|
||||
def showEvent(self, event):
|
||||
self._line_check_timer.start()
|
||||
super(PythonInterpreterWidget, self).showEvent(event)
|
||||
self._output_widget.scroll_to_bottom()
|
||||
|
||||
def closeEvent(self, event):
|
||||
self.save_registry()
|
||||
super(PythonInterpreterWidget, self).closeEvent(event)
|
||||
self._line_check_timer.stop()
|
||||
15
openpype/plugins/publish/collect_modules.py
Normal file
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+"""Collect OpenPype modules."""
+from openpype.modules import ModulesManager
+import pyblish.api
+
+
+class CollectModules(pyblish.api.ContextPlugin):
+    """Collect OpenPype modules."""
+
+    order = pyblish.api.CollectorOrder
+    label = "OpenPype Modules"
+
+    def process(self, context):
+        manager = ModulesManager()
+        context.data["openPypeModules"] = manager.modules_by_name
@ -1,4 +1,5 @@
|
|||
{
|
||||
"deadline_servers": [],
|
||||
"publish": {
|
||||
"ValidateExpectedFiles": {
|
||||
"enabled": true,
|
||||
|
|
|
|||
|
|
@ -44,6 +44,12 @@
|
|||
"Main"
|
||||
]
|
||||
},
|
||||
"CreateRender": {
|
||||
"enabled": true,
|
||||
"defaults": [
|
||||
"Main"
|
||||
]
|
||||
},
|
||||
"CreateAnimation": {
|
||||
"enabled": true,
|
||||
"defaults": [
|
||||
|
|
@ -94,12 +100,6 @@
|
|||
"Main"
|
||||
]
|
||||
},
|
||||
"CreateRender": {
|
||||
"enabled": true,
|
||||
"defaults": [
|
||||
"Main"
|
||||
]
|
||||
},
|
||||
"CreateRenderSetup": {
|
||||
"enabled": true,
|
||||
"defaults": [
|
||||
|
|
|
|||
|
|
@ -140,7 +140,9 @@
|
|||
},
|
||||
"deadline": {
|
||||
"enabled": true,
|
||||
"DEADLINE_REST_URL": "http://localhost:8082"
|
||||
"deadline_urls": {
|
||||
"default": "http://127.0.0.1:8082"
|
||||
}
|
||||
},
|
||||
"muster": {
|
||||
"enabled": false,
|
||||
|
|
|
|||
|
|
@ -105,7 +105,8 @@ from .enum_entity import (
|
|||
AppsEnumEntity,
|
||||
ToolsEnumEntity,
|
||||
TaskTypeEnumEntity,
|
||||
ProvidersEnum
|
||||
ProvidersEnum,
|
||||
DeadlineUrlEnumEntity
|
||||
)
|
||||
|
||||
from .list_entity import ListEntity
|
||||
|
|
@ -160,6 +161,7 @@ __all__ = (
|
|||
"ToolsEnumEntity",
|
||||
"TaskTypeEnumEntity",
|
||||
"ProvidersEnum",
|
||||
"DeadlineUrlEnumEntity",
|
||||
|
||||
"ListEntity",
|
||||
|
||||
|
|
|
|||
|
|
@ -443,3 +443,54 @@ class ProvidersEnum(BaseEnumEntity):
|
|||
self._current_value = value_on_not_set
|
||||
|
||||
self.value_on_not_set = value_on_not_set
|
||||
|
||||
|
||||
class DeadlineUrlEnumEntity(BaseEnumEntity):
|
||||
schema_types = ["deadline_url-enum"]
|
||||
|
||||
def _item_initalization(self):
|
||||
self.multiselection = self.schema_data.get("multiselection", True)
|
||||
|
||||
self.enum_items = []
|
||||
self.valid_keys = set()
|
||||
|
||||
if self.multiselection:
|
||||
self.valid_value_types = (list,)
|
||||
self.value_on_not_set = []
|
||||
else:
|
||||
for key in self.valid_keys:
|
||||
if self.value_on_not_set is NOT_SET:
|
||||
self.value_on_not_set = key
|
||||
break
|
||||
|
||||
self.valid_value_types = (STRING_TYPE,)
|
||||
|
||||
# GUI attribute
|
||||
self.placeholder = self.schema_data.get("placeholder")
|
||||
|
||||
def _get_enum_values(self):
|
||||
system_settings_entity = self.get_entity_from_path("system_settings")
|
||||
|
||||
valid_keys = set()
|
||||
enum_items_list = []
|
||||
deadline_urls_entity = (
|
||||
system_settings_entity
|
||||
["modules"]
|
||||
["deadline"]
|
||||
["deadline_urls"]
|
||||
)
|
||||
for server_name, url_entity in deadline_urls_entity.items():
|
||||
enum_items_list.append(
|
||||
{server_name: "{}: {}".format(server_name, url_entity.value)})
|
||||
valid_keys.add(server_name)
|
||||
return enum_items_list, valid_keys
|
||||
|
||||
def set_override_state(self, *args, **kwargs):
|
||||
super(DeadlineUrlEnumEntity, self).set_override_state(*args, **kwargs)
|
||||
|
||||
self.enum_items, self.valid_keys = self._get_enum_values()
|
||||
new_value = []
|
||||
for key in self._current_value:
|
||||
if key in self.valid_keys:
|
||||
new_value.append(key)
|
||||
self._current_value = new_value
|
||||
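For clarity, `_get_enum_values` above yields the enum items as a list of single-key dictionaries mapping each server name to its display label, together with the set of valid keys; server names and URLs here are illustrative:

enum_items_list = [
    {"default": "default: http://127.0.0.1:8082"},
    {"renderfarm_b": "renderfarm_b: http://10.0.0.5:8082"},
]
valid_keys = {"default", "renderfarm_b"}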
@ -5,6 +5,12 @@
|
|||
"collapsible": true,
|
||||
"is_file": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "deadline_url-enum",
|
||||
"key": "deadline_servers",
|
||||
"label": "Deadline Webservice URLs",
|
||||
"multiselect": true
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
|
|
|
|||
|
|
@ -29,6 +29,26 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "CreateRender",
|
||||
"label": "Create Render",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "list",
|
||||
"key": "defaults",
|
||||
"label": "Default Subsets",
|
||||
"object_type": "text"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "schema_template",
|
||||
"name": "template_create_plugin",
|
||||
|
|
@ -65,10 +85,6 @@
|
|||
"key": "CreatePointCache",
|
||||
"label": "Create Cache"
|
||||
},
|
||||
{
|
||||
"key": "CreateRender",
|
||||
"label": "Create Render"
|
||||
},
|
||||
{
|
||||
"key": "CreateRenderSetup",
|
||||
"label": "Create Render Setup"
|
||||
|
|
|
|||
|
|
@ -130,9 +130,11 @@
|
|||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "DEADLINE_REST_URL",
|
||||
"label": "Deadline Resl URL"
|
||||
"type": "dict-modifiable",
|
||||
"object_type": "text",
|
||||
"key": "deadline_urls",
|
||||
"required_keys": ["default"],
|
||||
"label": "Deadline Webservice URLs"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
|||
|
|
@ -65,6 +65,7 @@ def _load_font():
|
|||
font_dirs = []
|
||||
font_dirs.append(os.path.join(fonts_dirpath, "Montserrat"))
|
||||
font_dirs.append(os.path.join(fonts_dirpath, "Spartan"))
|
||||
font_dirs.append(os.path.join(fonts_dirpath, "RobotoMono", "static"))
|
||||
|
||||
loaded_fonts = []
|
||||
for font_dir in font_dirs:
|
||||
202
openpype/style/fonts/RobotoMono/LICENSE.txt
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
77  openpype/style/fonts/RobotoMono/README.txt  Normal file
@ -0,0 +1,77 @@
Roboto Mono Variable Font
=========================

This download contains Roboto Mono as both variable fonts and static fonts.

Roboto Mono is a variable font with this axis:
  wght

This means all the styles are contained in these files:
  RobotoMono-VariableFont_wght.ttf
  RobotoMono-Italic-VariableFont_wght.ttf

If your app fully supports variable fonts, you can now pick intermediate styles
that aren’t available as static fonts. Not all apps support variable fonts, and
in those cases you can use the static font files for Roboto Mono:
  static/RobotoMono-Thin.ttf
  static/RobotoMono-ExtraLight.ttf
  static/RobotoMono-Light.ttf
  static/RobotoMono-Regular.ttf
  static/RobotoMono-Medium.ttf
  static/RobotoMono-SemiBold.ttf
  static/RobotoMono-Bold.ttf
  static/RobotoMono-ThinItalic.ttf
  static/RobotoMono-ExtraLightItalic.ttf
  static/RobotoMono-LightItalic.ttf
  static/RobotoMono-Italic.ttf
  static/RobotoMono-MediumItalic.ttf
  static/RobotoMono-SemiBoldItalic.ttf
  static/RobotoMono-BoldItalic.ttf

Get started
-----------

1. Install the font files you want to use

2. Use your app's font picker to view the font family and all the
available styles

Learn more about variable fonts
-------------------------------

  https://developers.google.com/web/fundamentals/design-and-ux/typography/variable-fonts
  https://variablefonts.typenetwork.com
  https://medium.com/variable-fonts

  In desktop apps

  https://theblog.adobe.com/can-variable-fonts-illustrator-cc
  https://helpx.adobe.com/nz/photoshop/using/fonts.html#variable_fonts

  Online

  https://developers.google.com/fonts/docs/getting_started
  https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Fonts/Variable_Fonts_Guide
  https://developer.microsoft.com/en-us/microsoft-edge/testdrive/demos/variable-fonts

  Installing fonts

  MacOS: https://support.apple.com/en-us/HT201749
  Linux: https://www.google.com/search?q=how+to+install+a+font+on+gnu%2Blinux
  Windows: https://support.microsoft.com/en-us/help/314960/how-to-install-or-remove-a-font-in-windows

  Android Apps

  https://developers.google.com/fonts/docs/android
  https://developer.android.com/guide/topics/ui/look-and-feel/downloadable-fonts

License
-------
Please read the full license text (LICENSE.txt) to understand the permissions,
restrictions and requirements for usage, redistribution, and modification.

You can use them freely in your products & projects - print or digital,
commercial or otherwise.

This isn't legal advice, please consider consulting a lawyer and see the full
license for all details.
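The README above covers installing the fonts at the operating-system level. In a Qt-based tool like this repository's, the bundled files can also be registered at runtime so that font-family: "Roboto Mono" resolves in a stylesheet without a system install. The sketch below is illustrative only: the PySide2 binding and the fonts directory path are assumptions, not the project's actual loader.

# Minimal sketch (assumptions: PySide2 is available and fonts_dir points at
# the bundled static fonts). This is not the repository's actual loader.
import os

from PySide2 import QtGui, QtWidgets

app = QtWidgets.QApplication([])

fonts_dir = os.path.join("openpype", "style", "fonts", "RobotoMono", "static")
for filename in sorted(os.listdir(fonts_dir)):
    if not filename.lower().endswith(".ttf"):
        continue
    # Register the font with Qt; returns -1 when the file could not be loaded.
    font_id = QtGui.QFontDatabase.addApplicationFont(
        os.path.join(fonts_dir, filename)
    )
    if font_id == -1:
        print("Could not register font: {}".format(filename))
    else:
        # Family names under which the font is now available to stylesheets.
        print(QtGui.QFontDatabase.applicationFontFamilies(font_id))

Once registered this way, "Roboto Mono" can be referenced from QSS exactly as in the stylesheet changes further below.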
BIN  openpype/style/fonts/RobotoMono/RobotoMono-VariableFont_wght.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-Bold.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-BoldItalic.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-ExtraLight.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-Italic.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-Light.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-Medium.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-Regular.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-SemiBold.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-Thin.ttf  Normal file  (binary file not shown)
BIN  openpype/style/fonts/RobotoMono/static/RobotoMono-ThinItalic.ttf  Normal file  (binary file not shown)
@ -271,37 +271,38 @@ QTabWidget::tab-bar {
}

QTabBar::tab {
    border-top-left-radius: 4px;
    border-top-right-radius: 4px;
    padding: 5px;

    border-left: 3px solid transparent;
    border-top: 1px solid {color:border};
    border-right: 1px solid {color:border};
    background: qlineargradient(
        x1: 0, y1: 1, x2: 0, y2: 0,
        stop: 0.5 {color:bg}, stop: 1.0 {color:bg-inputs}
    );
}

QTabBar::tab:selected {
    background: {color:grey-lighter};
    /* background: qradialgradient(
        cx:0.5, cy:0.5, radius: 2,
        fx:0.5, fy:1,
        stop:0.3 {color:bg}, stop:1 white
    ) */
    /* background: qlineargradient(
        x1: 0, y1: 0, x2: 0, y2: 1,
        stop: 0 {color:bg-inputs}, stop: 1.0 {color:bg}
    ); */
    border-left: 3px solid {color:border-focus};
    background: qlineargradient(
        x1: 0, y1: 1, x2: 0, y2: 0,
        stop: 0.5 {color:bg}, stop: 1.0 {color:border}
    );
}

QTabBar::tab:!selected {
    /* Make it smaller */
    margin-top: 3px;
    background: {color:grey-light};
}

QTabBar::tab:!selected:hover {
    background: {color:grey-lighter};
}

QTabBar::tab:first {
    border-left: 1px solid {color:border};
}
QTabBar::tab:first:selected {
    margin-left: 0;
    border-left: 3px solid {color:border-focus};
}

QTabBar::tab:last:selected {
@ -623,3 +624,8 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical {
    border: 1px solid {color:border};
    border-radius: 0.1em;
}

/* Python console interpreter */
#PythonInterpreterOutput, #PythonCodeEditor {
    font-family: "Roboto Mono";
}
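The stylesheet hunks above use {color:...} placeholders rather than literal values, so the QSS has to be expanded before it is handed to Qt. The snippet below is a hypothetical sketch of that expansion step: the COLORS table, its hex values, and the style.css path are illustrative placeholders, not the project's actual color configuration.

# Hypothetical pre-processing step for the {color:...} tokens seen above.
# The color names/values and the stylesheet path are illustrative only.
import re

COLORS = {
    "bg": "#2C313A",
    "bg-inputs": "#21252B",
    "border": "#3E4450",
    "border-focus": "#5285A6",
    "grey-light": "#434A56",
    "grey-lighter": "#4D5565",
}


def fill_color_tokens(qss_text, colors):
    """Replace every {color:<name>} token with its configured value."""
    def _replace(match):
        # Leave unknown tokens untouched instead of raising.
        return colors.get(match.group(1), match.group(0))

    return re.sub(r"\{color:([A-Za-z0-9_-]+)\}", _replace, qss_text)


with open("openpype/style/style.css", "r") as stream:
    stylesheet = fill_color_tokens(stream.read(), COLORS)
# The expanded string would then be applied with QApplication.setStyleSheet().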
@ -103,12 +103,19 @@ def create_asset_id_hash(nodes):
    """
    node_id_hash = defaultdict(list)
    for node in nodes:
        value = lib.get_id(node)
        if value is None:
            continue
        # iterate over content of reference node
        if cmds.nodeType(node) == "reference":
            ref_hashes = create_asset_id_hash(
                cmds.referenceQuery(node, nodes=True))
            for asset_id, ref_nodes in ref_hashes.items():
                node_id_hash[asset_id] += ref_nodes
        else:
            value = lib.get_id(node)
            if value is None:
                continue

        asset_id = value.split(":")[0]
        node_id_hash[asset_id].append(node)
            asset_id = value.split(":")[0]
            node_id_hash[asset_id].append(node)

    return dict(node_id_hash)
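For context, a hypothetical call of the updated function: assuming a Maya scene whose nodes carry cbId attributes readable by lib.get_id, it now also resolves the members of referenced files through the recursive reference-node branch shown above.

# Hypothetical usage sketch of create_asset_id_hash (assumes a running Maya
# session and nodes with cbId attributes).
from maya import cmds

grouped = create_asset_id_hash(cmds.ls(long=True))
for asset_id, asset_nodes in grouped.items():
    print("{}: {} node(s)".format(asset_id, len(asset_nodes)))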
@ -135,18 +142,19 @@ def create_items_from_nodes(nodes):
    id_hashes = create_asset_id_hash(nodes)

    # get ids from alembic
    vray_proxy_nodes = cmds.ls(nodes, type="VRayProxy")
    for vp in vray_proxy_nodes:
        path = cmds.getAttr("{}.fileName".format(vp))
        ids = vray_proxies.get_alembic_ids_cache(path)
        parent_id = {}
        for k, _ in ids.items():
            pid = k.split(":")[0]
            if not parent_id.get(pid):
                parent_id.update({pid: [vp]})
    if cmds.pluginInfo('vrayformaya', query=True, loaded=True):
        vray_proxy_nodes = cmds.ls(nodes, type="VRayProxy")
        for vp in vray_proxy_nodes:
            path = cmds.getAttr("{}.fileName".format(vp))
            ids = vray_proxies.get_alembic_ids_cache(path)
            parent_id = {}
            for k, _ in ids.items():
                pid = k.split(":")[0]
                if not parent_id.get(pid):
                    parent_id.update({pid: [vp]})

        print("Adding ids from alembic {}".format(path))
        id_hashes.update(parent_id)
            print("Adding ids from alembic {}".format(path))
            id_hashes.update(parent_id)

    if not id_hashes:
        return asset_view_items
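The guard added above only touches VRayProxy nodes when the V-Ray plugin is loaded, which avoids errors on machines where that node type is unknown. Below is a small standalone sketch of the same pattern with an optional attempt to load the plugin first; the helper name is illustrative and not part of the repository.

# Sketch of the plugin-guard pattern used above. ensure_vray_loaded is an
# illustrative helper name, not project code.
from maya import cmds


def ensure_vray_loaded():
    """Return True when the vrayformaya plugin is, or becomes, loaded."""
    if cmds.pluginInfo("vrayformaya", query=True, loaded=True):
        return True
    try:
        # Quietly try to load V-Ray for Maya; fails where it is not installed.
        cmds.loadPlugin("vrayformaya", quiet=True)
    except RuntimeError:
        return False
    return cmds.pluginInfo("vrayformaya", query=True, loaded=True)


if ensure_vray_loaded():
    print(cmds.ls(type="VRayProxy"))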