Merge branch 'develop' into feature/OP-3926_gpu-cache

# Conflicts:
#	openpype/settings/defaults/project_settings/maya.json
#	openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json
Commit 7fb5242fca by Toke Stuart Jepsen, 2023-03-31 16:20:08 +01:00
174 changed files with 1566 additions and 794 deletions

View file

@@ -69,7 +69,7 @@ def _resolution_from_document(doc):
resolution_width = doc["data"].get("resolution_width")
resolution_height = doc["data"].get("resolution_height")
# Make sure both width and heigh are set
# Make sure both width and height are set
if resolution_width is None or resolution_height is None:
cmds.warning(
"No resolution information found for \"{}\"".format(doc["name"])

View file

@@ -2478,8 +2478,8 @@ def load_capture_preset(data=None):
float(value[2]) / 255
]
disp_options[key] = value
else:
disp_options['displayGradient'] = True
elif key == "displayGradient":
disp_options[key] = value
options['display_options'] = disp_options

View file

@@ -339,7 +339,7 @@ class ARenderProducts:
aov_tokens = ["<aov>", "<renderpass>"]
def match_last(tokens, text):
"""regex match the last occurence from a list of tokens"""
"""regex match the last occurrence from a list of tokens"""
pattern = "(?:.*)({})".format("|".join(tokens))
return re.search(pattern, text, re.IGNORECASE)
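A minimal sketch of how `match_last` behaves (hypothetical prefix string; the greedy `(?:.*)` pushes the capture onto the last token present):

import re

tokens = ["<aov>", "<renderpass>"]
pattern = "(?:.*)({})".format("|".join(tokens))
# The greedy (?:.*) consumes as much as possible first, so the group
# captures the final token occurrence in the string.
match = re.search(pattern, "maya/<scene>/<aov>/beauty_<aov>", re.IGNORECASE)
print(match.group(1))  # -> "<aov>", matched at the last occurrence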
@@ -1051,7 +1051,7 @@ class RenderProductsRedshift(ARenderProducts):
def get_files(self, product):
# When outputting AOVs we need to replace Redshift specific AOV tokens
# with Maya render tokens for generating file sequences. We validate to
# a specific AOV fileprefix so we only need to accout for one
# a specific AOV fileprefix so we only need to account for one
# replacement.
if not product.multipart and product.driver:
file_prefix = self._get_attr(product.driver + ".filePrefix")

View file

@@ -33,7 +33,7 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
get_template_preset implementation)
Returns:
bool: Wether the template was succesfully imported or not
bool: Whether the template was successfully imported or not
"""
if cmds.objExists(PLACEHOLDER_SET):
@@ -116,7 +116,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
placeholder_name_parts = placeholder_data["builder_type"].split("_")
pos = 1
# add famlily in any
# add family in any
placeholder_family = placeholder_data["family"]
if placeholder_family:
placeholder_name_parts.insert(pos, placeholder_family)

View file

@@ -12,6 +12,7 @@ class CreateLook(plugin.Creator):
family = "look"
icon = "paint-brush"
make_tx = True
rs_tex = False
def __init__(self, *args, **kwargs):
super(CreateLook, self).__init__(*args, **kwargs)
@@ -20,7 +21,8 @@ class CreateLook(plugin.Creator):
# Whether to automatically convert the textures to .tx upon publish.
self.data["maketx"] = self.make_tx
# Whether to automatically convert the textures to .rstex upon publish.
self.data["rstex"] = self.rs_tex
# Enable users to force a copy.
# - on Windows is "forceCopy" always changed to `True` because of
# windows implementation of hardlinks

View file

@@ -118,7 +118,7 @@ class ImportMayaLoader(load.LoaderPlugin):
"clean_import",
label="Clean import",
default=False,
help="Should all occurences of cbId be purged?"
help="Should all occurrences of cbId be purged?"
)
]

View file

@@ -180,7 +180,7 @@ class ArnoldStandinLoader(load.LoaderPlugin):
proxy_basename, proxy_path = self._get_proxy_path(path)
# Whether there is a proxy or not, we still update the string operator.
# If no proxy exists, the string operator wont replace anything.
# If no proxy exists, the string operator won't replace anything.
cmds.setAttr(
string_replace_operator + ".match",
"resources/" + proxy_basename,

View file

@@ -1,4 +1,6 @@
import os
import difflib
import contextlib
from maya import cmds
from openpype.settings import get_project_settings
@@ -8,7 +10,82 @@ from openpype.pipeline.create import (
get_legacy_creator_by_name,
)
import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api.lib import maintained_selection
from openpype.hosts.maya.api.lib import (
maintained_selection,
get_container_members
)
@contextlib.contextmanager
def preserve_modelpanel_cameras(container, log=None):
"""Preserve camera members of container in the modelPanels.
This is used to ensure a camera remains in the modelPanels after updating
to a new version.
"""
# Get the modelPanels that used the old camera
members = get_container_members(container)
old_cameras = set(cmds.ls(members, type="camera", long=True))
if not old_cameras:
# No need to manage anything
yield
return
panel_cameras = {}
for panel in cmds.getPanel(type="modelPanel"):
cam = cmds.ls(cmds.modelPanel(panel, query=True, camera=True),
long=True)
# Often but not always maya returns the transform from the
# modelPanel as opposed to the camera shape, so we convert it
# to explicitly be the camera shape
if cmds.nodeType(cam) != "camera":
cam = cmds.listRelatives(cam,
children=True,
fullPath=True,
type="camera")[0]
if cam in old_cameras:
panel_cameras[panel] = cam
if not panel_cameras:
# No need to manage anything
yield
return
try:
yield
finally:
new_members = get_container_members(container)
new_cameras = set(cmds.ls(new_members, type="camera", long=True))
if not new_cameras:
return
for panel, cam_name in panel_cameras.items():
new_camera = None
if cam_name in new_cameras:
new_camera = cam_name
elif len(new_cameras) == 1:
new_camera = next(iter(new_cameras))
else:
# Multiple cameras in the updated container but not an exact
# match detected by name. Find the closest match
matches = difflib.get_close_matches(word=cam_name,
possibilities=new_cameras,
n=1)
if matches:
new_camera = matches[0] # best match
if log:
log.info("Camera in '{}' restored with "
"closest match camera: {} (before: {})"
.format(panel, new_camera, cam_name))
if not new_camera:
# Unable to find the camera to re-apply in the modelpanel
continue
cmds.modelPanel(panel, edit=True, camera=new_camera)
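A hedged usage sketch of the context manager above; `update_container` is a hypothetical stand-in for whatever performs the version switch:

# Panels looking through one of the container's cameras before the
# update are re-pointed at the matching (or closest-named) camera after.
with preserve_modelpanel_cameras(container, log=None):
    update_container(container, representation)  # hypothetical updater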
class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
@@ -68,6 +145,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
new_nodes = (list(set(nodes) - set(shapes)))
# if there are cameras, try to lock their transforms
self._lock_camera_transforms(new_nodes)
current_namespace = pm.namespaceInfo(currentNamespace=True)
if current_namespace != ":":
@@ -136,6 +216,15 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
def switch(self, container, representation):
self.update(container, representation)
def update(self, container, representation):
with preserve_modelpanel_cameras(container, log=self.log):
super(ReferenceLoader, self).update(container, representation)
# We also want to lock camera transforms on any new cameras in the
# reference or for a camera which might have changed names.
members = get_container_members(container)
self._lock_camera_transforms(members)
def _post_process_rig(self, name, namespace, context, options):
output = next((node for node in self if
@@ -168,3 +257,18 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
options={"useSelection": True},
data={"dependencies": dependency}
)
def _lock_camera_transforms(self, nodes):
cameras = cmds.ls(nodes, type="camera")
if not cameras:
return
# Check the Maya version; lockTransform was introduced in
# Maya 2016.5 Ext 2
version = int(cmds.about(version=True))
if version >= 2016:
for camera in cameras:
cmds.camera(camera, edit=True, lockTransform=True)
else:
self.log.warning("This version of Maya does not support locking of"
" transforms of cameras.")

View file

@@ -255,7 +255,7 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin):
Searches through the overrides finding all material overrides. From there
it extracts the shading group and then finds all texture files in the
shading group network. It also checks for mipmap versions of texture files
and adds them to the resouces to get published.
and adds them to the resources to get published.
"""

View file

@@ -5,6 +5,7 @@ import pyblish.api
from openpype.client import get_subset_by_name
from openpype.pipeline import legacy_io
from openpype.hosts.maya.api.lib import get_attribute_input
class CollectReview(pyblish.api.InstancePlugin):
@@ -146,3 +147,21 @@ class CollectReview(pyblish.api.InstancePlugin):
"filename": node.filename.get()
}
)
# Collect focal length.
attr = camera + ".focalLength"
focal_length = None
if get_attribute_input(attr):
start = instance.data["frameStart"]
end = instance.data["frameEnd"] + 1
focal_length = [
cmds.getAttr(attr, time=t) for t in range(int(start), int(end))
]
else:
focal_length = cmds.getAttr(attr)
key = "focalLength"
try:
instance.data["burninDataMembers"][key] = focal_length
except KeyError:
instance.data["burninDataMembers"] = {key: focal_length}

View file

@@ -1,33 +1,42 @@
# -*- coding: utf-8 -*-
"""Maya look extractor."""
import os
import json
import tempfile
import platform
import contextlib
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from maya import cmds # noqa
import contextlib
import json
import logging
import os
import platform
import tempfile
import six
import attr
import pyblish.api
from openpype.lib import source_hash, run_subprocess
from openpype.pipeline import legacy_io, publish
from maya import cmds # noqa
from openpype.lib.vendor_bin_utils import find_executable
from openpype.lib import source_hash, run_subprocess, get_oiio_tools_path
from openpype.pipeline import legacy_io, publish, KnownPublishError
from openpype.hosts.maya.api import lib
from openpype.hosts.maya.api.lib import image_info, guess_colorspace
# Modes for transfer
COPY = 1
HARDLINK = 2
def _has_arnold():
"""Return whether the arnold package is available and can be imported."""
try:
import arnold # noqa: F401
return True
except (ImportError, ModuleNotFoundError):
return False
@attr.s
class TextureResult:
"""The resulting texture of a processed file for a resource"""
# Path to the file
path = attr.ib()
# Colorspace of the resulting texture. This might not be the input
# colorspace of the texture if a TextureProcessor has processed the file.
colorspace = attr.ib()
# Hash generated for the texture using openpype.lib.source_hash
file_hash = attr.ib()
# The transfer mode, e.g. COPY or HARDLINK
transfer_mode = attr.ib()
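For illustration, a hypothetical `TextureResult` as a processor might return it (all values made up):

result = TextureResult(
    path="/staging/resources/diffuse.tx",  # processed file on disk
    colorspace="ACEScg",                   # colorspace of the result
    file_hash="d41d8cd98f00b204",          # from openpype.lib.source_hash
    transfer_mode=COPY,                    # COPY or HARDLINK
)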
def find_paths_by_hash(texture_hash):
@@ -46,61 +55,6 @@ def find_paths_by_hash(texture_hash):
return legacy_io.distinct(key, {"type": "version"})
def maketx(source, destination, args, logger):
"""Make `.tx` using `maketx` with some default settings.
The settings are based on default as used in Arnold's
txManager in the scene.
This function requires the `maketx` executable to be
on the `PATH`.
Args:
source (str): Path to source file.
destination (str): Writing destination path.
args (list): Additional arguments for `maketx`.
logger (logging.Logger): Logger to log messages to.
Returns:
str: Output of `maketx` command.
"""
from openpype.lib import get_oiio_tools_path
maketx_path = get_oiio_tools_path("maketx")
if not maketx_path:
print(
"OIIO tool not found in {}".format(maketx_path))
raise AssertionError("OIIO tool not found")
subprocess_args = [
maketx_path,
"-v", # verbose
"-u", # update mode
# unpremultiply before conversion (recommended when alpha present)
"--unpremult",
"--checknan",
# use oiio-optimized settings for tile-size, planarconfig, metadata
"--oiio",
"--filter", "lanczos3",
source
]
subprocess_args.extend(args)
subprocess_args.extend(["-o", destination])
cmd = " ".join(subprocess_args)
logger.debug(cmd)
try:
out = run_subprocess(subprocess_args)
except Exception:
logger.error("Maketx converion failed", exc_info=True)
raise
return out
@contextlib.contextmanager
def no_workspace_dir():
"""Force maya to a fake temporary workspace directory.
@@ -133,6 +87,303 @@ def no_workspace_dir():
os.rmdir(fake_workspace_dir)
@six.add_metaclass(ABCMeta)
class TextureProcessor:
extension = None
def __init__(self, log=None):
if log is None:
log = logging.getLogger(self.__class__.__name__)
self.log = log
def apply_settings(self, system_settings, project_settings):
"""Apply OpenPype system/project settings to the TextureProcessor
Args:
system_settings (dict): OpenPype system settings
project_settings (dict): OpenPype project settings
Returns:
None
"""
pass
@abstractmethod
def process(self,
source,
colorspace,
color_management,
staging_dir):
"""Process the `source` texture.
Must be implemented on inherited class.
This must always return a TextureResult even when it does not generate
a texture. If it doesn't generate a texture then it should return a
TextureResult using the input path and colorspace.
Args:
source (str): Path to source file.
colorspace (str): Colorspace of the source file.
color_management (dict): Maya Color management data from
`lib.get_color_management_preferences`
staging_dir (str): Output directory to write to.
Returns:
TextureResult: The resulting texture information.
"""
pass
def __repr__(self):
# Log instance as class name
return self.__class__.__name__
class MakeRSTexBin(TextureProcessor):
"""Make `.rstexbin` using `redshiftTextureProcessor`"""
extension = ".rstexbin"
def process(self,
source,
colorspace,
color_management,
staging_dir):
texture_processor_path = self.get_redshift_tool(
"redshiftTextureProcessor"
)
if not texture_processor_path:
raise KnownPublishError("Must have Redshift available.")
subprocess_args = [
texture_processor_path,
source
]
hash_args = ["rstex"]
texture_hash = source_hash(source, *hash_args)
# Redshift stores the output texture next to the input but with
# the extension replaced to `.rstexbin`
basename, ext = os.path.splitext(source)
destination = "{}{}".format(basename, self.extension)
self.log.debug(" ".join(subprocess_args))
try:
run_subprocess(subprocess_args)
except Exception:
self.log.error("Texture .rstexbin conversion failed",
exc_info=True)
raise
return TextureResult(
path=destination,
file_hash=texture_hash,
colorspace=colorspace,
transfer_mode=COPY
)
@staticmethod
def get_redshift_tool(tool_name):
"""Path to redshift texture processor.
On Windows it adds .exe extension if missing from tool argument.
Args:
tool_name (string): Tool name.
Returns:
str: Full path to redshift texture processor executable.
"""
if "REDSHIFT_COREDATAPATH" not in os.environ:
raise RuntimeError("Must have Redshift available.")
redshift_tool_path = os.path.join(
os.environ["REDSHIFT_COREDATAPATH"],
"bin",
tool_name
)
return find_executable(redshift_tool_path)
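A sketch of what the lookup above resolves to, assuming a hypothetical install location:

# With REDSHIFT_COREDATAPATH=/opt/redshift the tool path becomes:
#   /opt/redshift/bin/redshiftTextureProcessor
# find_executable then validates it (adding ".exe" on Windows if missing).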
class MakeTX(TextureProcessor):
"""Make `.tx` using `maketx` with some default settings.
Some hardcoded arguments passed to `maketx` are based on the defaults used
in Arnold's txManager tool.
"""
extension = ".tx"
def __init__(self, log=None):
super(MakeTX, self).__init__(log=log)
self.extra_args = []
def apply_settings(self, system_settings, project_settings):
# Allow extra maketx arguments from project settings
args_settings = (
project_settings["maya"]["publish"]
.get("ExtractLook", {}).get("maketx_arguments", [])
)
extra_args = []
for arg_data in args_settings:
argument = arg_data["argument"]
parameters = arg_data["parameters"]
if not argument:
self.log.debug("Ignoring empty parameter from "
"`maketx_arguments` setting..")
continue
extra_args.append(argument)
extra_args.extend(parameters)
self.extra_args = extra_args
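A hypothetical shape for the `maketx_arguments` project setting read above (flag values are examples, not shipped defaults):

# Each entry contributes its "argument" followed by its "parameters":
maketx_arguments = [
    {"argument": "--resize", "parameters": []},
    {"argument": "--filter", "parameters": ["lanczos3"]},
]
# -> self.extra_args == ["--resize", "--filter", "lanczos3"]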
def process(self,
source,
colorspace,
color_management,
staging_dir):
"""Process the texture.
This function requires the `maketx` executable to be available in an
OpenImageIO toolset detectable by OpenPype.
Args:
source (str): Path to source file.
colorspace (str): Colorspace of the source file.
color_management (dict): Maya Color management data from
`lib.get_color_management_preferences`
staging_dir (str): Output directory to write to.
Returns:
TextureResult: The resulting texture information.
"""
maketx_path = get_oiio_tools_path("maketx")
if not maketx_path:
raise AssertionError(
"OIIO 'maketx' tool not found. Result: {}".format(maketx_path)
)
# Define .tx filepath in staging if source file is not .tx
fname, ext = os.path.splitext(os.path.basename(source))
if ext == ".tx":
# Do nothing if the source file is already a .tx file.
return TextureResult(
path=source,
file_hash=None, # todo: unknown texture hash?
colorspace=colorspace,
transfer_mode=COPY
)
# Hardcoded default arguments for maketx conversion based on Arnold's
# txManager in Maya
args = [
# unpremultiply before conversion (recommended when alpha present)
"--unpremult",
# use oiio-optimized settings for tile-size, planarconfig, metadata
"--oiio",
"--filter", "lanczos3",
]
if color_management["enabled"]:
config_path = color_management["config"]
if not os.path.exists(config_path):
raise RuntimeError("OCIO config not found at: "
"{}".format(config_path))
render_colorspace = color_management["rendering_space"]
self.log.info("tx: converting colorspace {0} "
"-> {1}".format(colorspace,
render_colorspace))
args.extend(["--colorconvert", colorspace, render_colorspace])
args.extend(["--colorconfig", config_path])
else:
# Maya color management is disabled. We cannot rely on an OCIO config.
self.log.debug("tx: Maya color management is disabled. No color "
"conversion will be applied to .tx conversion for: "
"{}".format(source))
# Assume linear
render_colorspace = "linear"
# Note: The texture hash is only reliable if we include any potential
# conversion arguments provided to e.g. `maketx`
hash_args = ["maketx"] + args + self.extra_args
texture_hash = source_hash(source, *hash_args)
# Ensure folder exists
resources_dir = os.path.join(staging_dir, "resources")
if not os.path.exists(resources_dir):
os.makedirs(resources_dir)
self.log.info("Generating .tx file for %s .." % source)
subprocess_args = [
maketx_path,
"-v", # verbose
"-u", # update mode
# --checknan doesn't influence the output file but aborts the
# conversion if it finds any NaN values, so we can leave it out
# of the file hash
"--checknan",
source
]
subprocess_args.extend(args)
if self.extra_args:
subprocess_args.extend(self.extra_args)
# Add source hash attribute after other arguments for log readability
# Note: argument is excluded from the hash since it is the hash itself
subprocess_args.extend([
"--sattrib",
"sourceHash",
texture_hash
])
destination = os.path.join(resources_dir, fname + ".tx")
subprocess_args.extend(["-o", destination])
# We want to be explicit about which OCIO config gets used, so when
# we supply no --colorconfig flag we ensure that no fallback to an
# OCIO env var occurs.
env = os.environ.copy()
env.pop("OCIO", None)
self.log.debug(" ".join(subprocess_args))
try:
run_subprocess(subprocess_args, env=env)
except Exception:
self.log.error("Texture maketx conversion failed",
exc_info=True)
raise
return TextureResult(
path=destination,
file_hash=texture_hash,
colorspace=render_colorspace,
transfer_mode=COPY
)
@staticmethod
def _has_arnold():
"""Return whether the arnold package is available and importable."""
try:
import arnold # noqa: F401
return True
except (ImportError, ModuleNotFoundError):
return False
class ExtractLook(publish.Extractor):
"""Extract Look (Maya Scene + JSON)
@@ -149,22 +400,6 @@ class ExtractLook(publish.Extractor):
scene_type = "ma"
look_data_type = "json"
@staticmethod
def get_renderer_name():
"""Get renderer name from Maya.
Returns:
str: Renderer name.
"""
renderer = cmds.getAttr(
"defaultRenderGlobals.currentRenderer"
).lower()
# handle various renderman names
if renderer.startswith("renderman"):
renderer = "renderman"
return renderer
def get_maya_scene_type(self, instance):
"""Get Maya scene type from settings.
@@ -204,16 +439,12 @@ class ExtractLook(publish.Extractor):
dir_path = self.staging_dir(instance)
maya_fname = "{0}.{1}".format(instance.name, self.scene_type)
json_fname = "{0}.{1}".format(instance.name, self.look_data_type)
# Make texture dump folder
maya_path = os.path.join(dir_path, maya_fname)
json_path = os.path.join(dir_path, json_fname)
self.log.info("Performing extraction..")
# Remove all members of the sets so they are not included in the
# exported file by accident
self.log.info("Extract sets (%s) ..." % _scene_type)
self.log.info("Processing sets..")
lookdata = instance.data["lookData"]
relationships = lookdata["relationships"]
sets = list(relationships.keys())
@@ -221,13 +452,36 @@ class ExtractLook(publish.Extractor):
self.log.info("No sets found")
return
results = self.process_resources(instance, staging_dir=dir_path)
# Specify texture processing executables to activate
# TODO: Load these more dynamically once we support more processors
processors = []
context = instance.context
for key, Processor in {
# Instance data key to texture processor mapping
"maketx": MakeTX,
"rstex": MakeRSTexBin
}.items():
if instance.data.get(key, False):
processor = Processor()
processor.apply_settings(context.data["system_settings"],
context.data["project_settings"])
processors.append(processor)
if processors:
self.log.debug("Collected texture processors: "
"{}".format(processors))
self.log.debug("Processing resources..")
results = self.process_resources(instance,
staging_dir=dir_path,
processors=processors)
transfers = results["fileTransfers"]
hardlinks = results["fileHardlinks"]
hashes = results["fileHashes"]
remap = results["attrRemap"]
# Extract in correct render layer
self.log.info("Extracting look maya scene file: {}".format(maya_path))
layer = instance.data.get("renderlayer", "defaultRenderLayer")
with lib.renderlayer(layer):
# TODO: Ensure membership edits don't become renderlayer overrides
@@ -235,7 +489,7 @@ class ExtractLook(publish.Extractor):
# To avoid Maya trying to automatically remap the file
# textures relative to the `workspace -directory` we force
# it to a fake temporary workspace. This fixes textures
# getting incorrectly remapped. (LKD-17, PLN-101)
# getting incorrectly remapped.
with no_workspace_dir():
with lib.attribute_values(remap):
with lib.maintained_selection():
@@ -299,40 +553,38 @@ class ExtractLook(publish.Extractor):
# Source hash for the textures
instance.data["sourceHashes"] = hashes
"""
self.log.info("Returning colorspaces to their original values ...")
for attr, value in remap.items():
self.log.info(" - {}: {}".format(attr, value))
cmds.setAttr(attr, value, type="string")
"""
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
maya_path))
def process_resources(self, instance, staging_dir):
def _set_resource_result_colorspace(self, resource, colorspace):
"""Update resource resulting colorspace after texture processing"""
if "result_color_space" in resource:
if resource["result_color_space"] == colorspace:
return
self.log.warning(
"Resource already has a resulting colorspace but is now "
"being overridden to a new one: {} -> {}".format(
resource["result_color_space"], colorspace
)
)
resource["result_color_space"] = colorspace
def process_resources(self, instance, staging_dir, processors):
"""Process all resources in the instance.
It is assumed that all resources are nodes using file textures.
Extract the textures to transfer, possibly convert with maketx and
remap the node paths to the destination path. Note that a source
might be included more than once amongst the resources as they could
be the input file to multiple nodes.
"""
# Extract the textures to transfer, possibly convert with maketx and
# remap the node paths to the destination path. Note that a source
# might be included more than once amongst the resources as they could
# be the input file to multiple nodes.
resources = instance.data["resources"]
do_maketx = instance.data.get("maketx", False)
color_management = lib.get_color_management_preferences()
# Collect all unique files used in the resources
files_metadata = {}
for resource in resources:
# Preserve color space values (force value after filepath change)
# This will also trigger in the same order at end of context to
# ensure after context it's still the original value.
color_space = resource.get("color_space")
for f in resource["files"]:
files_metadata[os.path.normpath(f)] = {
"color_space": color_space}
# Process the resource files
transfers = []
hardlinks = []
hashes = {}
# Temporary fix to NOT create hardlinks on windows machines
if platform.system().lower() == "windows":
self.log.info(
@@ -342,95 +594,114 @@ class ExtractLook(publish.Extractor):
else:
force_copy = instance.data.get("forceCopy", False)
for filepath in files_metadata:
destinations_cache = {}
linearize = False
# if OCIO color management enabled
# it won't take the condition of the files_metadata
def get_resource_destination_cached(path):
"""Get resource destination with cached result per filepath"""
if path not in destinations_cache:
destination = self.get_resource_destination(
path, instance.data["resourcesDir"], processors)
destinations_cache[path] = destination
return destinations_cache[path]
ocio_maya = cmds.colorManagementPrefs(q=True,
cmConfigFileEnabled=True,
cmEnabled=True)
if do_maketx and not ocio_maya:
if files_metadata[filepath]["color_space"].lower() == "srgb": # noqa: E501
linearize = True
# set its file node to 'raw' as tx will be linearized
files_metadata[filepath]["color_space"] = "Raw"
# if do_maketx:
# color_space = "Raw"
source, mode, texture_hash = self._process_texture(
filepath,
resource,
do_maketx,
staging=staging_dir,
linearize=linearize,
force=force_copy
)
destination = self.resource_destination(instance,
source,
do_maketx)
# Force copy is specified.
if force_copy:
mode = COPY
if mode == COPY:
transfers.append((source, destination))
self.log.info('file will be copied {} -> {}'.format(
source, destination))
elif mode == HARDLINK:
hardlinks.append((source, destination))
self.log.info('file will be hardlinked {} -> {}'.format(
source, destination))
# Store the hashes from hash to destination to include in the
# database
hashes[texture_hash] = destination
# Remap the resources to the destination path (change node attributes)
destinations = {}
remap = OrderedDict() # needs to be ordered, see color space values
# Process all resource's individual files
processed_files = {}
transfers = []
hardlinks = []
hashes = {}
remap = OrderedDict()
for resource in resources:
source = os.path.normpath(resource["source"])
if source not in destinations:
# Cache destination as source resource might be included
# multiple times
destinations[source] = self.resource_destination(
instance, source, do_maketx
colorspace = resource["color_space"]
for filepath in resource["files"]:
filepath = os.path.normpath(filepath)
if filepath in processed_files:
# The file was already processed, likely due to usage by
# another resource in the scene. We confirm here that it
# wasn't processed with a colorspace different from the
# current resource's.
processed_file = processed_files[filepath]
self.log.debug(
"File was already processed. Likely used by another "
"resource too: {}".format(filepath)
)
if colorspace != processed_file["color_space"]:
self.log.warning(
"File '{}' was already processed using colorspace "
"'{}' instead of the current resource's "
"colorspace '{}'. The already processed texture "
"result's colorspace '{}' will be used."
"".format(filepath,
colorspace,
processed_file["color_space"],
processed_file["result_color_space"]))
self._set_resource_result_colorspace(
resource,
colorspace=processed_file["result_color_space"]
)
continue
texture_result = self._process_texture(
filepath,
processors=processors,
staging_dir=staging_dir,
force_copy=force_copy,
color_management=color_management,
colorspace=colorspace
)
# Set the resulting color space on the resource
self._set_resource_result_colorspace(
resource, colorspace=texture_result.colorspace
)
processed_files[filepath] = {
"color_space": colorspace,
"result_color_space": texture_result.colorspace,
}
source = texture_result.path
destination = get_resource_destination_cached(source)
if force_copy or texture_result.transfer_mode == COPY:
transfers.append((source, destination))
self.log.info('file will be copied {} -> {}'.format(
source, destination))
elif texture_result.transfer_mode == HARDLINK:
hardlinks.append((source, destination))
self.log.info('file will be hardlinked {} -> {}'.format(
source, destination))
# Store the hashes from hash to destination to include in the
# database
hashes[texture_result.file_hash] = destination
# Set up remapping attributes for the node during the publish
# The order of these can be important if one attribute directly
# affects another, e.g. we set colorspace after filepath because
# maya sometimes tries to guess the colorspace when changing
# filepaths (which is avoidable, but we don't want to have those
# attributes changed in the resulting publish)
# Remap filepath to publish destination
# TODO It would be much better if we could use the destination path
# from the actual processed texture results, but since the
# attribute will need to preserve tokens like <f>, <udim> etc for
# now we will define the output path from the attribute value
# including the tokens to persist them.
filepath_attr = resource["attribute"]
remap[filepath_attr] = get_resource_destination_cached(
resource["source"]
)
# Preserve color space values (force value after filepath change)
# This will also trigger in the same order at end of context to
# ensure after context it's still the original value.
color_space_attr = resource["node"] + ".colorSpace"
try:
color_space = cmds.getAttr(color_space_attr)
except ValueError:
# node doesn't have color space attribute
color_space = "Raw"
else:
# get the resolved files
metadata = files_metadata.get(source)
# if the files are unresolved from `source`
# assume color space from the first file of
# the resource
if not metadata:
first_file = next(iter(resource.get(
"files", [])), None)
if not first_file:
continue
first_filepath = os.path.normpath(first_file)
metadata = files_metadata[first_filepath]
if metadata["color_space"] == "Raw":
# set color space to raw if we linearized it
color_space = "Raw"
# Remap file node filename to destination
remap[color_space_attr] = color_space
attr = resource["attribute"]
remap[attr] = destinations[source]
node = resource["node"]
if cmds.attributeQuery("colorSpace", node=node, exists=True):
color_space_attr = "{}.colorSpace".format(node)
remap[color_space_attr] = resource["result_color_space"]
self.log.info("Finished remapping destinations ...")
@@ -441,134 +712,131 @@ class ExtractLook(publish.Extractor):
"attrRemap": remap,
}
def resource_destination(self, instance, filepath, do_maketx):
def get_resource_destination(self, filepath, resources_dir, processors):
"""Get resource destination path.
This is a utility function to change the path if the resource file name is
changed by some external tool like `maketx`.
Args:
instance: Current Instance.
filepath (str): Resource path
do_maketx (bool): Flag if resource is processed by `maketx`.
filepath (str): Resource source path
resources_dir (str): Destination dir for resources in publish.
processors (list): Texture processors converting resource.
Returns:
str: Path to resource file
"""
resources_dir = instance.data["resourcesDir"]
# Compute destination location
basename, ext = os.path.splitext(os.path.basename(filepath))
# If `maketx` then the texture will always end with .tx
if do_maketx:
ext = ".tx"
# Get extension from the last processor
for processor in reversed(processors):
processor_ext = processor.extension
if processor_ext and ext != processor_ext:
self.log.debug("Processor {} overrides extension to '{}' "
"for path: {}".format(processor,
processor_ext,
filepath))
ext = processor_ext
break
return os.path.join(
resources_dir, basename + ext
)
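A worked example of the extension override above (hypothetical paths, single active `MakeTX` processor):

# MakeTX defines extension ".tx", so a source EXR remaps like this:
#   get_resource_destination(
#       "/textures/diffuse.exr", "/publish/resources", [MakeTX()])
#   -> "/publish/resources/diffuse.tx"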
def _process_texture(self, filepath, resource,
do_maketx, staging, linearize, force):
"""Process a single texture file on disk for publishing.
This will:
1. Check whether it's already published, if so it will do hardlink
2. If not published and maketx is enabled, generate a new .tx file.
3. Compute the destination path for the source file.
Args:
filepath (str): The source file path to process.
do_maketx (bool): Whether to produce a .tx file
Returns:
"""
fname, ext = os.path.splitext(os.path.basename(filepath))
args = []
if do_maketx:
args.append("maketx")
texture_hash = source_hash(filepath, *args)
def _get_existing_hashed_texture(self, texture_hash):
"""Return the first found filepath from a texture hash"""
# If source has been published before with the same settings,
# then don't reprocess but hardlink from the original
existing = find_paths_by_hash(texture_hash)
if existing and not force:
self.log.info("Found hash in database, preparing hardlink..")
if existing:
source = next((p for p in existing if os.path.exists(p)), None)
if source:
return source, HARDLINK, texture_hash
return source
else:
self.log.warning(
("Paths not found on disk, "
"skipping hardlink: %s") % (existing,)
"Paths not found on disk, "
"skipping hardlink: {}".format(existing)
)
if do_maketx and ext != ".tx":
# Produce .tx file in staging if source file is not .tx
converted = os.path.join(staging, "resources", fname + ".tx")
additional_args = [
"--sattrib",
"sourceHash",
texture_hash
]
if linearize:
if cmds.colorManagementPrefs(query=True, cmEnabled=True):
render_colorspace = cmds.colorManagementPrefs(query=True,
renderingSpaceName=True) # noqa
config_path = cmds.colorManagementPrefs(query=True,
configFilePath=True) # noqa
if not os.path.exists(config_path):
raise RuntimeError("No OCIO config path found!")
def _process_texture(self,
filepath,
processors,
staging_dir,
force_copy,
color_management,
colorspace):
"""Process a single texture file on disk for publishing.
color_space_attr = resource["node"] + ".colorSpace"
try:
color_space = cmds.getAttr(color_space_attr)
except ValueError:
# node doesn't have color space attribute
if _has_arnold():
img_info = image_info(filepath)
color_space = guess_colorspace(img_info)
else:
color_space = "Raw"
self.log.info("tx: converting {0} -> {1}".format(color_space, render_colorspace)) # noqa
This will:
1. Check whether it's already published, if so it will do hardlink
(if the texture hash is found and force copy is not enabled)
2. It will process the texture using the supplied texture
processors like MakeTX and MakeRSTexBin if enabled.
3. Compute the destination path for the source file.
additional_args.extend(["--colorconvert",
color_space,
render_colorspace])
else:
Args:
filepath (str): The source file path to process.
processors (list): List of TextureProcessor processing the texture
staging_dir (str): The staging directory to write to.
force_copy (bool): Whether to force a copy even if a file hash
might have existed already in the project, otherwise
hardlinking the existing file is allowed.
color_management (dict): Maya's Color Management settings from
`lib.get_color_management_preferences`
colorspace (str): The source colorspace of the resources this
texture belongs to.
if _has_arnold():
img_info = image_info(filepath)
color_space = guess_colorspace(img_info)
if color_space == "sRGB":
self.log.info("tx: converting sRGB -> linear")
additional_args.extend(["--colorconvert",
"sRGB",
"Raw"])
else:
self.log.info("tx: texture's colorspace "
"is already linear")
else:
self.log.warning("cannot guess the colorspace"
"color conversion won't be available!") # noqa
Returns:
TextureResult: The texture result information.
"""
additional_args.extend(["--colorconfig", config_path])
# Ensure folder exists
if not os.path.exists(os.path.dirname(converted)):
os.makedirs(os.path.dirname(converted))
self.log.info("Generating .tx file for %s .." % filepath)
maketx(
filepath,
converted,
additional_args,
self.log
if len(processors) > 1:
raise KnownPublishError(
"More than one texture processor not supported. "
"Current processors enabled: {}".format(processors)
)
return converted, COPY, texture_hash
for processor in processors:
self.log.debug("Processing texture {} with processor {}".format(
filepath, processor
))
return filepath, COPY, texture_hash
processed_result = processor.process(filepath,
colorspace,
color_management,
staging_dir)
if not processed_result:
raise RuntimeError("Texture Processor {} returned "
"no result.".format(processor))
self.log.info("Generated processed "
"texture: {}".format(processed_result.path))
# TODO: Currently all processors force copy instead of allowing
# hardlinks using source hashes. This should be refactored
return processed_result
# No texture processing for this file
texture_hash = source_hash(filepath)
if not force_copy:
existing = self._get_existing_hashed_texture(texture_hash)
if existing:
self.log.info("Found hash in database, preparing hardlink..")
return TextureResult(
path=filepath,
file_hash=texture_hash,
colorspace=colorspace,
transfer_mode=HARDLINK
)
return TextureResult(
path=filepath,
file_hash=texture_hash,
colorspace=colorspace,
transfer_mode=COPY
)
class ExtractModelRenderSets(ExtractLook):

View file

@@ -102,7 +102,7 @@ class ExtractMultiverseUsdOverride(publish.Extractor):
long=True)
self.log.info("Collected object {}".format(members))
# TODO: Deal with asset, composition, overide with options.
# TODO: Deal with asset, composition, override with options.
import multiverse
time_opts = None

View file

@@ -241,7 +241,6 @@ class ExtractPlayblast(publish.Extractor):
"frameStart": start,
"frameEnd": end,
"fps": fps,
"preview": True,
"tags": tags,
"camera_name": camera_node_name
}

View file

@@ -30,7 +30,7 @@ class ResetXgenAttributes(pyblish.api.InstancePlugin):
cmds.setAttr(palette + ".xgExportAsDelta", True)
# Need to save the scene, cause the attribute changes above does not
# mark the scene as modified so user can exit without commiting the
# mark the scene as modified so user can exit without committing the
# changes.
self.log.info("Saving changes.")
cmds.file(save=True)

View file

@@ -8,7 +8,7 @@ from openpype.pipeline.publish import ValidateContentsOrder
class ValidateCameraAttributes(pyblish.api.InstancePlugin):
"""Validates Camera has no invalid attribute keys or values.
The Alembic file format does not a specifc subset of attributes as such
The Alembic file format does not support a specific subset of attributes; as such
we validate that no values are set there as the output will not match the
current scene. For example the preScale, film offsets and film roll.

View file

@@ -1,26 +0,0 @@
from maya import cmds
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError
class ValidateMayaColorSpace(pyblish.api.InstancePlugin):
"""
Check if the OCIO Color Management and maketx options
enabled at the same time
"""
order = ValidateContentsOrder
families = ['look']
hosts = ['maya']
label = 'Color Management with maketx'
def process(self, instance):
ocio_maya = cmds.colorManagementPrefs(q=True,
cmConfigFileEnabled=True,
cmEnabled=True)
maketx = instance.data["maketx"]
if ocio_maya and maketx:
raise PublishValidationError("Maya is color managed and maketx option is on. OpenPype doesn't support this combination yet.") # noqa

View file

@@ -1,6 +1,7 @@
import pyblish.api
import openpype.hosts.maya.api.action
from openpype.pipeline.publish import ValidateContentsOrder
from maya import cmds # noqa
class ValidateLookContents(pyblish.api.InstancePlugin):
@@ -85,6 +86,7 @@ class ValidateLookContents(pyblish.api.InstancePlugin):
invalid.add(instance.name)
return list(invalid)
@classmethod
def validate_looks(cls, instance):
@@ -112,3 +114,23 @@ class ValidateLookContents(pyblish.api.InstancePlugin):
invalid.append(node)
return invalid
@classmethod
def validate_renderer(cls, instance):
# TODO: Rewrite this to be more specific and configurable
renderer = cmds.getAttr(
'defaultRenderGlobals.currentRenderer').lower()
do_maketx = instance.data.get("maketx", False)
do_rstex = instance.data.get("rstex", False)
processors = []
if do_maketx:
processors.append('arnold')
if do_rstex:
processors.append('redshift')
for processor in processors:
if processor == renderer:
continue
else:
cls.log.error("Converted texture does not match current renderer.") # noqa

View file

@@ -34,7 +34,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
fps = context.data.get('fps')
# TODO repace query with using 'context.data["assetEntity"]'
# TODO replace query with using 'context.data["assetEntity"]'
asset_doc = get_current_project_asset()
asset_fps = mayalib.convert_to_maya_fps(asset_doc["data"]["fps"])
@@ -86,7 +86,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
cls.log.debug(current_linear)
cls.log.info("Setting time unit to match project")
# TODO repace query with using 'context.data["assetEntity"]'
# TODO replace query with using 'context.data["assetEntity"]'
asset_doc = get_current_project_asset()
asset_fps = asset_doc["data"]["fps"]
mayalib.set_scene_fps(asset_fps)

View file

@@ -42,7 +42,8 @@ class ValidateMvLookContents(pyblish.api.InstancePlugin):
resources = instance.data.get("resources", [])
for resource in resources:
files = resource["files"]
self.log.debug("Resouce '{}', files: [{}]".format(resource, files))
self.log.debug(
"Resource '{}', files: [{}]".format(resource, files))
node = resource["node"]
if len(files) == 0:
self.log.error("File node '{}' uses no or non-existing "

View file

@@ -37,8 +37,8 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
project_name = legacy_io.active_project()
asset_doc = instance.data["assetEntity"]
render_passses = instance.data.get("renderPasses", [])
for render_pass in render_passses:
render_passes = instance.data.get("renderPasses", [])
for render_pass in render_passes:
is_valid = self.validate_subset_registered(
project_name, asset_doc, render_pass
)

View file

@@ -21,7 +21,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
- nurbsSurface: _NRB
- locator: _LOC
- null/group: _GRP
Suffices can also be overriden by project settings.
Suffixes can also be overridden by project settings.
.. warning::
This grabs the first child shape as a reference and doesn't use the