[Automated] Merged develop into main

commit 6c269ab961
Author: ynbot
Date: 2023-11-29 04:24:40 +01:00 (committed by GitHub)
Signature: GPG key ID 4AEE18F83AFDEB23 (no known key found for this signature in database)

12 changed files with 143 additions and 109 deletions

View file

@@ -35,6 +35,7 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
- 3.17.7-nightly.3
- 3.17.7-nightly.2
- 3.17.7-nightly.1
- 3.17.6
@@ -134,7 +135,6 @@ body:
- 3.15.2-nightly.6
- 3.15.2-nightly.5
- 3.15.2-nightly.4
- 3.15.2-nightly.3
validations:
required: true
- type: dropdown

View file

@@ -25,20 +25,24 @@ def enabled_savers(comp, savers):
"""
passthrough_key = "TOOLB_PassThrough"
original_states = {}
enabled_save_names = {saver.Name for saver in savers}
enabled_saver_names = {saver.Name for saver in savers}
all_savers = comp.GetToolList(False, "Saver").values()
savers_by_name = {saver.Name: saver for saver in all_savers}
try:
all_savers = comp.GetToolList(False, "Saver").values()
for saver in all_savers:
original_state = saver.GetAttrs()[passthrough_key]
original_states[saver] = original_state
original_states[saver.Name] = original_state
# The passthrough state we want to set (passthrough != enabled)
state = saver.Name not in enabled_save_names
state = saver.Name not in enabled_saver_names
if state != original_state:
saver.SetAttrs({passthrough_key: state})
yield
finally:
for saver, original_state in original_states.items():
for saver_name, original_state in original_states.items():
saver = savers_by_name[saver_name]
saver.SetAttrs({"TOOLB_PassThrough": original_state})
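
For reference, a minimal usage sketch of the fixed context manager (hypothetical; assumes a live Fusion comp object as used throughout this module):

    # Render with only the chosen savers active: every other saver is set to
    # passthrough for the duration of the block and restored afterwards.
    savers = list(comp.GetToolList(False, "Saver").values())[:1]
    with enabled_savers(comp, savers):
        comp.Render()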

View file

@@ -13,7 +13,7 @@ var LD_OPENHARMONY_PATH = System.getenv('LIB_OPENHARMONY_PATH');
LD_OPENHARMONY_PATH = LD_OPENHARMONY_PATH + '/openHarmony.js';
LD_OPENHARMONY_PATH = LD_OPENHARMONY_PATH.replace(/\\/g, "/");
include(LD_OPENHARMONY_PATH);
this.__proto__['$'] = $;
//this.__proto__['$'] = $;
function Client() {
var self = this;

View file

@@ -59,8 +59,8 @@ class ExtractRender(pyblish.api.InstancePlugin):
args = [application_path, "-batch",
"-frames", str(frame_start), str(frame_end),
"-scene", scene_path]
self.log.info(f"running [ {application_path} {' '.join(args)}")
self.log.info(f"running: {' '.join(args)}")
proc = subprocess.Popen(
args,
stdout=subprocess.PIPE,
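
For reference, a standalone sketch of the launch-and-log pattern (paths are hypothetical). The old log line also duplicated application_path, which is already args[0], so the new line joins args alone:

    import subprocess

    application_path = "/path/to/app"   # hypothetical
    scene_path = "/path/to/scene"       # hypothetical
    args = [application_path, "-batch",
            "-frames", "1001", "1100",
            "-scene", scene_path]
    print("running: {}".format(" ".join(args)))
    proc = subprocess.Popen(args, stdout=subprocess.PIPE)
    output, _ = proc.communicate()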

View file

@@ -33,7 +33,7 @@ class ImportModelRender(InventoryAction):
)
def process(self, containers):
from maya import cmds
from maya import cmds # noqa: F401
project_name = get_current_project_name()
for container in containers:
@@ -66,7 +66,7 @@ class ImportModelRender(InventoryAction):
None
"""
from maya import cmds
from maya import cmds # noqa: F401
project_name = get_current_project_name()
repre_docs = get_representations(
@@ -85,12 +85,7 @@ class ImportModelRender(InventoryAction):
if scene_type_regex.fullmatch(repre_name):
look_repres.append(repre_doc)
# QUESTION should we care if there is more than one look
# representation? (since it's based on regex match)
look_repre = None
if look_repres:
look_repre = look_repres[0]
look_repre = look_repres[0] if look_repres else None
# QUESTION shouldn't be json representation validated too?
if not look_repre:
print("No model render sets for this model version..")

View file

@@ -265,6 +265,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
class MayaUSDReferenceLoader(ReferenceLoader):
"""Reference USD file to native Maya nodes using MayaUSDImport reference"""
label = "Reference Maya USD"
families = ["usd"]
representations = ["usd"]
extensions = {"usd", "usda", "usdc"}

View file

@@ -45,11 +45,23 @@ FILE_NODES = {
"PxrTexture": "filename"
}
RENDER_SET_TYPES = [
"VRayDisplacement",
"VRayLightMesh",
"VRayObjectProperties",
"RedshiftObjectId",
"RedshiftMeshParameters",
]
# Keep only node types that actually exist
all_node_types = set(cmds.allNodeTypes())
for node_type in list(FILE_NODES.keys()):
if node_type not in all_node_types:
FILE_NODES.pop(node_type)
# Iterate over a copy so entries can be removed from the list safely
for node_type in list(RENDER_SET_TYPES):
if node_type not in all_node_types:
RENDER_SET_TYPES.remove(node_type)
del all_node_types
# Cache pixar dependency node types so we can perform a type lookup against it
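
An equivalent way to write the render-set filter above, as a standalone sketch (assumes a Maya session and the RENDER_SET_TYPES list defined earlier); building a new list avoids removing items from a list while iterating over it:

    from maya import cmds

    all_node_types = set(cmds.allNodeTypes())
    # Keep only the render set node types this Maya session actually provides
    RENDER_SET_TYPES = [node_type for node_type in RENDER_SET_TYPES
                        if node_type in all_node_types]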
@@ -69,9 +81,7 @@ def get_attributes(dictionary, attr, node=None):
else:
val = dictionary.get(attr, [])
if not isinstance(val, list):
return [val]
return val
return val if isinstance(val, list) else [val]
def get_look_attrs(node):
@@ -106,7 +116,7 @@ def get_look_attrs(node):
def node_uses_image_sequence(node, node_path):
# type: (str) -> bool
# type: (str, str) -> bool
"""Return whether file node uses an image sequence or single image.
Determine if a node uses an image sequence or just a single image,
@@ -114,6 +124,7 @@ def node_uses_image_sequence(node, node_path):
Args:
node (str): Name of the Maya node
node_path (str): The file path of the node
Returns:
bool: True if node uses an image sequence
@@ -247,7 +258,7 @@ def get_file_node_files(node):
# For sequences get all files and filter to only existing files
result = []
for index, path in enumerate(paths):
for path in paths:
if node_uses_image_sequence(node, path):
glob_pattern = seq_to_glob(path)
result.extend(glob.glob(glob_pattern))
@@ -358,6 +369,7 @@ class CollectLook(pyblish.api.InstancePlugin):
for attr in shader_attrs:
if cmds.attributeQuery(attr, node=look, exists=True):
existing_attrs.append("{}.{}".format(look, attr))
materials = cmds.listConnections(existing_attrs,
source=True,
destination=False) or []
@@ -367,30 +379,32 @@
self.log.debug("Found the following sets:\n{}".format(look_sets))
# Get the entire node chain of the look sets
# history = cmds.listHistory(look_sets, allConnections=True)
history = cmds.listHistory(materials, allConnections=True)
# if materials list is empty, listHistory() will crash with
# RuntimeError
history = set()
if materials:
history = set(
cmds.listHistory(materials, allConnections=True))
# Since we retrieved history only of the connected materials
# connected to the look sets above we now add direct history
# for some of the look sets directly
# handling render attribute sets
render_set_types = [
"VRayDisplacement",
"VRayLightMesh",
"VRayObjectProperties",
"RedshiftObjectId",
"RedshiftMeshParameters",
]
render_sets = cmds.ls(look_sets, type=render_set_types)
if render_sets:
history.extend(
cmds.listHistory(render_sets,
future=False,
pruneDagObjects=True)
or []
)
# Maya (at least 2024) crashes with a warning when a render set type
# isn't available; cmds.ls() would then return an empty list
if RENDER_SET_TYPES:
render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES)
if render_sets:
history.update(
cmds.listHistory(render_sets,
future=False,
pruneDagObjects=True)
or []
)
# Ensure unique entries only
history = list(set(history))
history = list(history)
files = cmds.ls(history,
# It's important only node types are passed that
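
For reference, a minimal sketch of the guard introduced above (assumes a Maya session): cmds.listHistory() raises RuntimeError when given an empty list, so it is only called when materials exist:

    from maya import cmds

    def get_material_history(materials):
        # Return the combined history of the materials, or an empty set
        if not materials:
            return set()
        return set(cmds.listHistory(materials, allConnections=True))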

View file

@@ -50,11 +50,11 @@ class ExtractRedshiftProxy(publish.Extractor):
# Padding is taken from the number of digits of end_frame.
# Not sure where Redshift takes it from.
repr_files = [
"{}.{}{}".format(root, str(frame).rjust(4, "0"), ext) # noqa: E501
"{}.{}{}".format(os.path.basename(root), str(frame).rjust(4, "0"), ext) # noqa: E501
for frame in range(
int(start_frame),
int(end_frame) + 1,
int(instance.data["step"]),
int(instance.data["step"])
)]
# vertex_colors = instance.data.get("vertexColors", False)
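
A standalone sketch of the file name expansion above, with hypothetical values; os.path.basename() keeps only the file name so the representation files are listed relative to the staging directory:

    import os

    root, ext = "/tmp/staging/proxyExport", ".rs"
    start_frame, end_frame, step = 1001, 1003, 1
    repr_files = [
        "{}.{}{}".format(os.path.basename(root), str(frame).rjust(4, "0"), ext)
        for frame in range(int(start_frame), int(end_frame) + 1, int(step))
    ]
    # -> ['proxyExport.1001.rs', 'proxyExport.1002.rs', 'proxyExport.1003.rs']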

View file

@@ -3,60 +3,76 @@ from maya import cmds
import pyblish.api
from openpype.pipeline.publish import (
ValidateContentsOrder,
RepairContextAction,
PublishValidationError
)
class ValidateLookDefaultShadersConnections(pyblish.api.InstancePlugin):
class ValidateLookDefaultShadersConnections(pyblish.api.ContextPlugin):
"""Validate default shaders in the scene have their default connections.
For example the lambert1 could potentially be disconnected from the
initialShadingGroup. As such it's not lambert1 that will be identified
as the default shader which can have unpredictable results.
For example the standardSurface1 or lambert1 (Maya 2023 and before) could
potentially be disconnected from the initialShadingGroup. As such it will
not be identified as the default shader, which can have unpredictable
results.
To fix this, the default connections need to be made again. See the logs
for more details on which connections are missing.
"""
order = ValidateContentsOrder
order = pyblish.api.ValidatorOrder - 0.4999
families = ['look']
hosts = ['maya']
label = 'Look Default Shader Connections'
actions = [RepairContextAction]
# The default connections to check
DEFAULTS = [("initialShadingGroup.surfaceShader", "lambert1"),
("initialParticleSE.surfaceShader", "lambert1"),
("initialParticleSE.volumeShader", "particleCloud1")
]
DEFAULTS = {
"initialShadingGroup.surfaceShader": ["standardSurface1.outColor",
"lambert1.outColor"],
"initialParticleSE.surfaceShader": ["standardSurface1.outColor",
"lambert1.outColor"],
"initialParticleSE.volumeShader": ["particleCloud1.outColor"]
}
def process(self, instance):
def process(self, context):
# Ensure the check is run only once. We don't use ContextPlugin because
# of a bug where the ContextPlugin will always be visible, even when
# the family is not present in any instance.
key = "__validate_look_default_shaders_connections_checked"
context = instance.context
is_run = context.data.get(key, False)
if is_run:
return
else:
context.data[key] = True
if self.get_invalid():
raise PublishValidationError(
"Default shaders in your scene do not have their "
"default shader connections. Please repair them to continue."
)
@classmethod
def get_invalid(cls):
# Process as usual
invalid = list()
for plug, input_node in self.DEFAULTS:
for plug, valid_inputs in cls.DEFAULTS.items():
inputs = cmds.listConnections(plug,
source=True,
destination=False) or None
if not inputs or inputs[0] != input_node:
self.log.error("{0} is not connected to {1}. "
"This can result in unexpected behavior. "
"Please reconnect to continue.".format(
plug,
input_node))
destination=False,
plugs=True) or None
if not inputs or inputs[0] not in valid_inputs:
cls.log.error(
"{0} is not connected to {1}. This can result in "
"unexpected behavior. Please reconnect to continue."
"".format(plug, " or ".join(valid_inputs))
)
invalid.append(plug)
if invalid:
raise PublishValidationError("Invalid connections.")
return invalid
@classmethod
def repair(cls, context):
invalid = cls.get_invalid()
for plug in invalid:
valid_inputs = cls.DEFAULTS[plug]
for valid_input in valid_inputs:
if cmds.objExists(valid_input):
cls.log.info(
"Connecting {} -> {}".format(valid_input, plug)
)
cmds.connectAttr(valid_input, plug, force=True)
break
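
For reference, a minimal sketch of the plug-based check used above (assumes a default Maya scene): with plugs=True, listConnections() returns source plugs such as 'standardSurface1.outColor' instead of bare node names, matching what the DEFAULTS mapping now stores:

    from maya import cmds

    inputs = cmds.listConnections(
        "initialShadingGroup.surfaceShader",
        source=True, destination=False, plugs=True) or []
    print(inputs)  # e.g. ['standardSurface1.outColor'] in Maya 2024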

View file

@@ -298,7 +298,7 @@ def create_timeline_item(
if source_end:
clip_data["endFrame"] = source_end
if timecode_in:
clip_data["recordFrame"] = timecode_in
clip_data["recordFrame"] = timeline_in
# add to timeline
media_pool.AppendToTimeline([clip_data])
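
For context, a minimal sketch of the clip_data mapping consumed by AppendToTimeline (values are hypothetical; the variables are assumed from the surrounding loader code and the key names follow the Resolve scripting API):

    clip_data = {
        "mediaPoolItem": media_pool_item,  # a MediaPoolItem from the bin
        "startFrame": source_in,
        "endFrame": source_out,
        "recordFrame": timeline_in,  # timeline position, not source timecode
    }
    media_pool.AppendToTimeline([clip_data])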

View file

@@ -406,26 +406,42 @@ class ClipLoader:
self.active_bin
)
_clip_property = media_pool_item.GetClipProperty
source_in = int(_clip_property("Start"))
source_out = int(_clip_property("End"))
source_duration = int(_clip_property("Frames"))
# get handles
handle_start = self.data["versionData"].get("handleStart")
handle_end = self.data["versionData"].get("handleEnd")
if handle_start is None:
handle_start = int(self.data["assetData"]["handleStart"])
if handle_end is None:
handle_end = int(self.data["assetData"]["handleEnd"])
if not self.with_handles:
# Load file without the handles of the source media
# We remove the handles from the source in and source out
# so that the handles are excluded in the timeline
handle_start = 0
handle_end = 0
# check frame duration from versionData or assetData
frame_start = self.data["versionData"].get("frameStart")
if frame_start is None:
frame_start = self.data["assetData"]["frameStart"]
# get version data frame data from db
version_data = self.data["versionData"]
frame_start = version_data.get("frameStart")
frame_end = version_data.get("frameEnd")
# check frame duration from versionData or assetData
frame_end = self.data["versionData"].get("frameEnd")
if frame_end is None:
frame_end = self.data["assetData"]["frameEnd"]
db_frame_duration = int(frame_end) - int(frame_start) + 1
# The version data usually stores the frame range + handles of the
# media; however, certain representations may be shorter because they
# exclude those handles intentionally. Unfortunately the representation
# does not store that in the database currently, so we compensate for
# those cases: if the media is shorter than the frame range specified
# in the database we assume it is without handles and thus we do not
# need to remove the handles from source in and source out
if frame_start is not None and frame_end is not None:
# Version has frame range data, so we can compare media length
handle_start = version_data.get("handleStart", 0)
handle_end = version_data.get("handleEnd", 0)
frame_start_handle = frame_start - handle_start
frame_end_handle = frame_end + handle_end
database_frame_duration = int(
frame_end_handle - frame_start_handle + 1
)
if source_duration >= database_frame_duration:
source_in += handle_start
source_out -= handle_end
# get timeline in
timeline_start = self.active_timeline.GetStartFrame()
@@ -437,24 +453,6 @@ class ClipLoader:
timeline_in = int(
timeline_start + self.data["assetData"]["clipIn"])
source_in = int(_clip_property("Start"))
source_out = int(_clip_property("End"))
source_duration = int(_clip_property("Frames"))
# check if source duration is shorter than db frame duration
source_with_handles = True
if source_duration < db_frame_duration:
source_with_handles = False
# only exclude handles if source has no handles or
# if user wants to load without handles
if (
not self.with_handles
or not source_with_handles
):
source_in += handle_start
source_out -= handle_end
# make track item from source in bin as item
timeline_item = lib.create_timeline_item(
media_pool_item,
@@ -868,7 +866,7 @@ class PublishClip:
def _convert_to_entity(self, key):
""" Converting input key to key with type. """
# convert to entity type
entity_type = self.types.get(key, None)
entity_type = self.types.get(key)
assert entity_type, "Missing entity type for `{}`".format(
key
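
To make the handle compensation above concrete, a worked sketch with hypothetical numbers:

    # Database range 1001-1100 plus 10-frame handles -> 120 frames in total
    frame_start, frame_end = 1001, 1100
    handle_start, handle_end = 10, 10
    source_in, source_out, source_duration = 0, 119, 120

    database_frame_duration = (
        (frame_end + handle_end) - (frame_start - handle_start) + 1)  # 120
    if source_duration >= database_frame_duration:
        # The media is long enough to contain the handles, so trim them
        source_in += handle_start  # -> 10
        source_out -= handle_end   # -> 109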

View file

@@ -7,6 +7,8 @@ from openpype.pipeline.plugin_discover import (
deregister_plugin_path
)
from .load.utils import get_representation_path_from_context
class LauncherAction(object):
"""A custom action available"""
@@ -100,6 +102,10 @@ class InventoryAction(object):
"""
return True
@classmethod
def filepath_from_context(cls, context):
return get_representation_path_from_context(context)
# Launcher action
def discover_launcher_actions():
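
For reference, a hypothetical InventoryAction subclass using the new helper; get_representation_context() is assumed from openpype.pipeline.load.utils, and the container is assumed to carry a representation id:

    from openpype.pipeline.load.utils import get_representation_context

    class PrintFilepath(InventoryAction):
        label = "Print Filepath"

        def process(self, containers):
            for container in containers:
                context = get_representation_context(
                    container["representation"])
                print(self.filepath_from_context(context))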