Merge branch 'develop' into bugfix/AY-3649_Fix-Redshift-cryptomatte-multipartEXR

Toke Jepsen 2024-04-12 11:41:15 +01:00 committed by GitHub
commit 10c94754bf
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 102 additions and 43 deletions

View file

@@ -167,7 +167,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
asset_group.empty_display_type = 'SINGLE_ARROW'
avalon_container.objects.link(asset_group)
self._process(libpath, asset, asset_group, None)
self._process(libpath, asset_name, asset_group, None)
bpy.context.scene.collection.objects.link(asset_group)

View file

@@ -92,10 +92,6 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
folder_path, folder_name = self._get_folder_data(tag_data)
product_name = tag_data.get("productName")
if product_name is None:
product_name = tag_data["subset"]
families = [str(f) for f in tag_data["families"]]
# TODO: remove backward compatibility
@@ -293,7 +289,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
label += " {}".format(product_name)
data.update({
"name": "{}_{}".format(folder_path, subset),
"name": "{}_{}".format(folder_path, product_name),
"label": label,
"productName": product_name,
"productType": product_type,

View file

@@ -1,9 +1,21 @@
from collections import deque
import pyblish.api
from ayon_core.pipeline import registered_host
def collect_input_containers(nodes):
def get_container_members(container):
node = container["node"]
# Usually the loaded containers don't have any complex references
# and the contained children should be all we need. So we disregard
# checking for .references() on the nodes.
members = set(node.allSubChildren())
members.add(node) # include the node itself
return members
def collect_input_containers(containers, nodes):
"""Collect containers that contain any of the node in `nodes`.
This will return any loaded Avalon container that contains at least one of
@@ -11,30 +23,13 @@ def collect_input_containers(nodes):
there are member nodes of that container.
Returns:
list: Input avalon containers
list: Loaded containers that contain the `nodes`
"""
# Lookup by node ids
lookup = frozenset(nodes)
containers = []
host = registered_host()
for container in host.ls():
node = container["node"]
# Usually the loaded containers don't have any complex references
# and the contained children should be all we need. So we disregard
# checking for .references() on the nodes.
members = set(node.allSubChildren())
members.add(node) # include the node itself
# If there's an intersection
if not lookup.isdisjoint(members):
containers.append(container)
return containers
# Assume the containers have collected their cached '_members' data
# in the collector.
return [container for container in containers
if any(node in container["_members"] for node in nodes)]
def iter_upstream(node):
@@ -54,7 +49,7 @@ def iter_upstream(node):
)
# Initialize the process queue with the node's ancestors
queue = list(upstream)
queue = deque(upstream)
collected = set(upstream)
# Traverse upstream references for all nodes and yield them as we
@@ -72,6 +67,10 @@ def iter_upstream(node):
# Include the references' ancestors that have not been collected yet.
for reference in references:
if reference in collected:
# Might have been collected in previous iteration
continue
ancestors = reference.inputAncestors(
include_ref_inputs=True, follow_subnets=True
)
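The change from a list to collections.deque above gives O(1) pops from the front of the process queue during the breadth-first walk. A minimal, Houdini-agnostic sketch of that traversal pattern (iter_upstream_generic, get_inputs and the toy graph are illustrative stand-ins, not part of this patch):

from collections import deque

def iter_upstream_generic(node, get_inputs):
    """Yield every upstream dependency of `node`, breadth-first."""
    # Seed the queue with the node's direct upstream dependencies
    queue = deque(get_inputs(node))
    collected = set(queue)
    while queue:
        upstream = queue.popleft()
        yield upstream
        for dependency in get_inputs(upstream):
            if dependency in collected:
                # Might have been collected in a previous iteration
                continue
            collected.add(dependency)
            queue.append(dependency)

# Example with a plain dict graph:
# graph = {"render": ["geo"], "geo": ["cam", "cache"], "cam": [], "cache": []}
# list(iter_upstream_generic("render", lambda n: graph.get(n, [])))
# -> ["geo", "cam", "cache"]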
@@ -108,13 +107,32 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin):
)
return
# Collect all upstream parents
nodes = list(iter_upstream(output))
nodes.append(output)
# For large scenes the querying of "host.ls()" can be relatively slow
# e.g. up to a second. Many instances calling it easily slows this
# down. As such, we cache it so we trigger it only once.
# todo: Instead of hidden cache make "CollectContainers" plug-in
cache_key = "__cache_containers"
scene_containers = instance.context.data.get(cache_key, None)
if scene_containers is None:
# Query the scenes' containers if there's no cache yet
host = registered_host()
scene_containers = list(host.ls())
for container in scene_containers:
# Embed the members into the container dictionary
container_members = set(get_container_members(container))
container["_members"] = container_members
instance.context.data[cache_key] = scene_containers
# Collect containers for the given set of nodes
containers = collect_input_containers(nodes)
inputs = []
if scene_containers:
# Collect all upstream parents
nodes = list(iter_upstream(output))
nodes.append(output)
# Collect containers for the given set of nodes
containers = collect_input_containers(scene_containers, nodes)
inputs = [c["representation"] for c in containers]
inputs = [c["representation"] for c in containers]
instance.data["inputRepresentations"] = inputs
self.log.debug("Collected inputs: %s" % inputs)
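The hunk above boils down to a per-context cache: query host.ls() once, embed each container's members as a "_members" set, and let every instance reuse the result. A condensed sketch of that pattern, assuming a pyblish context and the registered_host() / get_container_members() calls shown in this patch (get_scene_containers_cached is a hypothetical helper name):

from ayon_core.pipeline import registered_host

def get_scene_containers_cached(context):
    """Return loaded containers, cached on the publish context."""
    cache_key = "__cache_containers"
    containers = context.data.get(cache_key)
    if containers is None:
        # host.ls() can be slow in large scenes, so query it only once
        host = registered_host()
        containers = list(host.ls())
        for container in containers:
            # Pre-compute members so later checks are plain set lookups
            container["_members"] = set(get_container_members(container))
        context.data[cache_key] = containers
    return containers

With "_members" cached, collect_input_containers(scene_containers, nodes) reduces to set membership tests instead of re-querying each container's children for every instance.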

View file

@@ -1917,6 +1917,29 @@ def apply_attributes(attributes, nodes_by_id):
set_attribute(attr, value, node)
def is_valid_reference_node(reference_node):
"""Return whether Maya considers the reference node a valid reference.
Maya might report an error when using `maya.cmds.referenceQuery`:
Reference node 'reference_node' is not associated with a reference file.
Note that this does *not* check whether the reference node points to an
existing file. Instead it only returns whether Maya considers it valid
and thus is not an unassociated reference node.
Arguments:
reference_node (str): Reference node name
Returns:
bool: Whether reference node is a valid reference
"""
sel = OpenMaya.MSelectionList()
sel.add(reference_node)
depend_node = sel.getDependNode(0)
return OpenMaya.MFnReference(depend_node).isValidReference()
def get_container_members(container):
"""Returns the members of a container.
This includes the nodes from any loaded references in the container.
@@ -1942,7 +1965,16 @@ def get_container_members(container):
if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
continue
reference_members = cmds.referenceQuery(ref, nodes=True, dagPath=True)
try:
reference_members = cmds.referenceQuery(ref,
nodes=True,
dagPath=True)
except RuntimeError:
# Ignore reference nodes that are not associated with a
# referenced file, for which the `referenceQuery` command fails
if not is_valid_reference_node(ref):
continue
raise
reference_members = cmds.ls(reference_members,
long=True,
objectsOnly=True)
@@ -4238,6 +4270,9 @@ def get_reference_node(members, log=None):
if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
continue
if not is_valid_reference_node(ref):
continue
references.add(ref)
assert references, "No reference node found in container"
@@ -4268,15 +4303,19 @@ def get_reference_node_parents(ref):
list: The upstream parent reference nodes.
"""
parent = cmds.referenceQuery(ref,
referenceNode=True,
parent=True)
def _get_parent(reference_node):
"""Return parent reference node, but ignore invalid reference nodes"""
if not is_valid_reference_node(reference_node):
return
return cmds.referenceQuery(reference_node,
referenceNode=True,
parent=True)
parent = _get_parent(ref)
parents = []
while parent:
parents.append(parent)
parent = cmds.referenceQuery(parent,
referenceNode=True,
parent=True)
parent = _get_parent(parent)
return parents
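A hedged sketch of how the new is_valid_reference_node() guard composes with cmds.referenceQuery when listing a reference's contents, following the try/except pattern from the hunk above (safe_reference_members is a hypothetical helper; the Maya commands and flags are the ones already used in this patch):

from maya import cmds

def safe_reference_members(reference_node):
    """Return the nodes of a reference, or [] for unassociated reference nodes."""
    try:
        members = cmds.referenceQuery(reference_node,
                                      nodes=True,
                                      dagPath=True)
    except RuntimeError:
        if not is_valid_reference_node(reference_node):
            # Reference node not associated with a reference file
            return []
        raise
    return cmds.ls(members, long=True, objectsOnly=True)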

View file

@@ -299,4 +299,10 @@ def transfer_image_planes(source_cameras, target_cameras,
def _attach_image_plane(camera, image_plane):
cmds.imagePlane(image_plane, edit=True, detach=True)
# Attaching to a camera resets it to identity size, so we counter that
size_x = cmds.getAttr(f"{image_plane}.sizeX")
size_y = cmds.getAttr(f"{image_plane}.sizeY")
cmds.imagePlane(image_plane, edit=True, camera=camera)
cmds.setAttr(f"{image_plane}.sizeX", size_x)
cmds.setAttr(f"{image_plane}.sizeY", size_y)