Merge remote-tracking branch 'origin/develop' into develop

This commit is contained in:
Milan Kolar 2022-06-22 11:06:52 +02:00
commit e4b2fed408
13 changed files with 233 additions and 165 deletions

View file

@ -3,6 +3,7 @@ from __future__ import absolute_import
import pyblish.api
from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
from openpype.api import get_errored_instances_from_context
@ -74,12 +75,21 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
from . import lib
asset = instance.data['asset']
asset_id = legacy_io.find_one(
{"name": asset, "type": "asset"},
projection={"_id": True}
)['_id']
for node, _id in lib.generate_ids(nodes, asset_id=asset_id):
# Expecting this is called on validators in which case 'assetEntity'
# should be always available, but kept a way to query it by name.
asset_doc = instance.data.get("assetEntity")
if not asset_doc:
asset_name = instance.data["asset"]
project_name = legacy_io.active_project()
self.log.info((
"Asset is not stored on instance."
" Querying by name \"{}\" from project \"{}\""
).format(asset_name, project_name))
asset_doc = get_asset_by_name(
project_name, asset_name, fields=["_id"]
)
for node, _id in lib.generate_ids(nodes, asset_id=asset_doc["_id"]):
lib.set_id(node, _id, overwrite=True)

View file

@ -2,6 +2,7 @@
"""OpenPype script commands to be used directly in Maya."""
from maya import cmds
from openpype.client import get_asset_by_name, get_project
from openpype.pipeline import legacy_io
@ -79,8 +80,9 @@ def reset_frame_range():
cmds.currentUnit(time=fps)
# Set frame start/end
project_name = legacy_io.active_project()
asset_name = legacy_io.Session["AVALON_ASSET"]
asset = legacy_io.find_one({"name": asset_name, "type": "asset"})
asset = get_asset_by_name(project_name, asset_name)
frame_start = asset["data"].get("frameStart")
frame_end = asset["data"].get("frameEnd")
@ -145,8 +147,9 @@ def reset_resolution():
resolution_height = 1080
# Get resolution from asset
project_name = legacy_io.active_project()
asset_name = legacy_io.Session["AVALON_ASSET"]
asset_doc = legacy_io.find_one({"name": asset_name, "type": "asset"})
asset_doc = get_asset_by_name(project_name, asset_name)
resolution = _resolution_from_document(asset_doc)
# Try get resolution from project
if resolution is None:
@ -155,7 +158,7 @@ def reset_resolution():
"Asset \"{}\" does not have set resolution."
" Trying to get resolution from project"
).format(asset_name))
project_doc = legacy_io.find_one({"type": "project"})
project_doc = get_project(project_name)
resolution = _resolution_from_document(project_doc)
if resolution is None:

View file

@ -12,11 +12,17 @@ import contextlib
from collections import OrderedDict, defaultdict
from math import ceil
from six import string_types
import bson
from maya import cmds, mel
import maya.api.OpenMaya as om
from openpype.client import (
get_project,
get_asset_by_name,
get_subsets,
get_last_versions,
get_representation_by_name
)
from openpype import lib
from openpype.api import get_anatomy_settings
from openpype.pipeline import (
@ -1387,15 +1393,11 @@ def generate_ids(nodes, asset_id=None):
if asset_id is None:
# Get the asset ID from the database for the asset of current context
asset_data = legacy_io.find_one(
{
"type": "asset",
"name": legacy_io.Session["AVALON_ASSET"]
},
projection={"_id": True}
)
assert asset_data, "No current asset found in Session"
asset_id = asset_data['_id']
project_name = legacy_io.active_project()
asset_name = legacy_io.Session["AVALON_ASSET"]
asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
assert asset_doc, "No current asset found in Session"
asset_id = asset_doc['_id']
node_ids = []
for node in nodes:
@ -1548,13 +1550,15 @@ def list_looks(asset_id):
# # get all subsets with look leading in
# the name associated with the asset
subset = legacy_io.find({
"parent": bson.ObjectId(asset_id),
"type": "subset",
"name": {"$regex": "look*"}
})
return list(subset)
# TODO this should probably look for family 'look' instead of checking
# subset name that cannot start with family
project_name = legacy_io.active_project()
subset_docs = get_subsets(project_name, asset_ids=[asset_id])
return [
subset_doc
for subset_doc in subset_docs
if subset_doc["name"].startswith("look")
]
def assign_look_by_version(nodes, version_id):
@ -1570,18 +1574,15 @@ def assign_look_by_version(nodes, version_id):
None
"""
# Get representations of shader file and relationships
look_representation = legacy_io.find_one({
"type": "representation",
"parent": version_id,
"name": "ma"
})
project_name = legacy_io.active_project()
json_representation = legacy_io.find_one({
"type": "representation",
"parent": version_id,
"name": "json"
})
# Get representations of shader file and relationships
look_representation = get_representation_by_name(
project_name, "ma", version_id
)
json_representation = get_representation_by_name(
project_name, "json", version_id
)
# See if representation is already loaded, if so reuse it.
host = registered_host()
@ -1639,42 +1640,54 @@ def assign_look(nodes, subset="lookDefault"):
parts = pype_id.split(":", 1)
grouped[parts[0]].append(node)
project_name = legacy_io.active_project()
subset_docs = get_subsets(
project_name, subset_names=[subset], asset_ids=grouped.keys()
)
subset_docs_by_asset_id = {
str(subset_doc["parent"]): subset_doc
for subset_doc in subset_docs
}
subset_ids = {
subset_doc["_id"]
for subset_doc in subset_docs_by_asset_id.values()
}
last_version_docs = get_last_versions(
project_name,
subset_ids=subset_ids,
fields=["_id", "name", "data.families"]
)
last_version_docs_by_subset_id = {
last_version_doc["parent"]: last_version_doc
for last_version_doc in last_version_docs
}
for asset_id, asset_nodes in grouped.items():
# create objectId for database
try:
asset_id = bson.ObjectId(asset_id)
except bson.errors.InvalidId:
log.warning("Asset ID is not compatible with bson")
continue
subset_data = legacy_io.find_one({
"type": "subset",
"name": subset,
"parent": asset_id
})
if not subset_data:
subset_doc = subset_docs_by_asset_id.get(asset_id)
if not subset_doc:
log.warning("No subset '{}' found for {}".format(subset, asset_id))
continue
# get last version
# with backwards compatibility
version = legacy_io.find_one(
{
"parent": subset_data['_id'],
"type": "version",
"data.families": {"$in": ["look"]}
},
sort=[("name", -1)],
projection={
"_id": True,
"name": True
}
)
last_version = last_version_docs_by_subset_id.get(subset_doc["_id"])
if not last_version:
log.warning((
"Not found last version for subset '{}' on asset with id {}"
).format(subset, asset_id))
continue
log.debug("Assigning look '{}' <v{:03d}>".format(subset,
version["name"]))
families = last_version.get("data", {}).get("families") or []
if "look" not in families:
log.warning((
"Last version for subset '{}' on asset with id {}"
" does not have look family"
).format(subset, asset_id))
continue
assign_look_by_version(asset_nodes, version['_id'])
log.debug("Assigning look '{}' <v{:03d}>".format(
subset, last_version["name"]))
assign_look_by_version(asset_nodes, last_version["_id"])
def apply_shaders(relationships, shadernodes, nodes):
@ -2158,7 +2171,8 @@ def reset_scene_resolution():
None
"""
project_doc = legacy_io.find_one({"type": "project"})
project_name = legacy_io.active_project()
project_doc = get_project(project_name)
project_data = project_doc["data"]
asset_data = lib.get_asset()["data"]
@ -2191,7 +2205,8 @@ def set_context_settings():
"""
# Todo (Wijnand): apply renderer and resolution of project
project_doc = legacy_io.find_one({"type": "project"})
project_name = legacy_io.active_project()
project_doc = get_project(project_name)
project_data = project_doc["data"]
asset_data = lib.get_asset()["data"]

View file

@ -6,10 +6,16 @@ import contextlib
import copy
import six
from bson.objectid import ObjectId
from maya import cmds
from openpype.client import (
get_version_by_name,
get_last_version_by_subset_id,
get_representation_by_id,
get_representation_by_name,
get_representation_parents,
)
from openpype.pipeline import (
schema,
legacy_io,
@ -283,36 +289,35 @@ def update_package_version(container, version):
"""
# Versioning (from `core.maya.pipeline`)
current_representation = legacy_io.find_one({
"_id": ObjectId(container["representation"])
})
project_name = legacy_io.active_project()
current_representation = get_representation_by_id(
project_name, container["representation"]
)
assert current_representation is not None, "This is a bug"
version_, subset, asset, project = legacy_io.parenthood(
current_representation
repre_parents = get_representation_parents(
project_name, current_representation
)
version_doc = subset_doc = asset_doc = project_doc = None
if repre_parents:
version_doc, subset_doc, asset_doc, project_doc = repre_parents
if version == -1:
new_version = legacy_io.find_one({
"type": "version",
"parent": subset["_id"]
}, sort=[("name", -1)])
new_version = get_last_version_by_subset_id(
project_name, subset_doc["_id"]
)
else:
new_version = legacy_io.find_one({
"type": "version",
"parent": subset["_id"],
"name": version,
})
new_version = get_version_by_name(
project_name, version, subset_doc["_id"]
)
assert new_version is not None, "This is a bug"
# Get the new representation (new file)
new_representation = legacy_io.find_one({
"type": "representation",
"parent": new_version["_id"],
"name": current_representation["name"]
})
new_representation = get_representation_by_name(
project_name, current_representation["name"], new_version["_id"]
)
update_package(container, new_representation)
@ -330,10 +335,10 @@ def update_package(set_container, representation):
"""
# Load the original package data
current_representation = legacy_io.find_one({
"_id": ObjectId(set_container['representation']),
"type": "representation"
})
project_name = legacy_io.active_project()
current_representation = get_representation_by_id(
project_name, set_container["representation"]
)
current_file = get_representation_path(current_representation)
assert current_file.endswith(".json")
@ -380,6 +385,7 @@ def update_scene(set_container, containers, current_data, new_data, new_file):
from openpype.hosts.maya.lib import DEFAULT_MATRIX, get_container_transforms
set_namespace = set_container['namespace']
project_name = legacy_io.active_project()
# Update the setdress hierarchy alembic
set_root = get_container_transforms(set_container, root=True)
@ -481,12 +487,12 @@ def update_scene(set_container, containers, current_data, new_data, new_file):
# Check whether the conversion can be done by the Loader.
# They *must* use the same asset, subset and Loader for
# `update_container` to make sense.
old = legacy_io.find_one({
"_id": ObjectId(representation_current)
})
new = legacy_io.find_one({
"_id": ObjectId(representation_new)
})
old = get_representation_by_id(
project_name, representation_current
)
new = get_representation_by_id(
project_name, representation_new
)
is_valid = compare_representations(old=old, new=new)
if not is_valid:
log.error("Skipping: %s. See log for details.",

View file

@ -1,6 +1,10 @@
import re
import json
from bson.objectid import ObjectId
from openpype.client import (
get_representation_by_id,
get_representations
)
from openpype.pipeline import (
InventoryAction,
get_representation_context,
@ -31,6 +35,7 @@ class ImportModelRender(InventoryAction):
def process(self, containers):
from maya import cmds
project_name = legacy_io.active_project()
for container in containers:
con_name = container["objectName"]
nodes = []
@ -40,9 +45,9 @@ class ImportModelRender(InventoryAction):
else:
nodes.append(n)
repr_doc = legacy_io.find_one({
"_id": ObjectId(container["representation"]),
})
repr_doc = get_representation_by_id(
project_name, container["representation"], fields=["parent"]
)
version_id = repr_doc["parent"]
print("Importing render sets for model %r" % con_name)
@ -63,26 +68,38 @@ class ImportModelRender(InventoryAction):
from maya import cmds
project_name = legacy_io.active_project()
repre_docs = get_representations(
project_name, version_ids=[version_id], fields=["_id", "name"]
)
# Get representations of shader file and relationships
look_repr = legacy_io.find_one({
"type": "representation",
"parent": version_id,
"name": {"$regex": self.scene_type_regex},
})
if not look_repr:
json_repre = None
look_repres = []
scene_type_regex = re.compile(self.scene_type_regex)
for repre_doc in repre_docs:
repre_name = repre_doc["name"]
if repre_name == self.look_data_type:
json_repre = repre_doc
continue
if scene_type_regex.fullmatch(repre_name):
look_repres.append(repre_doc)
# QUESTION should we care if there is more than one look
# representation? (since it's based on regex match)
look_repre = None
if look_repres:
look_repre = look_repres[0]
# QUESTION shouldn't be json representation validated too?
if not look_repre:
print("No model render sets for this model version..")
return
json_repr = legacy_io.find_one({
"type": "representation",
"parent": version_id,
"name": self.look_data_type,
})
context = get_representation_context(look_repr["_id"])
context = get_representation_context(look_repre["_id"])
maya_file = self.filepath_from_context(context)
context = get_representation_context(json_repr["_id"])
context = get_representation_context(json_repre["_id"])
json_file = self.filepath_from_context(context)
# Import the look file

View file

@ -1,5 +1,10 @@
from maya import cmds, mel
from openpype.client import (
get_asset_by_id,
get_subset_by_id,
get_version_by_id,
)
from openpype.pipeline import (
legacy_io,
load,
@ -65,9 +70,16 @@ class AudioLoader(load.LoaderPlugin):
)
# Set frame range.
version = legacy_io.find_one({"_id": representation["parent"]})
subset = legacy_io.find_one({"_id": version["parent"]})
asset = legacy_io.find_one({"_id": subset["parent"]})
project_name = legacy_io.active_project()
version = get_version_by_id(
project_name, representation["parent"], fields=["parent"]
)
subset = get_subset_by_id(
project_name, version["parent"], fields=["parent"]
)
asset = get_asset_by_id(
project_name, subset["parent"], fields=["parent"]
)
audio_node.sourceStart.set(1 - asset["data"]["frameStart"])
audio_node.sourceEnd.set(asset["data"]["frameEnd"])

View file

@ -1,5 +1,10 @@
from Qt import QtWidgets, QtCore
from openpype.client import (
get_asset_by_id,
get_subset_by_id,
get_version_by_id,
)
from openpype.pipeline import (
legacy_io,
load,
@ -216,9 +221,16 @@ class ImagePlaneLoader(load.LoaderPlugin):
)
# Set frame range.
version = legacy_io.find_one({"_id": representation["parent"]})
subset = legacy_io.find_one({"_id": version["parent"]})
asset = legacy_io.find_one({"_id": subset["parent"]})
project_name = legacy_io.active_project()
version = get_version_by_id(
project_name, representation["parent"], fields=["parent"]
)
subset = get_subset_by_id(
project_name, version["parent"], fields=["parent"]
)
asset = get_asset_by_id(
project_name, subset["parent"], fields=["parent"]
)
start_frame = asset["data"]["frameStart"]
end_frame = asset["data"]["frameEnd"]
image_plane_shape.frameOffset.set(1 - start_frame)

View file

@ -5,6 +5,7 @@ from collections import defaultdict
from Qt import QtWidgets
from openpype.client import get_representation_by_name
from openpype.pipeline import (
legacy_io,
get_representation_path,
@ -75,11 +76,10 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
shader_nodes = cmds.ls(members, type='shadingEngine')
nodes = set(self._get_nodes_with_shader(shader_nodes))
json_representation = legacy_io.find_one({
"type": "representation",
"parent": representation['parent'],
"name": "json"
})
project_name = legacy_io.active_project()
json_representation = get_representation_by_name(
project_name, "json", representation["parent"]
)
# Load relationships
shader_relation = get_representation_path(json_representation)

View file

@ -7,10 +7,9 @@ loader will use them instead of native vray vrmesh format.
"""
import os
from bson.objectid import ObjectId
import maya.cmds as cmds
from openpype.client import get_representation_by_name
from openpype.api import get_project_settings
from openpype.pipeline import (
legacy_io,
@ -185,12 +184,8 @@ class VRayProxyLoader(load.LoaderPlugin):
"""
self.log.debug(
"Looking for abc in published representations of this version.")
abc_rep = legacy_io.find_one({
"type": "representation",
"parent": ObjectId(version_id),
"name": "abc"
})
project_name = legacy_io.active_project()
abc_rep = get_representation_by_name(project_name, "abc", version_id)
if abc_rep:
self.log.debug("Found, we'll link alembic to vray proxy.")
file_name = get_representation_path(abc_rep)

View file

@ -3,6 +3,7 @@ import pymel.core as pm
import pyblish.api
from openpype.client import get_subset_by_name
from openpype.pipeline import legacy_io
@ -78,11 +79,15 @@ class CollectReview(pyblish.api.InstancePlugin):
self.log.debug('isntance data {}'.format(instance.data))
else:
legacy_subset_name = task + 'Review'
asset_doc_id = instance.context.data['assetEntity']["_id"]
subsets = legacy_io.find({"type": "subset",
"name": legacy_subset_name,
"parent": asset_doc_id}).distinct("_id")
if len(list(subsets)) > 0:
asset_doc = instance.context.data['assetEntity']
project_name = legacy_io.active_project()
subset_doc = get_subset_by_name(
project_name,
legacy_subset_name,
asset_doc["_id"],
fields=["_id"]
)
if subset_doc:
self.log.debug("Existing subsets found, keep legacy name.")
instance.data['subset'] = legacy_subset_name

View file

@ -1,6 +1,7 @@
import pyblish.api
import openpype.api
from openpype.client import get_assets
from openpype.pipeline import legacy_io
import openpype.hosts.maya.api.action
from openpype.hosts.maya.api import lib
@ -42,8 +43,12 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
nodes=instance[:])
# check ids against database ids
db_asset_ids = legacy_io.find({"type": "asset"}).distinct("_id")
db_asset_ids = set(str(i) for i in db_asset_ids)
project_name = legacy_io.active_project()
asset_docs = get_assets(project_name, fields=["_id"])
db_asset_ids = {
str(asset_doc["_id"])
for asset_doc in asset_docs
}
# Get all asset IDs
for node in id_required_nodes:

View file

@ -1,7 +1,6 @@
import pyblish.api
import openpype.api
from openpype.pipeline import legacy_io
import openpype.hosts.maya.api.action
from openpype.hosts.maya.api import lib
@ -36,15 +35,7 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
"""Return the member nodes that are invalid"""
invalid = list()
asset = instance.data['asset']
asset_data = legacy_io.find_one(
{
"name": asset,
"type": "asset"
},
projection={"_id": True}
)
asset_id = str(asset_data['_id'])
asset_id = str(instance.data['assetEntity']["_id"])
# We do want to check the referenced nodes as it might be
# part of the end product

View file

@ -1,8 +1,8 @@
import pyblish.api
from openpype.client import get_subset_by_name
import openpype.hosts.maya.api.action
from openpype.pipeline import legacy_io
import openpype.api
class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
@ -33,26 +33,23 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
raise RuntimeError("Found unregistered subsets: {}".format(invalid))
def get_invalid(self, instance):
invalid = []
asset_name = instance.data["asset"]
project_name = legacy_io.active_project()
asset_doc = instance.data["assetEntity"]
render_passses = instance.data.get("renderPasses", [])
for render_pass in render_passses:
is_valid = self.validate_subset_registered(asset_name, render_pass)
is_valid = self.validate_subset_registered(
project_name, asset_doc, render_pass
)
if not is_valid:
invalid.append(render_pass)
return invalid
def validate_subset_registered(self, asset_name, subset_name):
def validate_subset_registered(self, project_name, asset_doc, subset_name):
"""Check if subset is registered in the database under the asset"""
asset = legacy_io.find_one({"type": "asset", "name": asset_name})
is_valid = legacy_io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
})
return is_valid
return get_subset_by_name(
project_name, subset_name, asset_doc["_id"], fields=["_id"]
)