Merge pull request #3459 from pypeclub/feature/OP-3515_Use-query-functions-in-global-plugins

General: Use query functions in global plugins
Jakub Trllo 2022-07-08 12:47:20 +02:00 committed by GitHub
commit eb26078731
12 changed files with 330 additions and 217 deletions

View file

@ -29,6 +29,7 @@ from .entities import (
get_representations,
get_representation_parents,
get_representations_parents,
get_archived_representations,
get_thumbnail,
get_thumbnails,
@ -66,6 +67,7 @@ __all__ = (
"get_representations",
"get_representation_parents",
"get_representations_parents",
"get_archived_representations",
"get_thumbnail",
"get_thumbnails",

View file

@ -384,6 +384,7 @@ def get_subsets(
subset_ids=None,
subset_names=None,
asset_ids=None,
names_by_asset_ids=None,
archived=False,
fields=None
):
@ -399,6 +400,9 @@ def get_subsets(
Filter ignored if 'None' is passed.
asset_ids (list[str|ObjectId]): Asset ids under which to look for
the subsets. Filter ignored if 'None' is passed.
names_by_asset_ids (dict[ObjectId, list[str]]): Complex filtering
using asset ids and list of subset names under the asset.
archived (bool): Look for archived subsets too.
fields (list[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
@ -432,6 +436,18 @@ def get_subsets(
return []
query_filter["name"] = {"$in": list(subset_names)}
if names_by_asset_ids is not None:
or_query = []
for asset_id, names in names_by_asset_ids.items():
if asset_id and names:
or_query.append({
"parent": _convert_id(asset_id),
"name": {"$in": list(names)}
})
if not or_query:
return []
query_filter["$or"] = or_query
conn = _get_project_connection(project_name)
return conn.find(query_filter, _prepare_fields(fields))
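For reference, a minimal usage sketch of the new 'names_by_asset_ids' filter; the project name and ObjectIds below are hypothetical:

from bson.objectid import ObjectId

from openpype.client import get_subsets

# Subset names grouped by the asset id they live under (hypothetical ids).
names_by_asset_ids = {
    ObjectId("614b5e6a1c7a2f3d4e5f6a7b"): ["modelMain", "rigMain"],
    ObjectId("614b5e6a1c7a2f3d4e5f6a7c"): ["renderAnimMain"],
}
# Becomes a '$or' of per-asset ("parent", "name") conditions.
subset_docs = list(get_subsets(
    "my_project",
    names_by_asset_ids=names_by_asset_ids,
    fields=["_id", "name", "parent"]
))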
@ -742,7 +758,10 @@ def get_last_versions(project_name, subset_ids, fields=None):
"""Latest versions for entered subset_ids.
Args:
project_name (str): Name of project where to look for queried entities.
subset_ids (list): List of subset ids.
fields (list[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
Returns:
dict[ObjectId, dict]: Key is subset id and value is the last version document.
@ -752,7 +771,34 @@ def get_last_versions(project_name, subset_ids, fields=None):
if not subset_ids:
return {}
_pipeline = [
if fields is not None:
fields = list(fields)
if not fields:
return {}
# Avoid a second query if only '_id', 'parent' and 'name' are requested
name_needed = False
limit_query = False
if fields:
fields_s = set(fields)
if "name" in fields_s:
name_needed = True
fields_s.remove("name")
for field in ("_id", "parent"):
if field in fields_s:
fields_s.remove(field)
limit_query = len(fields_s) == 0
group_item = {
"_id": "$parent",
"_version_id": {"$last": "$_id"}
}
# Add 'name' to the group output when requested (used by the single-query path)
if name_needed:
group_item["name"] = {"$last": "$name"}
aggregation_pipeline = [
# Find all versions of those subsets
{"$match": {
"type": "version",
@ -761,16 +807,24 @@ def get_last_versions(project_name, subset_ids, fields=None):
# Sorting versions all together
{"$sort": {"name": 1}},
# Group them by "parent", but only take the last
{"$group": {
"_id": "$parent",
"_version_id": {"$last": "$_id"}
}}
{"$group": group_item}
]
conn = _get_project_connection(project_name)
aggregate_result = conn.aggregate(aggregation_pipeline)
if limit_query:
output = {}
for item in aggregate_result:
subset_id = item["_id"]
item_data = {"_id": item["_version_id"], "parent": subset_id}
if name_needed:
item_data["name"] = item["name"]
output[subset_id] = item_data
return output
version_ids = [
doc["_version_id"]
for doc in conn.aggregate(_pipeline)
for doc in aggregate_result
]
fields = _prepare_fields(fields, ["parent"])
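A short sketch of the two paths above; the project name and id are hypothetical. Requesting only fields covered by the aggregation ('_id', 'parent', 'name') skips the second query entirely:

from bson.objectid import ObjectId

from openpype.client import get_last_versions

subset_ids = [ObjectId("614b5e6a1c7a2f3d4e5f6a7b")]  # hypothetical

# Fast path: output is assembled directly from the '$group' stage.
last_versions = get_last_versions("my_project", subset_ids, fields=["name"])
for subset_id, version_doc in last_versions.items():
    print(subset_id, version_doc["name"])

# Full path: any other field (e.g. 'data') triggers a find() by version ids.
last_versions_full = get_last_versions(
    "my_project", subset_ids, fields=["name", "data"]
)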
@ -867,7 +921,7 @@ def get_representation_by_id(project_name, representation_id, fields=None):
if not representation_id:
return None
repre_types = ["representation", "archived_representations"]
repre_types = ["representation", "archived_representation"]
query_filter = {
"type": {"$in": repre_types}
}
@ -911,43 +965,26 @@ def get_representation_by_name(
return conn.find_one(query_filter, _prepare_fields(fields))
def get_representations(
def _get_representations(
project_name,
representation_ids=None,
representation_names=None,
version_ids=None,
extensions=None,
names_by_version_ids=None,
archived=False,
fields=None
representation_ids,
representation_names,
version_ids,
extensions,
names_by_version_ids,
standard,
archived,
fields
):
"""Representaion entities data from one project filtered by filters.
Filters are additive (all conditions must pass to return subset).
Args:
project_name (str): Name of project where to look for queried entities.
representation_ids (list[str|ObjectId]): Representation ids used as
filter. Filter ignored if 'None' is passed.
representation_names (list[str]): Representation names used as filter.
Filter ignored if 'None' is passed.
version_ids (list[str]): Version ids used as parent filter. Filter
ignored if 'None' is passed.
extensions (list[str]): Filter by extension of main representation
file (without dot).
names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
using version ids and list of names under the version.
archived (bool): Output will also contain archived representations.
fields (list[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
Returns:
Cursor: Iterable cursor yielding all matching representations.
"""
repre_types = ["representation"]
repre_types = []
if standard:
repre_types.append("representation")
if archived:
repre_types.append("archived_representations")
repre_types.append("archived_representation")
if not repre_types:
return []
if len(repre_types) == 1:
query_filter = {"type": repre_types[0]}
else:
@ -992,6 +1029,99 @@ def get_representations(
return conn.find(query_filter, _prepare_fields(fields))
def get_representations(
project_name,
representation_ids=None,
representation_names=None,
version_ids=None,
extensions=None,
names_by_version_ids=None,
archived=False,
standard=True,
fields=None
):
"""Representaion entities data from one project filtered by filters.
Filters are additive (all conditions must pass to return subset).
Args:
project_name (str): Name of project where to look for queried entities.
representation_ids (list[str|ObjectId]): Representation ids used as
filter. Filter ignored if 'None' is passed.
representation_names (list[str]): Representation names used as filter.
Filter ignored if 'None' is passed.
version_ids (list[str]): Version ids used as parent filter. Filter
ignored if 'None' is passed.
extensions (list[str]): Filter by extension of main representation
file (without dot).
names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
using version ids and list of names under the version.
archived (bool): Output will also contain archived representations.
standard (bool): Output will contain standard (non-archived)
representations. Defaults to 'True'.
fields (list[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
Returns:
Cursor: Iterable cursor yielding all matching representations.
"""
return _get_representations(
project_name=project_name,
representation_ids=representation_ids,
representation_names=representation_names,
version_ids=version_ids,
extensions=extensions,
names_by_version_ids=names_by_version_ids,
standard=standard,
archived=archived,
fields=fields
)
def get_archived_representations(
project_name,
representation_ids=None,
representation_names=None,
version_ids=None,
extensions=None,
names_by_version_ids=None,
fields=None
):
"""Archived representaion entities data from project with applied filters.
Filters are additive (all conditions must pass to return subset).
Args:
project_name (str): Name of project where to look for queried entities.
representation_ids (list[str|ObjectId]): Representation ids used as
filter. Filter ignored if 'None' is passed.
representation_names (list[str]): Representation names used as filter.
Filter ignored if 'None' is passed.
version_ids (list[str]): Version ids used as parent filter. Filter
ignored if 'None' is passed.
extensions (list[str]): Filter by extension of main representation
file (without dot).
names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
using version ids and list of names under the version.
fields (list[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
Returns:
Cursor: Iterable cursor yielding all matching representations.
"""
return _get_representations(
project_name=project_name,
representation_ids=representation_ids,
representation_names=representation_names,
version_ids=version_ids,
extensions=extensions,
names_by_version_ids=names_by_version_ids,
standard=False,
archived=True,
fields=fields
)
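Both public wrappers delegate to '_get_representations'; a usage sketch with a hypothetical project name and id:

from bson.objectid import ObjectId

from openpype.client import (
    get_representations,
    get_archived_representations,
)

version_id = ObjectId("614b5e6a1c7a2f3d4e5f6a7d")  # hypothetical

# Standard representations under a version (type "representation").
repre_docs = list(get_representations(
    "my_project", version_ids=[version_id]
))
# Archived only (type "archived_representation").
archived_docs = list(get_archived_representations(
    "my_project", version_ids=[version_id]
))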
def get_representations_parents(project_name, representations):
"""Prepare parents of representation entities.

View file

@ -7,6 +7,7 @@ from pymongo import UpdateOne
import qargparse
from Qt import QtWidgets, QtCore
from openpype.client import get_versions, get_representations
from openpype import style
from openpype.pipeline import load, AvalonMongoDB, Anatomy
from openpype.lib import StringTemplate
@ -197,18 +198,10 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
def get_data(self, context, versions_count):
subset = context["subset"]
asset = context["asset"]
anatomy = Anatomy(context["project"]["name"])
project_name = context["project"]["name"]
anatomy = Anatomy(project_name)
self.dbcon = AvalonMongoDB()
self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
self.dbcon.install()
versions = list(
self.dbcon.find({
"type": "version",
"parent": {"$in": [subset["_id"]]}
})
)
versions = list(get_versions(project_name, subset_ids=[subset["_id"]]))
versions_by_parent = collections.defaultdict(list)
for ent in versions:
@ -267,10 +260,9 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
print(msg)
return
repres = list(self.dbcon.find({
"type": "representation",
"parent": {"$in": version_ids}
}))
repres = list(get_representations(
project_name, version_ids=version_ids
))
self.log.debug(
"Collected representations to remove ({})".format(len(repres))
@ -329,7 +321,7 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
return data
def main(self, data, remove_publish_folder):
def main(self, project_name, data, remove_publish_folder):
# Size of files.
size = 0
if not data:
@ -366,9 +358,11 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
))
if mongo_changes_bulk:
self.dbcon.bulk_write(mongo_changes_bulk)
self.dbcon.uninstall()
dbcon = AvalonMongoDB()
dbcon.Session["AVALON_PROJECT"] = project_name
dbcon.install()
dbcon.bulk_write(mongo_changes_bulk)
dbcon.uninstall()
self._ftrack_delete_versions(data)
@ -458,7 +452,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
if not data:
continue
size += self.main(data, remove_publish_folder)
project_name = context["project"]["name"]
size += self.main(project_name, data, remove_publish_folder)
print("Progressing {}/{}".format(count + 1, len(contexts)))
msg = "Total size of files: " + self.sizeof_fmt(size)
@ -484,7 +479,7 @@ class CalculateOldVersions(DeleteOldVersions):
)
]
def main(self, data, remove_publish_folder):
def main(self, project_name, data, remove_publish_folder):
size = 0
if not data:

View file

@ -3,8 +3,9 @@ from collections import defaultdict
from Qt import QtWidgets, QtCore, QtGui
from openpype.client import get_representations
from openpype.lib import config
from openpype.pipeline import load, AvalonMongoDB, Anatomy
from openpype.pipeline import load, Anatomy
from openpype import resources, style
from openpype.lib.delivery import (
@ -68,17 +69,13 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
self.setStyleSheet(style.load_stylesheet())
project = contexts[0]["project"]["name"]
self.anatomy = Anatomy(project)
project_name = contexts[0]["project"]["name"]
self.anatomy = Anatomy(project_name)
self._representations = None
self.log = log
self.currently_uploaded = 0
self.dbcon = AvalonMongoDB()
self.dbcon.Session["AVALON_PROJECT"] = project
self.dbcon.install()
self._set_representations(contexts)
self._set_representations(project_name, contexts)
dropdown = QtWidgets.QComboBox()
self.templates = self._get_templates(self.anatomy)
@ -238,13 +235,12 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
return templates
def _set_representations(self, contexts):
def _set_representations(self, project_name, contexts):
version_ids = [context["version"]["_id"] for context in contexts]
repres = list(self.dbcon.find({
"type": "representation",
"parent": {"$in": version_ids}
}))
repres = list(get_representations(
project_name, version_ids=version_ids
))
self._representations = repres

View file

@ -27,6 +27,11 @@ import collections
import pyblish.api
from openpype.client import (
get_assets,
get_subsets,
get_last_versions
)
from openpype.pipeline import legacy_io
@ -44,13 +49,14 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
def process(self, context):
self.log.info("Collecting anatomy data for all instances.")
self.fill_missing_asset_docs(context)
self.fill_latest_versions(context)
project_name = legacy_io.active_project()
self.fill_missing_asset_docs(context, project_name)
self.fill_latest_versions(context, project_name)
self.fill_anatomy_data(context)
self.log.info("Anatomy Data collection finished.")
def fill_missing_asset_docs(self, context):
def fill_missing_asset_docs(self, context, project_name):
self.log.debug("Qeurying asset documents for instances.")
context_asset_doc = context.data.get("assetEntity")
@ -84,10 +90,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
self.log.debug("Querying asset documents with names: {}".format(
", ".join(["\"{}\"".format(name) for name in asset_names])
))
asset_docs = legacy_io.find({
"type": "asset",
"name": {"$in": asset_names}
})
asset_docs = get_assets(project_name, asset_names=asset_names)
asset_docs_by_name = {
asset_doc["name"]: asset_doc
for asset_doc in asset_docs
@ -111,7 +115,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
"Not found asset documents with names \"{}\"."
).format(joined_asset_names))
def fill_latest_versions(self, context):
def fill_latest_versions(self, context, project_name):
"""Try to find latest version for each instance's subset.
Key "latestVersion" is always set to latest version or `None`.
@ -126,7 +130,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
self.log.debug("Qeurying latest versions for instances.")
hierarchy = {}
subset_filters = []
names_by_asset_ids = collections.defaultdict(set)
for instance in context:
# Make sure `"latestVersion"` key is set
latest_version = instance.data.get("latestVersion")
@ -147,67 +151,33 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
if subset_name not in hierarchy[asset_id]:
hierarchy[asset_id][subset_name] = []
hierarchy[asset_id][subset_name].append(instance)
subset_filters.append({
"parent": asset_id,
"name": subset_name
})
names_by_asset_ids[asset_id].add(subset_name)
subset_docs = []
if subset_filters:
subset_docs = list(legacy_io.find({
"type": "subset",
"$or": subset_filters
}))
if names_by_asset_ids:
subset_docs = list(get_subsets(
project_name, names_by_asset_ids=names_by_asset_ids
))
subset_ids = [
subset_doc["_id"]
for subset_doc in subset_docs
]
last_version_by_subset_id = self._query_last_versions(subset_ids)
last_version_docs_by_subset_id = get_last_versions(
project_name, subset_ids, fields=["name"]
)
for subset_doc in subset_docs:
subset_id = subset_doc["_id"]
last_version = last_version_by_subset_id.get(subset_id)
if last_version is None:
last_version_doc = last_version_docs_by_subset_id.get(subset_id)
if last_version_doc is None:
continue
asset_id = subset_doc["parent"]
subset_name = subset_doc["name"]
_instances = hierarchy[asset_id][subset_name]
for _instance in _instances:
_instance.data["latestVersion"] = last_version
def _query_last_versions(self, subset_ids):
"""Retrieve all latest versions for entered subset_ids.
Args:
subset_ids (list): List of subset ids with type `ObjectId`.
Returns:
dict: Key is subset id and value is last version name.
"""
_pipeline = [
# Find all versions of those subsets
{"$match": {
"type": "version",
"parent": {"$in": subset_ids}
}},
# Sorting versions all together
{"$sort": {"name": 1}},
# Group them by "parent", but only take the last
{"$group": {
"_id": "$parent",
"_version_id": {"$last": "$_id"},
"name": {"$last": "$name"}
}}
]
last_version_by_subset_id = {}
for doc in legacy_io.aggregate(_pipeline):
subset_id = doc["_id"]
last_version_by_subset_id[subset_id] = doc["name"]
return last_version_by_subset_id
_instance.data["latestVersion"] = last_version_doc["name"]
def fill_anatomy_data(self, context):
self.log.debug("Storing anatomy data to instance data.")

View file

@ -10,6 +10,7 @@ Provides:
import pyblish.api
from openpype.client import get_project, get_asset_by_name
from openpype.pipeline import legacy_io
@ -25,10 +26,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
project_entity = legacy_io.find_one({
"type": "project",
"name": project_name
})
project_entity = get_project(project_name)
assert project_entity, (
"Project '{0}' was not found."
).format(project_name)
@ -39,11 +37,8 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
if not asset_name:
self.log.info("Context is not set. Can't collect global data.")
return
asset_entity = legacy_io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
asset_entity = get_asset_by_name(project_name, asset_name)
assert asset_entity, (
"No asset found by the name '{0}' in project '{1}'"
).format(asset_name, project_name)

View file

@ -1,7 +1,6 @@
from bson.objectid import ObjectId
import pyblish.api
from openpype.client import get_representations
from openpype.pipeline import (
registered_host,
legacy_io,
@ -39,23 +38,29 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
return
loaded_versions = []
_containers = list(host.ls())
_repr_ids = [ObjectId(c["representation"]) for c in _containers]
repre_docs = legacy_io.find(
{"_id": {"$in": _repr_ids}},
projection={"_id": 1, "parent": 1}
containers = list(host.ls())
repre_ids = {
container["representation"]
for container in containers
}
project_name = legacy_io.active_project()
repre_docs = get_representations(
project_name,
representation_ids=repre_ids,
fields=["_id", "parent"]
)
version_by_repr = {
str(doc["_id"]): doc["parent"]
repre_doc_by_str_id = {
str(doc["_id"]): doc
for doc in repre_docs
}
# QUESTION should we add same representation id when loaded multiple
# times?
for con in _containers:
for con in containers:
repre_id = con["representation"]
version_id = version_by_repr.get(repre_id)
if version_id is None:
repre_doc = repre_doc_by_str_id.get(repre_id)
if repre_doc is None:
self.log.warning((
"Skipping container,"
" did not find representation document. {}"
@ -66,8 +71,8 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
# may have more than one representation of the same version
version = {
"subsetName": con["name"],
"representation": ObjectId(repre_id),
"version": version_id,
"representation": repre_doc["_id"],
"version": repre_doc["parent"],
}
loaded_versions.append(version)
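The container lookup, condensed into a standalone sketch (hypothetical container data; representation ids may be strings, as the plugin passes them straight from 'host.ls()'):

from openpype.client import get_representations

# Hypothetical 'host.ls()' output.
containers = [
    {"name": "modelMain_01", "representation": "614b5e6a1c7a2f3d4e5f6a7b"},
]
repre_ids = {container["representation"] for container in containers}
repre_doc_by_str_id = {
    str(doc["_id"]): doc
    for doc in get_representations(
        "my_project", representation_ids=repre_ids, fields=["_id", "parent"]
    )
}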

View file

@ -1,5 +1,11 @@
from copy import deepcopy
import pyblish.api
from openpype.client import (
get_project,
get_asset_by_id,
get_asset_by_name,
get_archived_assets
)
from openpype.pipeline import legacy_io
@ -19,14 +25,14 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if not legacy_io.Session:
legacy_io.install()
project_name = legacy_io.active_project()
hierarchy_context = self._get_active_assets(context)
self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))
self.project = None
self.import_to_avalon(hierarchy_context)
self.import_to_avalon(project_name, hierarchy_context)
def import_to_avalon(self, input_data, parent=None):
def import_to_avalon(self, project_name, input_data, parent=None):
for name in input_data:
self.log.info("input_data[name]: {}".format(input_data[name]))
entity_data = input_data[name]
@ -62,7 +68,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
update_data = True
# Process project
if entity_type.lower() == "project":
entity = legacy_io.find_one({"type": "project"})
entity = get_project(project_name)
# TODO: should be in validator?
assert (entity is not None), "Did not find project in DB"
@ -79,7 +85,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
)
# Else process asset
else:
entity = legacy_io.find_one({"type": "asset", "name": name})
entity = get_asset_by_name(project_name, name)
if entity:
# Do not override data, only update
cur_entity_data = entity.get("data") or {}
@ -103,10 +109,10 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
# Skip updating data
update_data = False
archived_entities = legacy_io.find({
"type": "archived_asset",
"name": name
})
archived_entities = get_archived_assets(
project_name,
asset_names=[name]
)
unarchive_entity = None
for archived_entity in archived_entities:
archived_parents = (
@ -120,7 +126,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if unarchive_entity is None:
# Create entity if doesn"t exist
entity = self.create_avalon_asset(name, data)
entity = self.create_avalon_asset(
project_name, name, data
)
else:
# Unarchive if entity was archived
entity = self.unarchive_entity(unarchive_entity, data)
@ -133,7 +141,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
)
if "childs" in entity_data:
self.import_to_avalon(entity_data["childs"], entity)
self.import_to_avalon(
project_name, entity_data["childs"], entity
)
def unarchive_entity(self, entity, data):
# Unarchived asset should not use same data
@ -151,7 +161,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
)
return new_entity
def create_avalon_asset(self, name, data):
def create_avalon_asset(self, project_name, name, data):
item = {
"schema": "openpype:asset-3.0",
"name": name,
@ -162,7 +172,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
self.log.debug("Creating asset: {}".format(item))
entity_id = legacy_io.insert_one(item).inserted_id
return legacy_io.find_one({"_id": entity_id})
return get_asset_by_id(project_name, entity_id)
def _get_active_assets(self, context):
""" Returns only asset dictionary.

View file

@ -8,6 +8,12 @@ from bson.objectid import ObjectId
from pymongo import InsertOne, ReplaceOne
import pyblish.api
from openpype.client import (
get_version_by_id,
get_hero_version_by_subset_id,
get_archived_representations,
get_representations,
)
from openpype.lib import (
create_hard_link,
filter_profiles
@ -85,9 +91,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
hero_template
))
self.integrate_instance(instance, template_key, hero_template)
self.integrate_instance(
instance, project_name, template_key, hero_template
)
def integrate_instance(self, instance, template_key, hero_template):
def integrate_instance(
self, instance, project_name, template_key, hero_template
):
anatomy = instance.context.data["anatomy"]
published_repres = instance.data["published_representations"]
hero_publish_dir = self.get_publish_dir(instance, template_key)
@ -118,8 +128,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
"Published version entity was not sent in representation data."
" Querying entity from database."
))
src_version_entity = (
self.version_from_representations(published_repres)
src_version_entity = self.version_from_representations(
project_name, published_repres
)
if not src_version_entity:
@ -170,8 +180,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
other_file_paths_mapping.append((file_path, dst_filepath))
# Current version
old_version, old_repres = (
self.current_hero_ents(src_version_entity)
old_version, old_repres = self.current_hero_ents(
project_name, src_version_entity
)
old_repres_by_name = {
@ -223,11 +233,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
if old_repres_by_name:
old_repres_to_delete = old_repres_by_name
archived_repres = list(legacy_io.find({
# Check what is type of archived representation
"type": "archived_repsentation",
"parent": new_version_id
}))
archived_repres = list(get_archived_representations(
project_name,
version_ids=[new_version_id]
))
archived_repres_by_name = {}
for repre in archived_repres:
repre_name_low = repre["name"].lower()
@ -586,25 +596,23 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
shutil.copy(src_path, dst_path)
def version_from_representations(self, repres):
def version_from_representations(self, project_name, repres):
for repre in repres:
version = legacy_io.find_one({"_id": repre["parent"]})
version = get_version_by_id(project_name, repre["parent"])
if version:
return version
def current_hero_ents(self, version):
hero_version = legacy_io.find_one({
"parent": version["parent"],
"type": "hero_version"
})
def current_hero_ents(self, project_name, version):
hero_version = get_hero_version_by_subset_id(
project_name, version["parent"]
)
if not hero_version:
return (None, [])
hero_repres = list(legacy_io.find({
"parent": hero_version["_id"],
"type": "representation"
}))
hero_repres = list(get_representations(
project_name, version_ids=[hero_version["_id"]]
))
return (hero_version, hero_repres)
def _update_path(self, anatomy, path, src_file, dst_file):

View file

@ -16,6 +16,15 @@ from pymongo import DeleteOne, InsertOne
import pyblish.api
import openpype.api
from openpype.client import (
get_asset_by_name,
get_subset_by_id,
get_subset_by_name,
get_version_by_id,
get_version_by_name,
get_representations,
get_archived_representations,
)
from openpype.lib.profiles_filtering import filter_profiles
from openpype.lib import (
prepare_template_data,
@ -201,6 +210,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
context = instance.context
project_entity = instance.data["projectEntity"]
project_name = project_entity["name"]
context_asset_name = None
context_asset_doc = context.data.get("assetEntity")
@ -210,11 +220,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
asset_name = instance.data["asset"]
asset_entity = instance.data.get("assetEntity")
if not asset_entity or asset_entity["name"] != context_asset_name:
asset_entity = legacy_io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
asset_entity = get_asset_by_name(project_name, asset_name)
assert asset_entity, (
"No asset found by the name \"{0}\" in project \"{1}\""
).format(asset_name, project_entity["name"])
@ -270,7 +276,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"Establishing staging directory @ {0}".format(stagingdir)
)
subset = self.get_subset(asset_entity, instance)
subset = self.get_subset(project_name, asset_entity, instance)
instance.data["subsetEntity"] = subset
version_number = instance.data["version"]
@ -297,11 +303,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
for _repre in repres
]
existing_version = legacy_io.find_one({
'type': 'version',
'parent': subset["_id"],
'name': version_number
})
existing_version = get_version_by_name(
project_name, version_number, subset["_id"]
)
if existing_version is None:
version_id = legacy_io.insert_one(version).inserted_id
@ -322,10 +326,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
version_id = existing_version['_id']
# Find representations of existing version and archive them
current_repres = list(legacy_io.find({
"type": "representation",
"parent": version_id
}))
current_repres = list(get_representations(
project_name, version_ids=[version_id]
))
bulk_writes = []
for repre in current_repres:
if append_repres:
@ -345,18 +348,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# bulk updates
if bulk_writes:
project_name = legacy_io.Session["AVALON_PROJECT"]
legacy_io.database[project_name].bulk_write(
bulk_writes
)
version = legacy_io.find_one({"_id": version_id})
version = get_version_by_id(project_name, version_id)
instance.data["versionEntity"] = version
existing_repres = list(legacy_io.find({
"parent": version_id,
"type": "archived_representation"
}))
existing_repres = list(get_archived_representations(
project_name,
version_ids=[version_id]
))
instance.data['version'] = version['name']
@ -792,13 +794,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
create_hard_link(src, dst)
def get_subset(self, asset, instance):
def get_subset(self, project_name, asset, instance):
subset_name = instance.data["subset"]
subset = legacy_io.find_one({
"type": "subset",
"parent": asset["_id"],
"name": subset_name
})
subset = get_subset_by_name(project_name, subset_name, asset["_id"])
if subset is None:
self.log.info("Subset '%s' not found, creating ..." % subset_name)
@ -825,7 +823,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"parent": asset["_id"]
}).inserted_id
subset = legacy_io.find_one({"_id": _id})
subset = get_subset_by_id(project_name, _id)
# QUESTION Why is changing the group and updating its
# families done in 'get_subset'?

View file

@ -8,6 +8,7 @@ import six
import pyblish.api
from bson.objectid import ObjectId
from openpype.client import get_version_by_id
from openpype.pipeline import legacy_io
@ -70,7 +71,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
version = legacy_io.find_one({"_id": thumb_repre["parent"]})
version = get_version_by_id(project_name, thumb_repre["parent"])
if not version:
raise AssertionError(
"There does not exist version with id {}".format(

View file

@ -3,6 +3,7 @@ from pprint import pformat
import pyblish.api
from openpype.pipeline import legacy_io
from openpype.client import get_assets
class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
@ -29,8 +30,10 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
if not legacy_io.Session:
legacy_io.install()
db_assets = list(legacy_io.find(
{"type": "asset"}, {"name": 1, "data.parents": 1}))
project_name = legacy_io.active_project()
db_assets = list(get_assets(
project_name, fields=["name", "data.parents"]
))
self.log.debug("__ db_assets: {}".format(db_assets))
asset_db_docs = {