diff --git a/openpype/client/entities.py b/openpype/client/entities.py
index 7362f57d7f..67ddb09ddb 100644
--- a/openpype/client/entities.py
+++ b/openpype/client/entities.py
@@ -6,6 +6,7 @@ that has project name as a context (e.g. on 'ProjectEntity'?).
 + We will need more specific functions doing wery specific queires really fast.
 """
+import re
 import collections

 import six
@@ -1009,17 +1010,70 @@ def get_representation_by_name(
     return conn.find_one(query_filter, _prepare_fields(fields))


+def _flatten_dict(data):
+    flatten_queue = collections.deque()
+    flatten_queue.append(data)
+    output = {}
+    while flatten_queue:
+        item = flatten_queue.popleft()
+        for key, value in item.items():
+            if not isinstance(value, dict):
+                output[key] = value
+                continue
+
+            tmp = {}
+            for subkey, subvalue in value.items():
+                new_key = "{}.{}".format(key, subkey)
+                tmp[new_key] = subvalue
+            flatten_queue.append(tmp)
+    return output
+
+
+def _regex_filters(filters):
+    output = []
+    for key, value in filters.items():
+        regexes = []
+        a_values = []
+        if isinstance(value, re.Pattern):
+            regexes.append(value)
+        elif isinstance(value, (list, tuple, set)):
+            for item in value:
+                if isinstance(item, re.Pattern):
+                    regexes.append(item)
+                else:
+                    a_values.append(item)
+        else:
+            a_values.append(value)
+
+        key_filters = []
+        if len(a_values) == 1:
+            key_filters.append({key: a_values[0]})
+        elif a_values:
+            key_filters.append({key: {"$in": a_values}})
+
+        for regex in regexes:
+            key_filters.append({key: {"$regex": regex}})
+
+        if len(key_filters) == 1:
+            output.append(key_filters[0])
+        else:
+            output.append({"$or": key_filters})
+
+    return output
+
+
 def _get_representations(
     project_name,
     representation_ids,
     representation_names,
     version_ids,
-    extensions,
+    context_filters,
     names_by_version_ids,
     standard,
     archived,
     fields
 ):
+    default_output = []
     repre_types = []
     if standard:
         repre_types.append("representation")
@@ -1027,7 +1081,7 @@ def _get_representations(
         repre_types.append("archived_representation")

     if not repre_types:
-        return []
+        return default_output

     if len(repre_types) == 1:
         query_filter = {"type": repre_types[0]}
@@ -1037,25 +1091,21 @@ def _get_representations(
     if representation_ids is not None:
         representation_ids = _convert_ids(representation_ids)
         if not representation_ids:
-            return []
+            return default_output
         query_filter["_id"] = {"$in": representation_ids}

     if representation_names is not None:
         if not representation_names:
-            return []
+            return default_output
         query_filter["name"] = {"$in": list(representation_names)}

     if version_ids is not None:
         version_ids = _convert_ids(version_ids)
         if not version_ids:
-            return []
+            return default_output
         query_filter["parent"] = {"$in": version_ids}

-    if extensions is not None:
-        if not extensions:
-            return []
-        query_filter["context.ext"] = {"$in": list(extensions)}
-
+    or_queries = []
     if names_by_version_ids is not None:
         or_query = []
         for version_id, names in names_by_version_ids.items():
@@ -1065,8 +1115,36 @@ def _get_representations(
                 "name": {"$in": list(names)}
             })
         if not or_query:
+            return default_output
+        or_queries.append(or_query)
+
+    if context_filters is not None:
+        if not context_filters:
             return []
-        query_filter["$or"] = or_query
+        _flatten_filters = _flatten_dict(context_filters)
+        flatten_filters = {}
+        for key, value in _flatten_filters.items():
+            if not key.startswith("context"):
+                key = "context.{}".format(key)
+            flatten_filters[key] = value
+
+        for item in _regex_filters(flatten_filters):
+            for key, value in item.items():
+                if key != "$or":
+                    query_filter[key] = value
+
+                elif value:
+                    or_queries.append(value)
+
+    if len(or_queries) == 1:
+        query_filter["$or"] = or_queries[0]
+    elif or_queries:
+        and_query = []
+        for or_query in or_queries:
+            if isinstance(or_query, list):
+                or_query = {"$or": or_query}
+            and_query.append(or_query)
+        query_filter["$and"] = and_query

     conn = get_project_connection(project_name)

@@ -1078,7 +1156,7 @@ def get_representations(
     representation_ids=None,
     representation_names=None,
     version_ids=None,
-    extensions=None,
+    context_filters=None,
     names_by_version_ids=None,
     archived=False,
     standard=True,
@@ -1096,8 +1174,8 @@ def get_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter.
             Filter ignored if 'None' is passed.
-        extensions (Iterable[str]): Filter by extension of main representation
-            file (without dot).
+        context_filters (Dict[str, List[Union[str, re.Pattern]]]): Filter by
+            representation context fields.
         names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
             using version ids and list of names under the version.
         archived (bool): Output will also contain archived representations.
@@ -1113,7 +1191,7 @@ def get_representations(
         representation_ids=representation_ids,
         representation_names=representation_names,
         version_ids=version_ids,
-        extensions=extensions,
+        context_filters=context_filters,
         names_by_version_ids=names_by_version_ids,
         standard=True,
         archived=archived,
@@ -1126,7 +1204,7 @@ def get_archived_representations(
     representation_ids=None,
     representation_names=None,
     version_ids=None,
-    extensions=None,
+    context_filters=None,
     names_by_version_ids=None,
     fields=None
 ):
@@ -1142,8 +1220,8 @@ def get_archived_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter.
             Filter ignored if 'None' is passed.
-        extensions (Iterable[str]): Filter by extension of main representation
-            file (without dot).
+        context_filters (Dict[str, List[Union[str, re.Pattern]]]): Filter by
+            representation context fields.
         names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
             using version ids and list of names under the version.
         fields (Iterable[str]): Fields that should be returned. All fields are
@@ -1158,7 +1236,7 @@ def get_archived_representations(
         representation_ids=representation_ids,
         representation_names=representation_names,
         version_ids=version_ids,
-        extensions=extensions,
+        context_filters=context_filters,
         names_by_version_ids=names_by_version_ids,
         standard=False,
         archived=True,
diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py
index 855c72e361..34a8450a26 100644
--- a/openpype/hosts/maya/api/lib_template_builder.py
+++ b/openpype/hosts/maya/api/lib_template_builder.py
@@ -40,6 +40,9 @@ def create_placeholder():
     placeholder_name = create_placeholder_name(args, options)

     selection = cmds.ls(selection=True)
+    if not selection:
+        raise ValueError("Nothing is selected")
+
     placeholder = cmds.spaceLocator(name=placeholder_name)[0]

     # get the long name of the placeholder (with the groups)
diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py
index 6b225442e7..ecffafc93d 100644
--- a/openpype/hosts/maya/api/template_loader.py
+++ b/openpype/hosts/maya/api/template_loader.py
@@ -1,5 +1,7 @@
+import re
 from maya import cmds

+from openpype.client import get_representations
 from openpype.pipeline import legacy_io
 from openpype.pipeline.workfile.abstract_template_loader import (
     AbstractPlaceholder,
@@ -96,11 +98,11 @@ class MayaPlaceholder(AbstractPlaceholder):
     """Concrete implementation of AbstractPlaceholder for maya

     """
-    optional_attributes = {'asset', 'subset', 'hierarchy'}
+    optional_keys = {'asset', 'subset', 'hierarchy'}

     def get_data(self, node):
         user_data = dict()
-        for attr in self.attributes.union(self.optional_attributes):
+        for attr in self.required_keys.union(self.optional_keys):
             attribute_name = '{}.{}'.format(node, attr)
             if not cmds.attributeQuery(attr, node=node, exists=True):
                 print("{} not found".format(attribute_name))
@@ -110,7 +112,9 @@ class MayaPlaceholder(AbstractPlaceholder):
                 asString=True)
         user_data['parent'] = (
             cmds.getAttr(node + '.parent', asString=True)
-            or node.rpartition('|')[0] or "")
+            or node.rpartition('|')[0]
+            or ""
+        )
         user_data['node'] = node
         if user_data['parent']:
             siblings = cmds.listRelatives(user_data['parent'], children=True)
@@ -191,48 +195,48 @@ class MayaPlaceholder(AbstractPlaceholder):
             cmds.hide(node)
             cmds.setAttr(node + '.hiddenInOutliner', True)

-    def convert_to_db_filters(self, current_asset, linked_asset):
-        if self.data['builder_type'] == "context_asset":
-            return [
-                {
-                    "type": "representation",
-                    "context.asset": {
-                        "$eq": current_asset,
-                        "$regex": self.data['asset']
-                    },
-                    "context.subset": {"$regex": self.data['subset']},
-                    "context.hierarchy": {"$regex": self.data['hierarchy']},
-                    "context.representation": self.data['representation'],
-                    "context.family": self.data['family'],
-                }
-            ]
+    def get_representations(self, current_asset_doc, linked_asset_docs):
+        project_name = legacy_io.active_project()

-        elif self.data['builder_type'] == "linked_asset":
-            return [
-                {
-                    "type": "representation",
-                    "context.asset": {
-                        "$eq": asset_name,
-                        "$regex": self.data['asset']
-                    },
-                    "context.subset": {"$regex": self.data['subset']},
-                    "context.hierarchy": {"$regex": self.data['hierarchy']},
-                    "context.representation": self.data['representation'],
-                    "context.family": self.data['family'],
-                } for asset_name in linked_asset
-            ]
+        builder_type = self.data["builder_type"]
+        if builder_type == "context_asset":
+            context_filters = {
+                "asset": [current_asset_doc["name"]],
+                "subset": [re.compile(self.data["subset"])],
"hierarchy": [re.compile(self.data["hierarchy"])], + "representations": [self.data["representation"]], + "family": [self.data["family"]] + } + + elif builder_type != "linked_asset": + context_filters = { + "asset": [re.compile(self.data["asset"])], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]] + } else: - return [ - { - "type": "representation", - "context.asset": {"$regex": self.data['asset']}, - "context.subset": {"$regex": self.data['subset']}, - "context.hierarchy": {"$regex": self.data['hierarchy']}, - "context.representation": self.data['representation'], - "context.family": self.data['family'], - } - ] + asset_regex = re.compile(self.data["asset"]) + linked_asset_names = [] + for asset_doc in linked_asset_docs: + asset_name = asset_doc["name"] + if asset_regex.match(asset_name): + linked_asset_names.append(asset_name) + + context_filters = { + "asset": linked_asset_names, + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]], + } + + return list(get_representations( + project_name, + context_filters=context_filters + )) def err_message(self): return ( diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 8ee2e91719..05a98a1ddc 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -1,15 +1,18 @@ import os from abc import ABCMeta, abstractmethod -import traceback - import six import logging from functools import reduce from openpype.client import get_asset_by_name from openpype.settings import get_project_settings -from openpype.lib import get_linked_assets, Logger +from openpype.lib import ( + StringTemplate, + Logger, + filter_profiles, + get_linked_assets, +) from openpype.pipeline import legacy_io, Anatomy from openpype.pipeline.load import ( get_loaders_by_name, @@ -169,49 +172,65 @@ class AbstractTemplateLoader: anatomy = Anatomy(project_name) project_settings = get_project_settings(project_name) - build_info = project_settings[host_name]['templated_workfile_build'] - profiles = build_info['profiles'] + build_info = project_settings[host_name]["templated_workfile_build"] + profile = filter_profiles( + build_info["profiles"], + { + "task_types": task_type, + "tasks": task_name + } + ) - for prf in profiles: - if prf['task_types'] and task_type not in prf['task_types']: - continue - if prf['tasks'] and task_name not in prf['tasks']: - continue - path = prf['path'] - break - else: # IF no template were found (no break happened) + if not profile: raise TemplateProfileNotFound( "No matching profile found for task '{}' of type '{}' " "with host '{}'".format(task_name, task_type, host_name) ) - if path is None: + + path = profile["path"] + if not path: raise TemplateLoadingFailed( "Template path is not set.\n" "Path need to be set in {}\\Template Workfile Build " "Settings\\Profiles".format(host_name.title())) - try: - solved_path = None - while True: - solved_path = anatomy.path_remapper(path) - if solved_path is None: - solved_path = path - if solved_path == path: - break - path = solved_path - except KeyError as missing_key: - raise KeyError( - "Could not solve key '{}' in template path '{}'".format( - missing_key, path)) - finally: - solved_path = 
+        # Try to fill the path with environment variables and anatomy roots
+        fill_data = {
+            key: value
+            for key, value in os.environ.items()
+        }
+        fill_data["root"] = anatomy.roots
+        result = StringTemplate.format_template(path, fill_data)
+        if result.solved:
+            path = result.normalized()
+
+        if path and os.path.exists(path):
+            self.log.info("Found template at: '{}'".format(path))
+            return path
+
+        solved_path = None
+        while True:
+            try:
+                solved_path = anatomy.path_remapper(path)
+            except KeyError as missing_key:
+                raise KeyError(
+                    "Could not solve key '{}' in template path '{}'".format(
+                        missing_key, path))
+
+            if solved_path is None:
+                solved_path = path
+            if solved_path == path:
+                break
+            path = solved_path
+
+        solved_path = os.path.normpath(solved_path)

         if not os.path.exists(solved_path):
             raise TemplateNotFound(
                 "Template found in openPype settings for task '{}' with host "
                 "'{}' does not exists. (Not found : {})".format(
                     task_name, host_name, solved_path))

-        self.log.info("Found template at : '{}'".format(solved_path))
+        self.log.info("Found template at: '{}'".format(solved_path))

         return solved_path

@@ -223,30 +242,30 @@ class AbstractTemplateLoader:

         Returns:
             None
         """
+
         loaders_by_name = self.loaders_by_name
-        current_asset = self.current_asset
-        linked_assets = [asset['name'] for asset
-                         in get_linked_assets(self.current_asset_doc)]
+        current_asset_doc = self.current_asset_doc
+        linked_assets = get_linked_assets(current_asset_doc)
         ignored_ids = ignored_ids or []

         placeholders = self.get_placeholders()
         self.log.debug("Placeholders found in template: {}".format(
-            [placeholder.data['node'] for placeholder in placeholders]
+            [placeholder.name for placeholder in placeholders]
         ))

         for placeholder in placeholders:
             self.log.debug("Start to processing placeholder {}".format(
-                placeholder.data['node']
+                placeholder.name
             ))

             placeholder_representations = self.get_placeholder_representations(
                 placeholder,
-                current_asset,
+                current_asset_doc,
                 linked_assets
             )

             if not placeholder_representations:
                 self.log.info(
                     "There's no representation for this placeholder: "
-                    "{}".format(placeholder.data['node'])
+                    "{}".format(placeholder.name)
                 )
                 continue

@@ -264,8 +283,8 @@ class AbstractTemplateLoader:
                 "Loader arguments used : {}".format(
                     representation['context']['asset'],
                     representation['context']['subset'],
-                    placeholder.loader,
-                    placeholder.data['loader_args']))
+                    placeholder.loader_name,
+                    placeholder.loader_args))

             try:
                 container = self.load(
@@ -278,21 +297,18 @@ class AbstractTemplateLoader:
             self.postload(placeholder)

     def get_placeholder_representations(
-        self, placeholder, current_asset, linked_assets
+        self, placeholder, current_asset_doc, linked_asset_docs
     ):
-        # TODO This approach must be changed. Placeholders should return
-        #   already prepared data and not query them here.
-        #   - this is impossible to handle using query functions
-        placeholder_db_filters = placeholder.convert_to_db_filters(
-            current_asset,
-            linked_assets)
-        # get representation by assets
-        for db_filter in placeholder_db_filters:
-            placeholder_representations = list(legacy_io.find(db_filter))
-            for representation in reduce(update_representations,
-                                         placeholder_representations,
-                                         dict()).values():
-                yield representation
+        placeholder_representations = placeholder.get_representations(
+            current_asset_doc,
+            linked_asset_docs
+        )
+        for repre_doc in reduce(
+            update_representations,
+            placeholder_representations,
+            dict()
+        ).values():
+            yield repre_doc

     def load_data_is_incorrect(
         self, placeholder, last_representation, ignored_ids):
@@ -310,19 +326,22 @@ class AbstractTemplateLoader:
     def load(self, placeholder, loaders_by_name, last_representation):
         repre = get_representation_context(last_representation)
         return load_with_repre_context(
-            loaders_by_name[placeholder.loader],
+            loaders_by_name[placeholder.loader_name],
             repre,
-            options=parse_loader_args(placeholder.data['loader_args']))
+            options=parse_loader_args(placeholder.loader_args))

     def load_succeed(self, placeholder, container):
         placeholder.parent_in_hierarchy(container)

     def load_failed(self, placeholder, last_representation):
-        self.log.warning("Got error trying to load {}:{} with {}\n\n"
-                         "{}".format(last_representation['context']['asset'],
-                                     last_representation['context']['subset'],
-                                     placeholder.loader,
-                                     traceback.format_exc()))
+        self.log.warning(
+            "Got error trying to load {}:{} with {}".format(
+                last_representation['context']['asset'],
+                last_representation['context']['subset'],
+                placeholder.loader_name
+            ),
+            exc_info=True
+        )

     def postload(self, placeholder):
         placeholder.clean()
@@ -332,11 +351,15 @@ class AbstractTemplateLoader:
         self.populate_template(ignored_ids=loaded_containers_ids)

     def get_placeholders(self):
-        placeholder_class = self.placeholder_class
-        placeholders = map(placeholder_class, self.get_template_nodes())
-        valid_placeholders = filter(placeholder_class.is_valid, placeholders)
-        sorted_placeholders = sorted(valid_placeholders,
-                                     key=placeholder_class.order)
+        placeholders = map(self.placeholder_class, self.get_template_nodes())
+        valid_placeholders = filter(
+            lambda i: i.is_valid,
+            placeholders
+        )
+        sorted_placeholders = list(sorted(
+            valid_placeholders,
+            key=lambda i: i.order
+        ))
         return sorted_placeholders

     @abstractmethod
@@ -377,107 +400,127 @@ class AbstractTemplateLoader:

 @six.add_metaclass(ABCMeta)
 class AbstractPlaceholder:
-    """Abstraction of placeholders logic
+    """Abstraction of placeholders logic.
+
     Properties:
-        attributes: A list of mandatory attribute to decribe placeholder
+        required_keys: A list of mandatory keys to describe placeholder
         and assets to load.
-        optional_attributes: A list of optional attribute to decribe
+        optional_keys: A list of optional keys to describe
            placeholder and assets to load
-        loader: Name of linked loader to use while loading assets
-        is_context: Is placeholder linked
-        to context asset (or to linked assets)
-    Methods:
-        is_repres_valid:
-        loader:
-        order:
-        is_valid:
-        get_data:
-        parent_in_hierachy:
+        loader_name: Name of linked loader to use while loading assets
+
+    Args:
+        identifier (str): Placeholder identifier. Should be usable as
+            an identifier in a scene (e.g. a unique node name).
""" - attributes = {'builder_type', 'family', 'representation', - 'order', 'loader', 'loader_args'} - optional_attributes = {} + required_keys = { + "builder_type", + "family", + "representation", + "order", + "loader", + "loader_args" + } + optional_keys = {} - def __init__(self, node): - self.get_data(node) + def __init__(self, identifier): + self._log = None + self._name = identifier + self.get_data(identifier) + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + def __repr__(self): + return "< {} {} >".format(self.__class__.__name__, self.name) + + @property + def name(self): + return self._name + + @property + def loader_args(self): + return self.data["loader_args"] + + @property + def builder_type(self): + return self.data["builder_type"] + + @property def order(self): - """Get placeholder order. - Order is used to sort them by priority - Priority is lowset first, highest last - (ex: - 1: First to load - 100: Last to load) - Returns: - Int: Order priority - """ - return self.data.get('order') + return self.data["order"] @property - def loader(self): - """Return placeholder loader type + def loader_name(self): + """Return placeholder loader name. + Returns: - string: Loader name + str: Loader name that will be used to load placeholder + representations. """ - return self.data.get('loader') + + return self.data["loader"] @property - def is_context(self): - """Return placeholder type - context_asset: For loading current asset - linked_asset: For loading linked assets - Returns: - bool: true if placeholder is a context placeholder - """ - return self.data.get('builder_type') == 'context_asset' - def is_valid(self): - """Test validity of placeholder - i.e.: every attributes exists in placeholder data + """Test validity of placeholder. + + i.e.: every required key exists in placeholder data + Returns: - Bool: True if every attributes are a key of data + bool: True if every key is in data """ - if set(self.attributes).issubset(self.data.keys()): - print("Valid placeholder : {}".format(self.data["node"])) + + if set(self.required_keys).issubset(self.data.keys()): + self.log.debug("Valid placeholder : {}".format(self.name)) return True - print("Placeholder is not valid : {}".format(self.data["node"])) + self.log.info("Placeholder is not valid : {}".format(self.name)) return False @abstractmethod - def parent_in_hierarchy(self, containers): - """Place container in correct hierarchy - given by placeholder + def parent_in_hierarchy(self, container): + """Place loaded container in correct hierarchy given by placeholder + Args: - containers (String): Container name returned back by - placeholder's loader. + container (Dict[str, Any]): Loaded container created by loader. """ + pass @abstractmethod def clean(self): - """Clean placeholder from hierarchy after loading assets. - """ + """Clean placeholder from hierarchy after loading assets.""" + pass @abstractmethod - def convert_to_db_filters(self, current_asset, linked_asset): - """map current placeholder data as a db filter - args: - current_asset (String): Name of current asset in context - linked asset (list[String]) : Names of assets linked to - current asset in context - Returns: - dict: a dictionnary describing a filter to look for asset in - a database - """ - pass + def get_representations(self, current_asset_doc, linked_asset_docs): + """Query representations based on placeholder data. - @abstractmethod - def get_data(self, node): - """ - Collect placeholders information. 
         Args:
-            node (AnyNode): A unique node decided by Placeholder implementation
+            current_asset_doc (Dict[str, Any]): Document of current
+                context asset.
+            linked_asset_docs (List[Dict[str, Any]]): Documents of assets
+                linked to current context asset.
+
+        Returns:
+            Iterable[Dict[str, Any]]: Representations matching placeholder
+                filters.
         """
+
+        pass
+
+    @abstractmethod
+    def get_data(self, identifier):
+        """Collect information about placeholder by identifier.
+
+        Args:
+            identifier (str): A unique placeholder identifier defined by
+                implementation.
+        """
+        pass
diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py
index df6fe3514a..e6396578c5 100644
--- a/openpype/pipeline/workfile/build_template.py
+++ b/openpype/pipeline/workfile/build_template.py
@@ -15,7 +15,7 @@ from .build_template_exceptions import (
     MissingTemplateLoaderClass
 )

-_module_path_format = 'openpype.{host}.template_loader'
+_module_path_format = 'openpype.hosts.{host}.api.template_loader'


 def build_workfile_template(*args):
diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json
index 55c6ace900..21f3918566 100644
--- a/openpype/settings/defaults/project_settings/maya.json
+++ b/openpype/settings/defaults/project_settings/maya.json
@@ -968,13 +968,7 @@
         ]
     },
     "templated_workfile_build": {
-        "profiles": [
-            {
-                "task_types": [],
-                "tasks": [],
-                "path": "/path/to/your/template"
-            }
-        ]
+        "profiles": []
     },
     "filters": {
         "preset 1": {
@@ -985,4 +979,4 @@
             "ValidateNoAnimation": false
         }
     }
-}
\ No newline at end of file
+}
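
Usage sketch for the new 'context_filters' argument of get_representations (only the API shape comes from the patch above; the project name and filter values are hypothetical): plain values are matched exactly, several plain values per key are combined into an "$in" filter, compiled 're.Pattern' objects become "$regex" filters, and keys are automatically prefixed with "context.".

    import re

    from openpype.client import get_representations

    # Hypothetical values, for illustration only.
    project_name = "my_project"
    repre_docs = get_representations(
        project_name,
        context_filters={
            # Single plain value -> exact match on "context.asset".
            "asset": ["chr_hero"],
            # Compiled pattern -> "$regex" match on "context.subset".
            "subset": [re.compile(r"model.*")],
            # Multiple plain values -> "$in" filter on "context.representation".
            "representation": ["abc", "ma"],
        },
    )
    for repre_doc in repre_docs:
        print(repre_doc["name"], repre_doc["context"]["subset"])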