From cf43bbb18e1b6bf453241132a283a847d06e3ef1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:34:12 +0100 Subject: [PATCH 01/22] moved imports to top of lib --- pype/lib.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index ad3a863854..39d8533be8 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -1,14 +1,19 @@ import os import re +import uuid +import json +import getpass +import collections import logging import itertools import contextlib import subprocess import inspect +import platform -from avalon import io +from avalon import io, pipeline import avalon.api -import avalon +from pypeapp import config log = logging.getLogger(__name__) @@ -489,7 +494,6 @@ def filter_pyblish_plugins(plugins): `discover()` method. :type plugins: Dict """ - from pypeapp import config from pyblish import api host = api.current_host() @@ -546,7 +550,6 @@ def get_subsets(asset_name, Returns: dict: subsets with version and representations in keys """ - from avalon import io # query asset from db asset_io = io.find_one({"type": "asset", "name": asset_name}) @@ -620,7 +623,6 @@ class CustomNone: def __init__(self): """Create uuid as identifier for custom None.""" - import uuid self.identifier = str(uuid.uuid4()) def __bool__(self): From 116a16eb4c0ab04bd3f29c6d39889455e08ddfaf Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:34:48 +0100 Subject: [PATCH 02/22] get_workfile_build_presets method implemented to load workfile variants per host and current task --- pype/lib.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index 39d8533be8..06f3540177 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -643,3 +643,30 @@ class CustomNone: def __repr__(self): """Representation of custom None.""" return "<CustomNone-{}>".format(str(self.identifier)) + + +def get_workfile_build_presets(task_name): + host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1] + presets = config.get_presets(io.Session["AVALON_PROJECT"]) + # Get presets for host + workfile_presets = presets["plugins"].get(host_name, {}).get( + "workfile_build" + ) + if not workfile_presets: + return + + task_name_low = task_name.lower() + per_task_preset = None + for variant in workfile_presets: + variant_tasks = variant.get("tasks") + if not variant_tasks: + continue + + variant_tasks_low = [task.lower() for task in variant_tasks] + if task_name_low not in variant_tasks_low: + continue + + per_task_preset = variant + break + + return per_task_preset
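The preset shape that `get_workfile_build_presets` consumes is easiest to see as data. A hypothetical entry written as a Python literal (the key names are taken from how this and the following patches read them; the surrounding preset file layout is an assumption):

```
# One entry of presets["plugins"][host_name]["workfile_build"].
# "tasks" is compared case-insensitively against the current task name;
# the remaining keys are consumed by the loading logic added in later
# patches of this series.
workfile_build_preset = {
    "tasks": ["animation", "layout"],
    "current_context": [
        {
            "families": ["model", "rig"],
            "repre_names": ["ma", "abc"],
            "loaders": ["ReferenceLoader"],
            "subset_filters": ["^rigMain.*"]
        }
    ],
    "linked_assets": []
}
```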
From 99ea83939a8707babe8ffe803a1bc78e870f1c17 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:36:24 +0100 Subject: [PATCH 03/22] collect_last_version_repres implemented to get all representations of latest versions for all subsets of entered assets --- pype/lib.py | 95 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index 06f3540177..5cdcf82d4d 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -670,3 +670,98 @@ def get_workfile_build_presets(task_name): break return per_task_preset + + +def collect_last_version_repres(asset_entities): + """Collect subsets, versions and representations for asset_entities. + + :param asset_entities: Asset entities for which we want to find data + :type asset_entities: list + :return: collected entities + :rtype: dict + + Example output: + ``` + { + {Asset ID}: { + "asset_entity": <AssetEntity>, + "subsets": { + {Subset ID}: { + "subset_entity": <SubsetEntity>, + "version": { + "version_entity": <VersionEntity>, + "repres": [ + <RepreEntity1>, <RepreEntity2>, ... + ] + } + }, + ... + } + }, + ... + } + output[asset_id]["subsets"][subset_id]["version"]["repres"] + ``` + """ + + if not asset_entities: + return {} + + asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} + + subsets = list(io.find({ + "type": "subset", + "parent": {"$in": asset_entity_by_ids.keys()} + })) + subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} + + sorted_versions = list(io.find({ + "type": "version", + "parent": {"$in": subset_entity_by_ids.keys()} + }).sort("name", -1)) + + subset_id_with_latest_version = [] + last_versions_by_id = {} + for version in sorted_versions: + subset_id = version["parent"] + if subset_id in subset_id_with_latest_version: + continue + subset_id_with_latest_version.append(subset_id) + last_versions_by_id[version["_id"]] = version + + repres = io.find({ + "type": "representation", + "parent": {"$in": last_versions_by_id.keys()} + }) + + output = {} + for repre in repres: + version_id = repre["parent"] + version = last_versions_by_id[version_id] + + subset_id = version["parent"] + subset = subset_entity_by_ids[subset_id] + + asset_id = subset["parent"] + asset = asset_entity_by_ids[asset_id] + + if asset_id not in output: + output[asset_id] = { + "asset_entity": asset, + "subsets": {} + } + + if subset_id not in output[asset_id]["subsets"]: + output[asset_id]["subsets"][subset_id] = { + "subset_entity": subset, + "version": { + "version_entity": version, + "repres": [] + } + } + + output[asset_id]["subsets"][subset_id]["version"]["repres"].append( + repre + ) + + return output From a415e2255af906e940124eb8715fb253531690a0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:36:56 +0100 Subject: [PATCH 04/22] added get_link_assets, not yet implemented since we don't have the logic --- pype/lib.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index 5cdcf82d4d..3e92349d10 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -672,6 +672,12 @@ def get_workfile_build_presets(task_name): return per_task_preset +def get_link_assets(asset_entity): + """Return linked assets for `asset_entity`.""" + # TODO implement + return [test_asset] + + def collect_last_version_repres(asset_entities): """Collect subsets, versions and representations for asset_entities.
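For orientation, a minimal usage sketch of `collect_last_version_repres`, assuming an installed `avalon.io` session; the catch-all asset query is only illustrative:

```
from avalon import io
from pype.lib import collect_last_version_repres

io.install()
assets = list(io.find({"type": "asset"}))
data = collect_last_version_repres(assets)

# Walk the nested output structure described in the docstring above.
for asset_data in data.values():
    print(asset_data["asset_entity"]["name"])
    for subset_data in asset_data["subsets"].values():
        version_entity = subset_data["version"]["version_entity"]
        repre_names = [
            repre["name"] for repre in subset_data["version"]["repres"]
        ]
        print("  {} v{:03d}: {}".format(
            subset_data["subset_entity"]["name"],
            version_entity["name"],
            ", ".join(repre_names)
        ))
```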
From 57f6f9b87d8280aedff63f0c3155fc897181ff27 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:37:47 +0100 Subject: [PATCH 05/22] implemented load_containers_by_asset_data which loads all containers for specific asset by entered workfile variants --- pype/lib.py | 209 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 209 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index 3e92349d10..30c222ac57 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -672,6 +672,215 @@ def get_workfile_build_presets(task_name): return per_task_preset +def load_containers_by_asset_data( + asset_entity_data, workfile_presets, loaders_by_name +): + if not asset_entity_data or not workfile_presets or not loaders_by_name: + return + + asset_entity = asset_entity_data["asset_entity"] + # Filter workfile presets by available loaders + valid_variants = [] + for variant in workfile_presets: + variant_loaders = variant.get("loaders") + if not variant_loaders: + log.warning(( + "Workfile variant has missing loaders configuration: {0}" + ).format(json.dumps(variant, indent=4))) + continue + + found = False + for loader_name in variant_loaders: + if loader_name in loaders_by_name: + valid_variants.append(variant) + found = True + break + + if not found: + log.warning( + "Loaders in Workfile variant are not available: {0}".format( + json.dumps(variant, indent=4) + ) + ) + + if not valid_variants: + log.warning("There are not valid Workfile variants. Skipping process.") + return + + log.debug("Valid Workfile variants: {}".format(valid_variants)) + + subsets = [] + version_by_subset_id = {} + repres_by_version_id = {} + for subset_id, in_data in asset_entity_data["subsets"].items(): + subsets.append(in_data["subset_entity"]) + version_data = in_data["version"] + version_entity = version_data["version_entity"] + version_by_subset_id[subset_id] = version_entity + repres_by_version_id[version_entity["_id"]] = version_data["repres"] + + if not subsets: + log.warning("There are not subsets for asset {0}".format( + asset_entity["name"] + )) + return + + subsets_by_family = collections.defaultdict(list) + for subset in subsets: + family = subset["data"].get("family") + if not family: + families = subset["data"].get("families") + if not families: + continue + family = families[0] + + subsets_by_family[family].append(subset) + + valid_subsets_by_id = {} + variants_per_subset_id = {} + for family, subsets in subsets_by_family.items(): + family_low = family.lower() + for variant in valid_variants: + # Family filtering + variant_families = variant.get("families") or [] + if not variant_families: + continue + + variant_families_low = [fam.lower() for fam in variant_families] + if family_low not in variant_families_low: + continue + + # Regex filtering (optional) + variant_regexes = variant.get("subset_filters") + for subset in subsets: + if variant_regexes: + valid = False + for pattern in variant_regexes: + if re.match(pattern, subset["name"]): + valid = True + break + + if not valid: + continue + + subset_id = subset["_id"] + valid_subsets_by_id[subset_id] = subset + variants_per_subset_id[subset_id] = variant + + # break variants loop if got here + break + + if not valid_subsets_by_id: + log.warning("There are not valid subsets.") + return + + log.debug("Valid subsets: {}".format(valid_subsets_by_id.values())) + + valid_repres_by_subset_id = collections.defaultdict(list) + for subset_id, subset_entity in valid_subsets_by_id.items(): + variant = variants_per_subset_id[subset_id] + variant_repre_names 
= variant.get("repre_names") + if not variant_repre_names: + continue + + # Lower names + variant_repre_names = [name.lower() for name in variant_repre_names] + + version_entity = version_by_subset_id[subset_id] + version_id = version_entity["_id"] + repres = repres_by_version_id[version_id] + for repre in repres: + repre_name_low = repre["name"].lower() + if repre_name_low in variant_repre_names: + valid_repres_by_subset_id[subset_id].append(repre) + + # DEBUG message + msg = "Valid representations for Asset: `{}`".format(asset_entity["name"]) + for subset_id, repres in valid_repres_by_subset_id.items(): + subset = valid_subsets_by_id[subset_id] + msg += "\n# Subset Name/ID: `{}`/{}".format(subset["name"], subset_id) + for repre in repres: + msg += "\n## Repre name: `{}`".format(repre["name"]) + + log.debug(msg) + + loaded_containers = { + "asset_entity": asset_entity, + "containers": [] + } + + for subset_id, repres in valid_repres_by_subset_id.items(): + subset = valid_subsets_by_id[subset_id] + subset_name = subset["name"] + + variant = variants_per_subset_id[subset_id] + + variant_loader_names = variant["loaders"] + variant_loader_count = len(variant_loader_names) + + variant_repre_names = variant["repre_names"] + variant_repre_count = len(variant_repre_names) + + is_loaded = False + for repre_name_idx, variant_repre_name in enumerate( + variant_repre_names + ): + found_repre = None + for repre in repres: + repre_name = repre["name"] + if repre_name == variant_repre_name: + found_repre = repre + break + + if not found_repre: + continue + + for loader_idx, loader_name in enumerate(variant_loader_names): + if is_loaded: + break + + loader = loaders_by_name.get(loader_name) + if not loader: + continue + try: + container = avalon.api.load( + loader, + found_repre["_id"], + name=subset_name + ) + loaded_containers["containers"].append(container) + is_loaded = True + + except Exception as exc: + if exc == pipeline.IncompatibleLoaderError: + log.info(( + "Loader `{}` is not compatible with" + " representation `{}`" + ).format(loader_name, repre["name"])) + + else: + log.error( + "Unexpected error happened during loading", + exc_info=True + ) + + msg = "Loading failed." + if loader_idx < (variant_loader_count - 1): + msg += " Trying next loader." + elif repre_name_idx < (variant_repre_count - 1): + msg += ( + " Loading of subset `{}` was not successful." + ).format(subset_name) + else: + msg += " Trying next representation." + log.info(msg) + + if is_loaded: + break + + return loaded_containers + + def get_link_assets(asset_entity): """Return linked assets for `asset_entity`.""" # TODO implement From 808510d856fb5ba4562630b9673dcff630b4b88f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:39:12 +0100 Subject: [PATCH 06/22] implemented load_containers_for_workfile which loads and trigger loading by current context --- pype/lib.py | 135 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 135 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index 30c222ac57..f37da5096a 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -980,3 +980,138 @@ def collect_last_version_repres(asset_entities): ) return output + + +def load_containers_for_workfile(): + """Load containers for (first) workfile. + + Loads latest versions of current and linked assets to workfile by logic + stored in Workfile variants from presets. Variants are set by host, + filtered by current task name and used by families. 
+ + Each family can specify representation names and loaders for + representations and first available and successful loaded representation is + returned as container. + + At the end you'll get list of loaded containers per each asset. + + loaded_containers [{ + "asset_entity": , + "containers": [, , ...] + }, { + "asset_entity": , + "containers": [, ...] + }, { + ... + }] + """ + io.install() + + # Get current asset name and entity + current_asset_name = io.Session["AVALON_ASSET"] + current_asset_entity = io.find_one({ + "type": "asset", + "name": current_asset_name + }) + + # Skip if asset was not found + if not current_asset_entity: + print("Asset entity with name `{}` was not found".format( + current_asset_name + )) + return + + # Prepare available loaders + loaders_by_name = {} + for loader in avalon.api.discover(avalon.api.Loader): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError("Duplicated loader name {0}!".format(loader_name)) + loaders_by_name[loader_name] = loader + + # Skip if there are any loaders + if not loaders_by_name: + print("There are not registered loaders.") + return + + # Get current task name + current_task_name = os.environ["AVALON_TASK"] + current_task_name_low = current_task_name.lower() + # Load workfile presets for task + workfile_presets = get_workfile_build_presets(current_task_name_low) + + # Skip if there are any presets for task + if not workfile_presets: + log.warning( + "For current task `{}` is not set any loading preset.".format( + current_task_name + ) + ) + return + + # Get presets for loading current asset + current_context = workfile_presets.get("current_context") + # Get presets for loading linked assets + link_context = workfile_presets.get("linked_assets") + # Skip if both are missing + if not current_context and not link_context: + log.warning("Current task `{}` has empty loading preset.".format( + current_task_name + )) + return + + elif not current_context: + log.warning(( + "Current task `{}` don't have set loading preset for it's context." + ).format(current_task_name)) + + elif not link_context: + log.warning(( + "Current task `{}` don't have set " + "loading preset for it's linked assets." + ).format(current_task_name)) + + # Prepare assets to process by workfile presets + assets = [] + current_asset_id = None + if current_context: + # Add current asset entity if preset has current context set + assets.append(current_asset_entity) + current_asset_id = current_asset_entity["_id"] + + if link_context: + # Find and append linked assets if preset has set linked mapping + link_assets = get_link_assets(current_asset_entity) + if link_assets: + assets.extend(link_assets) + + # Skip if there are any assets + # - this may happend if only linked mapping is set and there are not links + if not assets: + log.warning("Asset does not have linked assets. 
Nothing to process.") return # Prepare entities from database for assets prepared_entities = collect_last_version_repres(assets) # Load containers by prepared entities and presets loaded_containers = [] # - Current asset containers if current_asset_id and current_asset_id in prepared_entities: current_context_data = prepared_entities.pop(current_asset_id) loaded_data = load_containers_by_asset_data( current_context_data, current_context, loaders_by_name ) if loaded_data: loaded_containers.append(loaded_data) # - Linked assets container for linked_asset_data in prepared_entities.values(): loaded_data = load_containers_by_asset_data( linked_asset_data, link_context, loaders_by_name ) if loaded_data: loaded_containers.append(loaded_data) # Return list of loaded containers return loaded_containers From 149a65f1e7f5c371c3f51bfc06932d05b6bcd60a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:39:36 +0100 Subject: [PATCH 07/22] implemented methods for creating first version workfile --- pype/lib.py | 123 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 123 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index f37da5096a..4e7d88947d 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -1115,3 +1115,126 @@ def load_containers_for_workfile(): # Return list of loaded containers return loaded_containers + + +def get_last_workfile_path(root, template, file_ext): + template = re.sub("<.*?>", ".*?", template) + template = re.sub("{version.*}", "([0-9]+)", template) + template = re.sub("{comment.*?}", ".+?", template) + # template = pipeline._format_work_template(template) + template = "^" + template + "$" + + all_file_names = [] + if os.path.exists(root): + all_file_names = os.listdir(root) + + filtered_file_names = [ + file_name for file_name in all_file_names + if os.path.splitext(file_name)[1] == file_ext + ] + + kwargs = {} + if platform.system() == "Windows": + kwargs["flags"] = re.IGNORECASE + + version = None + last_file_name = None + for file_name in sorted(filtered_file_names): + match = re.match(template, file_name, **kwargs) + if not match: + continue + + file_version = int(match.group(1)) + if version is None or file_version >= version: + last_file_name = file_name + version = file_version + 1 + + last_file_path = None + if last_file_name: + last_file_path = os.path.join(root, last_file_name) + + return last_file_path + + +def create_first_workfile(file_ext=None): + """Builds first workfile and load containers for it. + + :param file_ext: Work file extension may be specified otherwise first + extension in host's registered extensions is used. + :type file_ext: str + :return: Workfile path and loaded containers by Asset entity + :rtype: tuple + """ + # Get host + host = avalon.api.registered_host() + + # Workfile extension + if file_ext is None: + if not host.file_extensions(): + raise AssertionError( + "Host doesn't have set file extensions. Can't create workfile."
+ ) + file_ext = host.file_extensions()[0] + + workfile_root = host.work_root(io.Session) + + # make sure extension has dot + if not file_ext.startswith("."): + file_ext = ".{}".format(file_ext) + + # Create new workfile + project_doc = io.find_one({"type": "project"}) + asset_name = io.Session["AVALON_ASSET"] + asset_doc = io.find_one({ + "type": "asset", + "name": asset_name + }) + if not asset_doc: + raise AssertionError( + "Asset with name `{}` was not found.".format(asset_name) + ) + + root = avalon.api.registered_root() + template = project_doc["config"]["template"]["workfile"] + file_path = get_last_workfile_path(root, template, file_ext) + # TODO what should do if already exists? + # 1.) create new + # 2.) override + # 3.) raise exception + if file_path is not None: + log.warning("There already exist workfile `{}`.".format(file_path)) + return file_path + + hierarchy = "" + parents = asset_doc["data"].get("parents") + if parents: + hierarchy = "/".join(parents) + + # Use same data as Workfiles tool + template_data = { + "root": root, + "project": { + "name": project_doc["name"], + "code": project_doc["data"].get("code") + }, + "asset": asset_name, + "task": io.Session["AVALON_TASK"], + "hierarchy": hierarchy, + "version": 1, + "user": getpass.getuser(), + "ext": file_ext + } + + # Use same template as in Workfiles Tool + template_filled = pipeline.format_template_with_optional_keys( + template_data, template + ) + + # make sure filled template does not have more dots due to extension + while ".." in template_filled: + template_filled = template_filled.replace("..", ".") + + workfile_path = os.path.join(workfile_root, template_filled) + host.save_file(workfile_path) + + return workfile_path From d38df643a837ecc47c86362421ae85ee7c635e4f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:39:57 +0100 Subject: [PATCH 08/22] added build first workfile (probably for testing) --- pype/lib.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pype/lib.py b/pype/lib.py index 4e7d88947d..0698e2bbba 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -1238,3 +1238,12 @@ def create_first_workfile(file_ext=None): host.save_file(workfile_path) return workfile_path + + +def build_first_workfile(file_ext=None): + # DEBUG this should probably be host specific + # Create workfile + workfile_path = create_first_workfile(file_ext) + # Load containers + loaded_containers = load_containers_for_workfile() + return (workfile_path, loaded_containers) From 4428ccc04a286e9a4d3028ac90340ffe662c0cbc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:40:18 +0100 Subject: [PATCH 09/22] added build first workfile to maya menu --- pype/maya/menu.py | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/pype/maya/menu.py b/pype/maya/menu.py index 806944c117..efe338b4af 100644 --- a/pype/maya/menu.py +++ b/pype/maya/menu.py @@ -3,7 +3,9 @@ import os import logging from avalon.vendor.Qt import QtWidgets, QtCore, QtGui - +from avalon.maya import pipeline +import avalon.api +from ..lib import build_first_workfile import maya.cmds as cmds self = sys.modules[__name__] @@ -21,8 +23,15 @@ def _get_menu(): return menu - def deferred(): + def add_build_workfiles_item(): + # Add build first workfile + cmds.menuItem(divider=True, parent=pipeline._menu) + cmds.menuItem( + "Build First Workfile", + parent=pipeline._menu, + command=lambda *args: build_first_workfile() + ) log.info("Attempting to install scripts menu..") @@ -30,8 +39,11 @@ def deferred(): 
import scriptsmenu.launchformaya import scriptsmenu.scriptsmenu except ImportError: - log.warning("Skipping studio.menu install, because " - "'scriptsmenu' module seems unavailable.") + log.warning( + "Skipping studio.menu install, because " + "'scriptsmenu' module seems unavailable." + ) + add_build_workfiles_item() return # load configuration of custom menu @@ -39,15 +51,16 @@ def deferred(): config = scriptsmenu.load_configuration(config_path) # run the launcher for Maya menu - studio_menu = launchformaya.main(title=self._menu.title(), - objectName=self._menu) + studio_menu = launchformaya.main( + title=self._menu.title(), + objectName=self._menu + ) # apply configuration studio_menu.build_from_configuration(studio_menu, config) def uninstall(): - menu = _get_menu() if menu: log.info("Attempting to uninstall..") @@ -60,9 +73,8 @@ def uninstall(): def install(): - if cmds.about(batch=True): - print("Skipping pype.menu initialization in batch mode..") + log.info("Skipping pype.menu initialization in batch mode..") return uninstall() From c11092252f2b455fb4a7922d6661a46b08f9c74d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:45:55 +0100 Subject: [PATCH 10/22] fixed non-existing variable --- pype/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/lib.py b/pype/lib.py index 0698e2bbba..d39902f5cf 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -884,7 +884,7 @@ def load_containers_by_asset_data( def get_link_assets(asset_entity): """Return linked assets for `asset_entity`.""" # TODO implement - return [test_asset] + return [] From 7d0d0f4270c8dd0f0e62ad0a63033e3d4ea0dc75 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Mar 2020 17:48:07 +0100 Subject: [PATCH 11/22] renamed key `subset_filters` to `subset_name_filters` --- pype/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/lib.py b/pype/lib.py index d39902f5cf..0264653f0f 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -751,7 +751,7 @@ def load_containers_by_asset_data( continue # Regex filtering (optional) - variant_regexes = variant.get("subset_filters") + variant_regexes = variant.get("subset_name_filters")
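A note on how the renamed `subset_name_filters` values behave: the loading code applies them with `re.match`, which anchors at the start of the subset name only. A small self-contained sketch with hypothetical patterns:

```
import re

# Hypothetical "subset_name_filters" preset value; the matching below
# mirrors the re.match() call in load_containers_by_asset_data.
subset_name_filters = ["^animation.*", "modelMain$"]

def subset_matches(subset_name, patterns):
    # re.match anchors at the beginning of the string, so "modelMain$"
    # matches only the exact name "modelMain".
    return any(re.match(pattern, subset_name) for pattern in patterns)

print(subset_matches("animationCharA", subset_name_filters))  # True
print(subset_matches("modelMain", subset_name_filters))       # True
print(subset_matches("modelMainLow", subset_name_filters))    # False
```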
From 7d48a0322d3df9b7fdc8e8e14d774890c173b884 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 2 Apr 2020 16:54:33 +0200 Subject: [PATCH 12/22] function names and grammar --- pype/lib.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index 5410c2eba1..78fe8b6c69 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -843,7 +843,7 @@ def load_containers_by_asset_data( log.debug("Valid subsets: {}".format(valid_subsets_by_id.values())) valid_repres_by_subset_id = collections.defaultdict(list) - for subset_id, subset_entity in valid_subsets_by_id.items(): + for subset_id, _subset_entity in valid_subsets_by_id.items(): variant = variants_per_subset_id[subset_id] variant_repre_names = variant.get("repre_names") if not variant_repre_names: @@ -889,7 +889,7 @@ def load_containers_by_asset_data( is_loaded = False for repre_name_idx, variant_repre_name in enumerate( - variant_repre_names + variant_repre_names ): found_repre = None for repre in repres: @@ -947,7 +947,7 @@ def load_containers_by_asset_data( return loaded_containers -def get_link_assets(asset_entity): +def get_linked_assets(asset_entity): """Return linked assets for `asset_entity`.""" # TODO implement return [] -def load_containers_for_workfile(): +def load_containers_to_workfile(): """Load containers for (first) workfile. Loads latest versions of current and linked assets to workfile by logic @@ -1097,7 +1097,7 @@ def load_containers_to_workfile(): # Skip if there are any loaders if not loaders_by_name: - print("There are not registered loaders.") + print("There are no registered loaders.") return # Get current task name @@ -1109,7 +1109,7 @@ def load_containers_to_workfile(): # Skip if there are any presets for task if not workfile_presets: log.warning( - "For current task `{}` is not set any loading preset.".format( + "Current task `{}` does not have any loading preset.".format( current_task_name ) ) @@ -1128,12 +1128,12 @@ def load_containers_to_workfile(): elif not current_context: log.warning(( - "Current task `{}` don't have set loading preset for it's context." + "Current task `{}` doesn't have any loading preset for it's context." ).format(current_task_name)) elif not link_context: log.warning(( - "Current task `{}` don't have set " + "Current task `{}` doesn't have any" "loading preset for it's linked assets." ).format(current_task_name)) @@ -1147,7 +1147,7 @@ def load_containers_to_workfile(): if link_context: # Find and append linked assets if preset has set linked mapping - link_assets = get_link_assets(current_asset_entity) + link_assets = get_linked_assets(current_asset_entity) if link_assets: assets.extend(link_assets) @@ -1268,7 +1268,7 @@ def create_first_workfile(file_ext=None): # 2.) override # 3.) raise exception if file_path is not None: - log.warning("There already exist workfile `{}`.".format(file_path)) + log.warning("Workfile already exists`{}`.".format(file_path)) return file_path @@ -1311,5 +1311,5 @@ def build_first_workfile(file_ext=None): # Create workfile workfile_path = create_first_workfile(file_ext) # Load containers - loaded_containers = load_containers_for_workfile() + loaded_containers = load_containers_to_workfile() return (workfile_path, loaded_containers) From fe5733771a6418f9bc47e03fe5055e21f71b3e26 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 3 Apr 2020 09:53:25 +0200 Subject: [PATCH 13/22] Update pype/lib.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit precompile regex Co-Authored-By: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- pype/lib.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/lib.py b/pype/lib.py index 78fe8b6c69..108351ee1b 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -1205,8 +1205,9 @@ def get_last_workfile_path(root, template, file_ext): version = None last_file_name = None + re_template = re.compile(template, **kwargs) for file_name in sorted(filtered_file_names): - match = re.match(template, file_name, **kwargs) + match = re_template.match(file_name) if not match: continue From d8d5ccb2f0544f6fa83108d3fe0643ba87496c78 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 3 Apr 2020 10:15:04 +0200 Subject: [PATCH 14/22] PR comments based changes --- pype/lib.py | 41 +++++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index 108351ee1b..e863fcac77 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -711,7 +711,17 @@ class PypeHook: pass -def
get_workfile_build_preset(task_name): + """ Returns preset variant to build workfile for task name. + + Presets are loaded for current project set in io.Session["AVALON_PROJECT"], + filtered by registered host and entered task name. + + :param task_name: Task name used for filtering build presets. + :type task_name: str + :return: preset per eneter task + :rtype: dict | None + """ host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1] presets = config.get_presets(io.Session["AVALON_PROJECT"]) # Get presets for host @@ -739,15 +749,15 @@ def get_workfile_build_presets(task_name): def load_containers_by_asset_data( - asset_entity_data, workfile_presets, loaders_by_name + asset_entity_data, workfile_preset, loaders_by_name ): - if not asset_entity_data or not workfile_presets or not loaders_by_name: + if not asset_entity_data or not workfile_preset or not loaders_by_name: return asset_entity = asset_entity_data["asset_entity"] # Filter workfile presets by available loaders valid_variants = [] - for variant in workfile_presets: + for variant in workfile_preset: variant_loaders = variant.get("loaders") if not variant_loaders: log.warning(( @@ -755,14 +765,15 @@ def load_containers_by_asset_data( ).format(json.dumps(variant, indent=4))) continue - found = False + valid_variant = None for loader_name in variant_loaders: if loader_name in loaders_by_name: - valid_variants.append(variant) - found = True + valid_variant = variant break - if not found: + if valid_variant: + valid_variants.append(valid_variant) + else: log.warning( "Loaders in Workfile variant are not available: {0}".format( json.dumps(variant, indent=4) @@ -833,7 +844,7 @@ def load_containers_by_asset_data( valid_subsets_by_id[subset_id] = subset variants_per_subset_id[subset_id] = variant - # break variants loop if got here + # break variants loop on finding the first matching variant break if not valid_subsets_by_id: @@ -1071,8 +1082,6 @@ def load_containers_to_workfile(): ... 
}] """ - io.install() - # Get current asset name and entity current_asset_name = io.Session["AVALON_ASSET"] current_asset_entity = io.find_one({ @@ -1101,13 +1110,13 @@ def load_containers_to_workfile(): return # Get current task name - current_task_name = os.environ["AVALON_TASK"] + current_task_name = io.Session["AVALON_TASK"] current_task_name_low = current_task_name.lower() # Load workfile presets for task - workfile_presets = get_workfile_build_presets(current_task_name_low) + workfile_preset = get_workfile_build_preset(current_task_name_low) # Skip if there are any presets for task - if not workfile_presets: + if not workfile_preset: log.warning( "Current task `{}` does not have any loading preset.".format( current_task_name @@ -1116,9 +1125,9 @@ def load_containers_to_workfile(): return # Get presets for loading current asset - current_context = workfile_presets.get("current_context") + current_context = workfile_preset.get("current_context") # Get presets for loading linked assets - link_context = workfile_presets.get("linked_assets") + link_context = workfile_preset.get("linked_assets") # Skip if both are missing if not current_context and not link_context: log.warning("Current task `{}` has empty loading preset.".format( From a8d1fc9cc67c4955a71866b97ab6fc70251e7f70 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 3 Apr 2020 10:32:55 +0200 Subject: [PATCH 15/22] variables shuffle --- pype/lib.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index e863fcac77..67c5e82611 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -887,20 +887,15 @@ def load_containers_by_asset_data( } for subset_id, repres in valid_repres_by_subset_id.items(): - subset = valid_subsets_by_id[subset_id] - subset_name = subset["name"] + subset_name = valid_subsets_by_id[subset_id]["name"] variant = variants_per_subset_id[subset_id] - - variant_loader_names = variant["loaders"] - variant_loader_count = len(variant_loader_names) - - variant_repre_names = variant["repre_names"] - variant_repre_count = len(variant_repre_names) + loaders_last_idx = len(variant["loaders"]) - 1 + repre_names_last_idx = len(variant["repre_names"]) - 1 is_loaded = False for repre_name_idx, variant_repre_name in enumerate( - variant_repre_names + variant["repre_names"] ): found_repre = None for repre in repres: @@ -912,7 +907,7 @@ def load_containers_by_asset_data( if not found_repre: continue - for loader_idx, loader_name in enumerate(variant_loader_names): + for loader_idx, loader_name in enumerate(variant["loaders"]): if is_loaded: break @@ -942,9 +937,9 @@ def load_containers_by_asset_data( ) msg = "Loading failed." - if loader_idx < (variant_loader_count - 1): + if loader_idx < loaders_last_idx: msg += " Trying next loader." - elif repre_name_idx < (variant_repre_count - 1): + elif repre_name_idx < repre_names_last_idx: msg += ( " Loading of subset `{}` was not successful." ).format(subset_name) @@ -1106,7 +1101,7 @@ def load_containers_to_workfile(): # Skip if there are any loaders if not loaders_by_name: - print("There are no registered loaders.") + log.warning("There are no registered loaders.") return # Get current task name @@ -1137,7 +1132,8 @@ def load_containers_to_workfile(): elif not current_context: log.warning(( - "Current task `{}` doesn't have any loading preset for it's context." + "Current task `{}` doesn't have any loading" + " preset for it's context." 
).format(current_task_name)) elif not link_context: @@ -1160,8 +1156,8 @@ def load_containers_to_workfile(): if link_assets: assets.extend(link_assets) - # Skip if there are any assets - # - this may happend if only linked mapping is set and there are not links + # Skip if there are no assets. This can happen if only linked mapping is + # set and there are no links for his asset. if not assets: log.warning("Asset does not have linked assets. Nothing to process.") return From 0c99fc849d131e01a4cf4f25c22414bdd2f1378c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Apr 2020 10:04:05 +0200 Subject: [PATCH 16/22] renamed `load_containers_to_workfile` to `build_workfile` and removed `create_first_workfile` --- pype/lib.py | 97 +---------------------------------------------- pype/maya/menu.py | 7 ++-- 2 files changed, 5 insertions(+), 99 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index 67c5e82611..d38e76a5ee 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -1054,8 +1054,8 @@ def collect_last_version_repres(asset_entities): return output -def load_containers_to_workfile(): - """Load containers for (first) workfile. +def build_workfile(): + """Load representations for current context as containers into workfile. Loads latest versions of current and linked assets to workfile by logic stored in Workfile variants from presets. Variants are set by host, @@ -1226,96 +1226,3 @@ def get_last_workfile_path(root, template, file_ext): last_file_path = os.path.join(root, last_file_name) return last_file_path - - -def create_first_workfile(file_ext=None): - """Builds first workfile and load containers for it. - - :param file_ext: Work file extension may be specified otherwise first - extension in host's registered extensions is used. - :type file_ext: str - :return: Workfile path and loaded containers by Asset entity - :rtype: tuple - """ - # Get host - host = avalon.api.registered_host() - - # Workfile extension - if file_ext is None: - if not host.file_extensions(): - raise AssertionError( - "Host doesn't have set file extensions. Can't create workfile." - ) - file_ext = host.file_extensions()[0] - - workfile_root = host.work_root(io.Session) - - # make sure extension has dot - if not file_ext.startswith("."): - file_ext = ".{}".format(file_ext) - - # Create new workfile - project_doc = io.find_one({"type": "project"}) - asset_name = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ - "type": "asset", - "name": asset_name - }) - if not asset_doc: - raise AssertionError( - "Asset with name `{}` was not found.".format(asset_name) - ) - - root = avalon.api.registered_root() - template = project_doc["config"]["template"]["workfile"] - file_path = get_last_workfile_path(root, template, file_ext) - # TODO what should do if already exists? - # 1.) create new - # 2.) override - # 3.) 
raise exception - if file_path is not None: - log.warning("Workfile already exists`{}`.".format(file_path)) - return file_path - - hierarchy = "" - parents = asset_doc["data"].get("parents") - if parents: - hierarchy = "/".join(parents) - - # Use same data as Workfiles tool - template_data = { - "root": root, - "project": { - "name": project_doc["name"], - "code": project_doc["data"].get("code") - }, - "asset": asset_name, - "task": io.Session["AVALON_TASK"], - "hierarchy": hierarchy, - "version": 1, - "user": getpass.getuser(), - "ext": file_ext - } - - # Use same template as in Workfiles Tool - template_filled = pipeline.format_template_with_optional_keys( - template_data, template - ) - - # make sure filled template does not have more dots due to extension - while ".." in template_filled: - template_filled = template_filled.replace("..", ".") - - workfile_path = os.path.join(workfile_root, template_filled) - host.save_file(workfile_path) - - return workfile_path - - -def build_first_workfile(file_ext=None): - # DEBUG this should probably be host specific - # Create workfile - workfile_path = create_first_workfile(file_ext) - # Load containers - loaded_containers = load_containers_to_workfile() - return (workfile_path, loaded_containers) diff --git a/pype/maya/menu.py b/pype/maya/menu.py index efe338b4af..c25166f210 100644 --- a/pype/maya/menu.py +++ b/pype/maya/menu.py @@ -2,10 +2,9 @@ import sys import os import logging -from avalon.vendor.Qt import QtWidgets, QtCore, QtGui +from avalon.vendor.Qt import QtWidgets, QtGui from avalon.maya import pipeline -import avalon.api -from ..lib import build_first_workfile +from ..lib import build_workfile import maya.cmds as cmds self = sys.modules[__name__] @@ -30,7 +29,7 @@ def deferred(): cmds.menuItem( "Build First Workfile", parent=pipeline._menu, - command=lambda *args: build_first_workfile() + command=lambda *args: build_workfile() ) log.info("Attempting to install scripts menu..") From c002890772e0e95a9e27c108f8d5758368bfc73a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Apr 2020 10:27:47 +0200 Subject: [PATCH 17/22] name `variant` changed to `profile` --- pype/lib.py | 95 ++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 48 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index d38e76a5ee..11491fe250 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -4,7 +4,6 @@ import types import re import uuid import json -import getpass import collections import logging import itertools @@ -712,7 +711,7 @@ class PypeHook: def get_workfile_build_preset(task_name): - """ Returns preset variant to build workfile for task name. + """ Returns preset profile to build workfile for task name. Presets are loaded for current project set in io.Session["AVALON_PROJECT"], filtered by registered host and entered task name. 
@@ -733,16 +732,16 @@ def get_workfile_build_preset(task_name): task_name_low = task_name.lower() per_task_preset = None - for variant in workfile_presets: - variant_tasks = variant.get("tasks") - if not variant_tasks: + for profile in workfile_presets: + profile_tasks = profile.get("tasks") + if not profile_tasks: continue - variant_tasks_low = [task.lower() for task in variant_tasks] - if task_name_low not in variant_tasks_low: + profile_tasks_low = [task.lower() for task in profile_tasks] + if task_name_low not in profile_tasks_low: continue - per_task_preset = variant + per_task_preset = profile break return per_task_preset @@ -756,35 +755,35 @@ def load_containers_by_asset_data( asset_entity = asset_entity_data["asset_entity"] # Filter workfile presets by available loaders - valid_variants = [] - for variant in workfile_preset: - variant_loaders = variant.get("loaders") - if not variant_loaders: + valid_profiles = [] + for profile in workfile_preset: + profile_loaders = profile.get("loaders") + if not profile_loaders: log.warning(( - "Workfile variant has missing loaders configuration: {0}" - ).format(json.dumps(variant, indent=4))) + "Workfile profile has missing loaders configuration: {0}" + ).format(json.dumps(profile, indent=4))) continue - valid_variant = None - for loader_name in variant_loaders: + valid_profile = None + for loader_name in profile_loaders: if loader_name in loaders_by_name: - valid_variant = variant + valid_profile = profile break - if valid_variant: - valid_variants.append(valid_variant) + if valid_profile: + valid_profiles.append(valid_profile) else: log.warning( - "Loaders in Workfile variant are not available: {0}".format( - json.dumps(variant, indent=4) + "Loaders in Workfile profile are not available: {0}".format( + json.dumps(profile, indent=4) ) ) - if not valid_variants: - log.warning("There are not valid Workfile variants. Skipping process.") + if not valid_profiles: + log.warning("There are not valid Workfile profiles. 
Skipping process.") return - log.debug("Valid Workfile variants: {}".format(valid_variants)) + log.debug("Valid Workfile profiles: {}".format(valid_profiles)) subsets = [] version_by_subset_id = {} @@ -814,25 +813,25 @@ def load_containers_by_asset_data( subsets_by_family[family].append(subset) valid_subsets_by_id = {} - variants_per_subset_id = {} + profiles_per_subset_id = {} for family, subsets in subsets_by_family.items(): family_low = family.lower() - for variant in valid_variants: + for profile in valid_profiles: # Family filtering - variant_families = variant.get("families") or [] - if not variant_families: + profile_families = profile.get("families") or [] + if not profile_families: continue - variant_families_low = [fam.lower() for fam in variant_families] - if family_low not in variant_families_low: + profile_families_low = [fam.lower() for fam in profile_families] + if family_low not in profile_families_low: continue # Regex filtering (optional) - variant_regexes = variant.get("subset_name_filters") + profile_regexes = profile.get("subset_name_filters") for subset in subsets: - if variant_regexes: + if profile_regexes: valid = False - for pattern in variant_regexes: + for pattern in profile_regexes: if re.match(pattern, subset["name"]): valid = True break @@ -842,9 +841,9 @@ def load_containers_by_asset_data( subset_id = subset["_id"] valid_subsets_by_id[subset_id] = subset - variants_per_subset_id[subset_id] = variant + profiles_per_subset_id[subset_id] = profile - # break variants loop on finding the first matching variant + # break profiles loop on finding the first matching profile break if not valid_subsets_by_id: @@ -855,20 +854,20 @@ def load_containers_by_asset_data( valid_repres_by_subset_id = collections.defaultdict(list) for subset_id, _subset_entity in valid_subsets_by_id.items(): - variant = variants_per_subset_id[subset_id] - variant_repre_names = variant.get("repre_names") - if not variant_repre_names: + profile = profiles_per_subset_id[subset_id] + profile_repre_names = profile.get("repre_names") + if not profile_repre_names: continue # Lower names - variant_repre_names = [name.lower() for name in variant_repre_names] + profile_repre_names = [name.lower() for name in profile_repre_names] version_entity = version_by_subset_id[subset_id] version_id = version_entity["_id"] repres = repres_by_version_id[version_id] for repre in repres: repre_name_low = repre["name"].lower() - if repre_name_low in variant_repre_names: + if repre_name_low in profile_repre_names: valid_repres_by_subset_id[subset_id].append(repre) # DEBUG message @@ -889,25 +888,25 @@ def load_containers_by_asset_data( for subset_id, repres in valid_repres_by_subset_id.items(): subset_name = valid_subsets_by_id[subset_id]["name"] - variant = variants_per_subset_id[subset_id] - loaders_last_idx = len(variant["loaders"]) - 1 - repre_names_last_idx = len(variant["repre_names"]) - 1 + profile = profiles_per_subset_id[subset_id] + loaders_last_idx = len(profile["loaders"]) - 1 + repre_names_last_idx = len(profile["repre_names"]) - 1 is_loaded = False - for repre_name_idx, variant_repre_name in enumerate( - variant["repre_names"] + for repre_name_idx, profile_repre_name in enumerate( + profile["repre_names"] ): found_repre = None for repre in repres: repre_name = repre["name"] - if repre_name == variant_repre_name: + if repre_name == profile_repre_name: found_repre = repre break if not found_repre: continue - for loader_idx, loader_name in enumerate(variant["loaders"]): + for loader_idx, loader_name in 
enumerate(profile["loaders"]): if is_loaded: break @@ -1058,7 +1057,7 @@ def build_workfile(): """Load representations for current context as containers into workfile. Loads latest versions of current and linked assets to workfile by logic - stored in Workfile variants from presets. Variants are set by host, + stored in Workfile profiles from presets. Variants are set by host, filtered by current task name and used by families. Each family can specify representation names and loaders for From 29d63e4f179d5a7fe19952fbac5c2331b73833b0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Apr 2020 12:05:01 +0200 Subject: [PATCH 18/22] build workile methods wrapped into one class and methods were split or reorganized a little bit --- pype/lib.py | 977 ++++++++++++++++++++++++---------------------- pype/maya/menu.py | 4 +- 2 files changed, 512 insertions(+), 469 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index 11491fe250..48d9cb5965 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -710,97 +710,13 @@ class PypeHook: pass -def get_workfile_build_preset(task_name): - """ Returns preset profile to build workfile for task name. - - Presets are loaded for current project set in io.Session["AVALON_PROJECT"], - filtered by registered host and entered task name. - - :param task_name: Task name used for filtering build presets. - :type task_name: str - :return: preset per eneter task - :rtype: dict | None - """ - host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1] - presets = config.get_presets(io.Session["AVALON_PROJECT"]) - # Get presets for host - workfile_presets = presets["plugins"].get(host_name, {}).get( - "workfile_build" - ) - if not workfile_presets: - return - - task_name_low = task_name.lower() - per_task_preset = None - for profile in workfile_presets: - profile_tasks = profile.get("tasks") - if not profile_tasks: - continue - - profile_tasks_low = [task.lower() for task in profile_tasks] - if task_name_low not in profile_tasks_low: - continue - - per_task_preset = profile - break - - return per_task_preset +def get_linked_assets(asset_entity): + """Return linked assets for `asset_entity`.""" + # TODO implement + return [] -def load_containers_by_asset_data( - asset_entity_data, workfile_preset, loaders_by_name -): - if not asset_entity_data or not workfile_preset or not loaders_by_name: - return - - asset_entity = asset_entity_data["asset_entity"] - # Filter workfile presets by available loaders - valid_profiles = [] - for profile in workfile_preset: - profile_loaders = profile.get("loaders") - if not profile_loaders: - log.warning(( - "Workfile profile has missing loaders configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - valid_profile = None - for loader_name in profile_loaders: - if loader_name in loaders_by_name: - valid_profile = profile - break - - if valid_profile: - valid_profiles.append(valid_profile) - else: - log.warning( - "Loaders in Workfile profile are not available: {0}".format( - json.dumps(profile, indent=4) - ) - ) - - if not valid_profiles: - log.warning("There are not valid Workfile profiles. 
Skipping process.") - return - - log.debug("Valid Workfile profiles: {}".format(valid_profiles)) - - subsets = [] - version_by_subset_id = {} - repres_by_version_id = {} - for subset_id, in_data in asset_entity_data["subsets"].items(): - subsets.append(in_data["subset_entity"]) - version_data = in_data["version"] - version_entity = version_data["version_entity"] - version_by_subset_id[subset_id] = version_entity - repres_by_version_id[version_entity["_id"]] = version_data["repres"] - - if not subsets: - log.warning("There are not subsets for asset {0}".format( - asset_entity["name"] - )) - return - +def map_subsets_by_family(subsets): subsets_by_family = collections.defaultdict(list) for subset in subsets: family = subset["data"].get("family") @@ -811,417 +727,544 @@ def load_containers_by_asset_data( family = families[0] subsets_by_family[family].append(subset) + return subsets_by_family - valid_subsets_by_id = {} - profiles_per_subset_id = {} - for family, subsets in subsets_by_family.items(): - family_low = family.lower() - for profile in valid_profiles: - # Family filtering - profile_families = profile.get("families") or [] - if not profile_families: + +class BuildWorkfile: + """Wrapper for build workfile process. + + Load representations for current context by build presets. Build presets + are host related, since each host has it's loaders. + """ + + @classmethod + def build_workfile(cls): + """Main method of this wrapper. + + Loads latest versions of current and linked assets to workfile by logic + stored in Workfile profiles from presets. Profiles are set by host, + filtered by current task name and used by families. + + Each family can specify representation names and loaders for + representations and first available and successful loaded + representation is returned as container. + + At the end you'll get list of loaded containers per each asset. + + loaded_containers [{ + "asset_entity": , + "containers": [, , ...] + }, { + "asset_entity": , + "containers": [, ...] + }, { + ... 
+ }] + """ + # Get current asset name and entity + current_asset_name = io.Session["AVALON_ASSET"] + current_asset_entity = io.find_one({ + "type": "asset", + "name": current_asset_name + }) + + # Skip if asset was not found + if not current_asset_entity: + print("Asset entity with name `{}` was not found".format( + current_asset_name + )) + return + + # Prepare available loaders + loaders_by_name = {} + for loader in avalon.api.discover(avalon.api.Loader): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {0}!".format(loader_name) + ) + loaders_by_name[loader_name] = loader + + # Skip if there are any loaders + if not loaders_by_name: + log.warning("There are no registered loaders.") + return + + # Get current task name + current_task_name = io.Session["AVALON_TASK"] + + # Load workfile presets for task + build_presets = cls.get_build_presets(current_task_name) + + # Skip if there are any presets for task + if not build_presets: + log.warning( + "Current task `{}` does not have any loading preset.".format( + current_task_name + ) + ) + return + + # Get presets for loading current asset + current_context_profiles = build_presets.get("current_context") + # Get presets for loading linked assets + link_context_profiles = build_presets.get("linked_assets") + # Skip if both are missing + if not current_context_profiles and not link_context_profiles: + log.warning("Current task `{}` has empty loading preset.".format( + current_task_name + )) + return + + elif not current_context_profiles: + log.warning(( + "Current task `{}` doesn't have any loading" + " preset for it's context." + ).format(current_task_name)) + + elif not link_context_profiles: + log.warning(( + "Current task `{}` doesn't have any" + "loading preset for it's linked assets." + ).format(current_task_name)) + + # Prepare assets to process by workfile presets + assets = [] + current_asset_id = None + if current_context_profiles: + # Add current asset entity if preset has current context set + assets.append(current_asset_entity) + current_asset_id = current_asset_entity["_id"] + + if link_context_profiles: + # Find and append linked assets if preset has set linked mapping + link_assets = get_linked_assets(current_asset_entity) + if link_assets: + assets.extend(link_assets) + + # Skip if there are no assets. This can happen if only linked mapping + # is set and there are no links for his asset. + if not assets: + log.warning( + "Asset does not have linked assets. Nothing to process." + ) + return + + # Prepare entities from database for assets + prepared_entities = cls._collect_last_version_repres(assets) + + # Load containers by prepared entities and presets + loaded_containers = [] + # - Current asset containers + if current_asset_id and current_asset_id in prepared_entities: + current_context_data = prepared_entities.pop(current_asset_id) + loaded_data = cls.load_containers_by_asset_data( + current_context_data, current_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # - Linked assets container + for linked_asset_data in prepared_entities.values(): + loaded_data = cls.load_containers_by_asset_data( + linked_asset_data, link_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # Return list of loaded containers + return loaded_containers + + @classmethod + def get_build_presets(cls, task_name): + """ Returns presets to build workfile for task name. 
+ + Presets are loaded for current project set in + io.Session["AVALON_PROJECT"], filtered by registered host + and entered task name. + + :param task_name: Task name used for filtering build presets. + :type task_name: str + :return: preset per eneter task + :rtype: dict | None + """ + host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1] + presets = config.get_presets(io.Session["AVALON_PROJECT"]) + # Get presets for host + build_presets = ( + presets["plugins"] + .get(host_name, {}) + .get("workfile_build") + ) + if not build_presets: + return + + task_name_low = task_name.lower() + per_task_preset = None + for preset in build_presets: + preset_tasks = preset.get("tasks") or [] + preset_tasks_low = [task.lower() for task in preset_tasks] + if task_name_low in preset_tasks_low: + per_task_preset = preset + break + + return per_task_preset + + @classmethod + def _filter_workfile_profiles(cls, workfile_profiles, loaders_by_name): + """ Filter and prepare workfile presets by available loaders. + + Valid profile must have "loaders", "families" and "repre_names" keys + with valid values. + - "loaders" expects list of strings representing possible loaders. + - "families" expects list of strings for filtering + by main subset family. + - "repre_names" expects list of strings for filtering by + representation name. + + Lowered "families" and "repre_names" are prepared for each profile with + all required keys. + """ + valid_profiles = [] + for profile in workfile_profiles: + # Check loaders + profile_loaders = profile.get("loaders") + if not profile_loaders: + log.warning(( + "Workfile profile has missing loaders configuration: {0}" + ).format(json.dumps(profile, indent=4))) continue - profile_families_low = [fam.lower() for fam in profile_families] - if family_low not in profile_families_low: - continue - - # Regex filtering (optional) - profile_regexes = profile.get("subset_name_filters") - for subset in subsets: - if profile_regexes: - valid = False - for pattern in profile_regexes: - if re.match(pattern, subset["name"]): - valid = True - break - - if not valid: - continue - - subset_id = subset["_id"] - valid_subsets_by_id[subset_id] = subset - profiles_per_subset_id[subset_id] = profile - - # break profiles loop on finding the first matching profile - break - - if not valid_subsets_by_id: - log.warning("There are not valid subsets.") - return - - log.debug("Valid subsets: {}".format(valid_subsets_by_id.values())) - - valid_repres_by_subset_id = collections.defaultdict(list) - for subset_id, _subset_entity in valid_subsets_by_id.items(): - profile = profiles_per_subset_id[subset_id] - profile_repre_names = profile.get("repre_names") - if not profile_repre_names: - continue - - # Lower names - profile_repre_names = [name.lower() for name in profile_repre_names] - - version_entity = version_by_subset_id[subset_id] - version_id = version_entity["_id"] - repres = repres_by_version_id[version_id] - for repre in repres: - repre_name_low = repre["name"].lower() - if repre_name_low in profile_repre_names: - valid_repres_by_subset_id[subset_id].append(repre) - - # DEBUG message - msg = "Valid representations for Asset: `{}`".format(asset_entity["name"]) - for subset_id, repres in valid_repres_by_subset_id.items(): - subset = valid_subsets_by_id[subset_id] - msg += "\n# Subset Name/ID: `{}`/{}".format(subset["name"], subset_id) - for repre in repres: - msg += "\n## Repre name: `{}`".format(repre["name"]) - - log.debug(msg) - - loaded_containers = { - "asset_entity": asset_entity, - 
"containers": [] - } - - for subset_id, repres in valid_repres_by_subset_id.items(): - subset_name = valid_subsets_by_id[subset_id]["name"] - - profile = profiles_per_subset_id[subset_id] - loaders_last_idx = len(profile["loaders"]) - 1 - repre_names_last_idx = len(profile["repre_names"]) - 1 - - is_loaded = False - for repre_name_idx, profile_repre_name in enumerate( - profile["repre_names"] - ): - found_repre = None - for repre in repres: - repre_name = repre["name"] - if repre_name == profile_repre_name: - found_repre = repre + # Check if any loader is available + loaders_match = False + for loader_name in profile_loaders: + if loader_name in loaders_by_name: + loaders_match = True break - if not found_repre: + if not loaders_match: + log.warning(( + "All loaders from Workfile profile are not available: {0}" + ).format(json.dumps(profile, indent=4))) continue - for loader_idx, loader_name in enumerate(profile["loaders"]): + # Check families + profile_families = profile.get("families") + if not profile_families: + log.warning(( + "Workfile profile is missing families configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check representation names + profile_repre_names = profile.get("repre_names") + if not profile_repre_names: + log.warning(( + "Workfile profile is missing" + " representation names filtering: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Prepare lowered families and representation names + profile["families_lowered"] = [ + fam.lower() for fam in profile_families + ] + profile["repre_names_lowered"] = [ + name.lower() for name in profile_repre_names + ] + + valid_profiles.append(profile) + + return valid_profiles + + @classmethod + def _prepare_profile_for_subsets(cls, subsets, profiles): + # Prepare subsets + subsets_by_family = map_subsets_by_family(subsets) + + profiles_per_subset_id = {} + for family, subsets in subsets_by_family.items(): + family_low = family.lower() + for profile in profiles: + # Skip profile if does not contain family + if family_low not in profile["families_lowered"]: + continue + + # Precompile name filters as regexes + profile_regexes = profile.get("subset_name_filters") + if profile_regexes: + _profile_regexes = [] + for regex in profile_regexes: + _profile_regexes.append(re.compile(regex)) + profile_regexes = _profile_regexes + + # TODO prepare regex compilation + for subset in subsets: + # Verify regex filtering (optional) + if profile_regexes: + valid = False + for pattern in profile_regexes: + if re.match(pattern, subset["name"]): + valid = True + break + + if not valid: + continue + + profiles_per_subset_id[subset["_id"]] = profile + + # break profiles loop on finding the first matching profile + break + return profiles_per_subset_id + + @classmethod + def load_containers_by_asset_data( + cls, asset_entity_data, build_profiles, loaders_by_name + ): + # Make sure all data are not empty + if not asset_entity_data or not build_profiles or not loaders_by_name: + return + + asset_entity = asset_entity_data["asset_entity"] + + valid_profiles = cls._filter_workfile_profiles( + build_profiles, loaders_by_name + ) + if not valid_profiles: + log.warning( + "There are not valid Workfile profiles. Skipping process." 
+            )
+            return
+
+        log.debug("Valid Workfile profiles: {}".format(valid_profiles))
+
+        subsets_by_id = {}
+        version_by_subset_id = {}
+        repres_by_version_id = {}
+        for subset_id, in_data in asset_entity_data["subsets"].items():
+            subset_entity = in_data["subset_entity"]
+            subsets_by_id[subset_entity["_id"]] = subset_entity
+
+            version_data = in_data["version"]
+            version_entity = version_data["version_entity"]
+            version_by_subset_id[subset_id] = version_entity
+            repres_by_version_id[version_entity["_id"]] = (
+                version_data["repres"]
+            )
+
+        if not subsets_by_id:
+            log.warning("There are no subsets for asset {0}".format(
+                asset_entity["name"]
+            ))
+            return
+
+        profiles_per_subset_id = cls._prepare_profile_for_subsets(
+            subsets_by_id.values(), valid_profiles
+        )
+        if not profiles_per_subset_id:
+            log.warning("There are no valid subsets.")
+            return
+
+        valid_repres_by_subset_id = collections.defaultdict(list)
+        for subset_id, profile in profiles_per_subset_id.items():
+            profile_repre_names = profile["repre_names_lowered"]
+
+            version_entity = version_by_subset_id[subset_id]
+            version_id = version_entity["_id"]
+            repres = repres_by_version_id[version_id]
+            for repre in repres:
+                repre_name_low = repre["name"].lower()
+                if repre_name_low in profile_repre_names:
+                    valid_repres_by_subset_id[subset_id].append(repre)
+
+        # DEBUG message
+        msg = "Valid representations for Asset: `{}`".format(
+            asset_entity["name"]
+        )
+        for subset_id, repres in valid_repres_by_subset_id.items():
+            subset = subsets_by_id[subset_id]
+            msg += "\n# Subset Name/ID: `{}`/{}".format(
+                subset["name"], subset_id
+            )
+            for repre in repres:
+                msg += "\n## Repre name: `{}`".format(repre["name"])
+
+        log.debug(msg)
+
+        containers = cls._load_containers(
+            valid_repres_by_subset_id, subsets_by_id,
+            profiles_per_subset_id, loaders_by_name
+        )
+
+        return {
+            "asset_entity": asset_entity,
+            "containers": containers
+        }
+
+    @classmethod
+    def _load_containers(
+        cls, repres_by_subset_id, subsets_by_id,
+        profiles_per_subset_id, loaders_by_name
+    ):
+        loaded_containers = []
+        for subset_id, repres in repres_by_subset_id.items():
+            subset_name = subsets_by_id[subset_id]["name"]
+
+            profile = profiles_per_subset_id[subset_id]
+            loaders_last_idx = len(profile["loaders"]) - 1
+            repre_names_last_idx = len(profile["repre_names_lowered"]) - 1
+
+            is_loaded = False
+            for repre_name_idx, profile_repre_name in enumerate(
+                profile["repre_names_lowered"]
+            ):
+                # Break iteration if representation was already loaded
                 if is_loaded:
                     break
-            loader = loaders_by_name.get(loader_name)
-            if not loader:
+                found_repre = None
+                for repre in repres:
+                    repre_name = repre["name"].lower()
+                    if repre_name == profile_repre_name:
+                        found_repre = repre
+                        break
+
+                if not found_repre:
                     continue
-            try:
-                container = avalon.api.load(
-                    loader,
-                    found_repre["_id"],
-                    name=subset_name
-                )
-                loaded_containers["containers"].append(container)
-                is_loaded = True
-            except Exception as exc:
-                if exc == pipeline.IncompatibleLoaderError:
-                    log.info((
-                        "Loader `{}` is not compatible with"
-                        " representation `{}`"
-                    ).format(loader_name, repre["name"]))
+                for loader_idx, loader_name in enumerate(profile["loaders"]):
+                    if is_loaded:
+                        break
-                else:
-                    log.error(
-                        "Unexpected error happened during loading",
-                        exc_info=True
+                    loader = loaders_by_name.get(loader_name)
+                    if not loader:
+                        continue
+                    try:
+                        container = avalon.api.load(
+                            loader,
+                            found_repre["_id"],
+                            name=subset_name
                         )
+                        loaded_containers.append(container)
+                        is_loaded = True
-                msg = "Loading failed."
-                if loader_idx < loaders_last_idx:
-                    msg += " Trying next loader."
-                elif repre_name_idx < repre_names_last_idx:
-                    msg += (
-                        " Loading of subset `{}` was not successful."
-                    ).format(subset_name)
-                else:
-                    msg += " Trying next representation."
-                log.info(msg)
+                    except Exception as exc:
+                        if isinstance(exc, pipeline.IncompatibleLoaderError):
+                            log.info((
+                                "Loader `{}` is not compatible with"
+                                " representation `{}`"
+                            ).format(loader_name, repre["name"]))
 
-        if is_loaded:
-            break
+                        else:
+                            log.error(
+                                "Unexpected error happened during loading",
+                                exc_info=True
+                            )
 
-    return loaded_containers
+                        msg = "Loading failed."
+                        if loader_idx < loaders_last_idx:
+                            msg += " Trying next loader."
+                        elif repre_name_idx < repre_names_last_idx:
+                            msg += " Trying next representation."
+                        else:
+                            msg += (
+                                " Loading of subset `{}` was not successful."
+                            ).format(subset_name)
+                        log.info(msg)
+        return loaded_containers
 
-def get_linked_assets(asset_entity):
-    """Return linked assets for `asset_entity`."""
-    # TODO implement
-    return []
 
+    @classmethod
+    def _collect_last_version_repres(cls, asset_entities):
+        """Collect subsets, versions and representations for asset_entities.
 
+        :param asset_entities: Asset entities for which to find data
+        :type asset_entities: list
+        :return: collected entities
+        :rtype: dict
 
-def collect_last_version_repres(asset_entities):
-    """Collect subsets, versions and representations for asset_entities.
-
-    :param asset_entities: Asset entities for which want to find data
-    :type asset_entities: list
-    :return: collected entities
-    :rtype: dict
-
-    Example output:
-    ```
-    {
-        {Asset ID}: {
-            "asset_entity": <AssetEntity>,
-            "subsets": {
-                {Subset ID}: {
-                    "subset_entity": <SubsetEntity>,
-                    "version": {
-                        "version_entity": <VersionEntity>,
-                        "repres": [
-                            <RepreEntity1>, <RepreEntity2>, ...
-                        ]
-                    }
-                },
-                ...
-            }
-        },
-        ...
-    }
-    output[asset_id]["subsets"][subset_id]["version"]["repres"]
-    ```
-    """
-
-    if not asset_entities:
-        return {}
-
-    asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities}
-
-    subsets = list(io.find({
-        "type": "subset",
-        "parent": {"$in": asset_entity_by_ids.keys()}
-    }))
-    subset_entity_by_ids = {subset["_id"]: subset for subset in subsets}
-
-    sorted_versions = list(io.find({
-        "type": "version",
-        "parent": {"$in": subset_entity_by_ids.keys()}
-    }).sort("name", -1))
-
-    subset_id_with_latest_version = []
-    last_versions_by_id = {}
-    for version in sorted_versions:
-        subset_id = version["parent"]
-        if subset_id in subset_id_with_latest_version:
-            continue
-        subset_id_with_latest_version.append(subset_id)
-        last_versions_by_id[version["_id"]] = version
-
-    repres = io.find({
-        "type": "representation",
-        "parent": {"$in": last_versions_by_id.keys()}
-    })
-
-    output = {}
-    for repre in repres:
-        version_id = repre["parent"]
-        version = last_versions_by_id[version_id]
-
-        subset_id = version["parent"]
-        subset = subset_entity_by_ids[subset_id]
-
-        asset_id = subset["parent"]
-        asset = asset_entity_by_ids[asset_id]
-
-        if asset_id not in output:
-            output[asset_id] = {
-                "asset_entity": asset,
-                "subsets": {}
-            }
-
-        if subset_id not in output[asset_id]["subsets"]:
-            output[asset_id]["subsets"][subset_id] = {
-                "subset_entity": subset,
-                "version": {
-                    "version_entity": version,
-                    "repres": []
                 }
-            }
 
+        Example output:
+        ```
+        {
+            {Asset ID}: {
+                "asset_entity": <AssetEntity>,
+                "subsets": {
+                    {Subset ID}: {
+                        "subset_entity": <SubsetEntity>,
+                        "version": {
+                            "version_entity": <VersionEntity>,
+                            "repres": [
+                                <RepreEntity1>, <RepreEntity2>, ...
+                            ]
+                        }
+                    },
+                    ...
                }
+            },
+            ...
+        }
+        output[asset_id]["subsets"][subset_id]["version"]["repres"]
+        ```
+        """
-    output[asset_id]["subsets"][subset_id]["version"]["repres"].append(
-        repre
-    )
+        if not asset_entities:
+            return {}
-    return output
+        asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities}
+
+        subsets = list(io.find({
+            "type": "subset",
+            "parent": {"$in": asset_entity_by_ids.keys()}
+        }))
+        subset_entity_by_ids = {subset["_id"]: subset for subset in subsets}
-
-def build_workfile():
-    """Load representations for current context as containers into workfile.
+
+        sorted_versions = list(io.find({
+            "type": "version",
+            "parent": {"$in": subset_entity_by_ids.keys()}
+        }).sort("name", -1))
-
-    Loads latest versions of current and linked assets to workfile by logic
-    stored in Workfile profiles from presets. Variants are set by host,
-    filtered by current task name and used by families.
+
+        subset_id_with_latest_version = []
+        last_versions_by_id = {}
+        for version in sorted_versions:
+            subset_id = version["parent"]
+            if subset_id in subset_id_with_latest_version:
+                continue
+            subset_id_with_latest_version.append(subset_id)
+            last_versions_by_id[version["_id"]] = version
-
-    Each family can specify representation names and loaders for
-    representations and first available and successful loaded representation is
-    returned as container.
+
+        repres = io.find({
+            "type": "representation",
+            "parent": {"$in": last_versions_by_id.keys()}
+        })
-
-    At the end you'll get list of loaded containers per each asset.
+
+        output = {}
+        for repre in repres:
+            version_id = repre["parent"]
+            version = last_versions_by_id[version_id]
-
-    loaded_containers [{
-        "asset_entity": <AssetEntity1>,
-        "containers": [<Container1>, <Container2>, ...]
-    }, {
-        "asset_entity": <AssetEntity2>,
-        "containers": [<Container3>, ...]
-    }, {
-        ...
-    }]
-    """
+
+            subset_id = version["parent"]
+            subset = subset_entity_by_ids[subset_id]
-
-    # Get current asset name and entity
-    current_asset_name = io.Session["AVALON_ASSET"]
-    current_asset_entity = io.find_one({
-        "type": "asset",
-        "name": current_asset_name
-    })
+
+            asset_id = subset["parent"]
+            asset = asset_entity_by_ids[asset_id]
-
-    # Skip if asset was not found
-    if not current_asset_entity:
-        print("Asset entity with name `{}` was not found".format(
-            current_asset_name
-        ))
-        return
+
+            if asset_id not in output:
+                output[asset_id] = {
+                    "asset_entity": asset,
+                    "subsets": {}
+                }
-
-    # Prepare available loaders
-    loaders_by_name = {}
-    for loader in avalon.api.discover(avalon.api.Loader):
-        loader_name = loader.__name__
-        if loader_name in loaders_by_name:
-            raise KeyError("Duplicated loader name {0}!".format(loader_name))
-        loaders_by_name[loader_name] = loader
+
+            if subset_id not in output[asset_id]["subsets"]:
+                output[asset_id]["subsets"][subset_id] = {
+                    "subset_entity": subset,
+                    "version": {
+                        "version_entity": version,
+                        "repres": []
+                    }
+                }
-
-    # Skip if there are any loaders
-    if not loaders_by_name:
-        log.warning("There are no registered loaders.")
-        return
+
+            output[asset_id]["subsets"][subset_id]["version"]["repres"].append(
+                repre
+            )
-
-    # Get current task name
-    current_task_name = io.Session["AVALON_TASK"]
-    current_task_name_low = current_task_name.lower()
-    # Load workfile presets for task
-    workfile_preset = get_workfile_build_preset(current_task_name_low)
-
-    # Skip if there are any presets for task
-    if not workfile_preset:
-        log.warning(
-            "Current task `{}` does not have any loading preset.".format(
-                current_task_name
-            )
-        )
-        return
+
+        return output
-
-    # Get presets for loading current asset
-    current_context = workfile_preset.get("current_context")
-    # Get presets for loading linked assets
-    link_context = workfile_preset.get("linked_assets")
-    # Skip if both are missing
-    if not current_context and not link_context:
-        log.warning("Current task `{}` has empty loading preset.".format(
-            current_task_name
-        ))
-        return
-
-    elif not current_context:
-        log.warning((
-            "Current task `{}` doesn't have any loading"
-            " preset for it's context."
-        ).format(current_task_name))
-
-    elif not link_context:
-        log.warning((
-            "Current task `{}` doesn't have any"
-            "loading preset for it's linked assets."
-        ).format(current_task_name))
-
-    # Prepare assets to process by workfile presets
-    assets = []
-    current_asset_id = None
-    if current_context:
-        # Add current asset entity if preset has current context set
-        assets.append(current_asset_entity)
-        current_asset_id = current_asset_entity["_id"]
-
-    if link_context:
-        # Find and append linked assets if preset has set linked mapping
-        link_assets = get_linked_assets(current_asset_entity)
-        if link_assets:
-            assets.extend(link_assets)
-
-    # Skip if there are no assets. This can happen if only linked mapping is
-    # set and there are no links for his asset.
-    if not assets:
-        log.warning("Asset does not have linked assets. Nothing to process.")
-        return
-
-    # Prepare entities from database for assets
-    prepared_entities = collect_last_version_repres(assets)
-
-    # Load containers by prepared entities and presets
-    loaded_containers = []
-    # - Current asset containers
-    if current_asset_id and current_asset_id in prepared_entities:
-        current_context_data = prepared_entities.pop(current_asset_id)
-        loaded_data = load_containers_by_asset_data(
-            current_context_data, current_context, loaders_by_name
-        )
-        if loaded_data:
-            loaded_containers.append(loaded_data)
-
-    # - Linked assets container
-    for linked_asset_data in prepared_entities.values():
-        loaded_data = load_containers_by_asset_data(
-            linked_asset_data, link_context, loaders_by_name
-        )
-        if loaded_data:
-            loaded_containers.append(loaded_data)
-
-    # Return list of loaded containers
-    return loaded_containers
-
-
-def get_last_workfile_path(root, template, file_ext):
-    template = re.sub("<.*?>", ".*?", template)
-    template = re.sub("{version.*}", "([0-9]+)", template)
-    template = re.sub("{comment.*?}", ".+?", template)
-    # template = pipeline._format_work_template(template)
-    template = "^" + template + "$"
-
-    all_file_names = []
-    if os.path.exists(root):
-        all_file_names = os.listdir(root)
-
-    filtered_file_names = [
-        file_name for file_name in all_file_names
-        if os.path.splitext(file_name)[1] == file_ext
-    ]
-
-    kwargs = {}
-    if platform.system() == "Windows":
-        kwargs["flags"] = re.IGNORECASE
-
-    version = None
-    last_file_name = None
-    re_template = re.compile(template)
-    for file_name in sorted(filtered_file_names):
-        match = re.match(re_template, file_name, **kwargs)
-        if not match:
-            continue
-
-        file_version = int(match.group(1))
-        if file_version >= version:
-            last_file_name = file_name
-            version = file_version + 1
-
-    last_file_path = None
-    if last_file_name:
-        last_file_path = os.path.join(root, last_file_name)
-
-    return last_file_path
diff --git a/pype/maya/menu.py b/pype/maya/menu.py
index c25166f210..eee4347a72 100644
--- a/pype/maya/menu.py
+++ b/pype/maya/menu.py
@@ -4,7 +4,7 @@ import logging
 
 from avalon.vendor.Qt import QtWidgets, QtGui
 from avalon.maya import pipeline
-from ..lib import build_workfile
+from ..lib import BuildWorkfile
 import maya.cmds as cmds
 
 self = sys.modules[__name__]
@@ -29,7 +29,7 @@ def deferred():
     cmds.menuItem(
         "Build First Workfile",
         parent=pipeline._menu,
-        command=lambda *args: build_workfile()
+        command=lambda *args: BuildWorkfile.build_workfile()
     )
 
     log.info("Attempting to install scripts menu..")

From 20e3a6022404e4f481170e7106fdabc939a7cf48 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Apr 2020 13:20:45 +0200
Subject: [PATCH 19/22] removed unused modules

---
 pype/lib.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pype/lib.py b/pype/lib.py
index 48d9cb5965..e16643a472 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -11,7 +11,6 @@ import contextlib
 import subprocess
 import inspect
 from abc import ABCMeta, abstractmethod
-import platform
 
 from avalon import io, pipeline
 import six

From 871bfde9c8cea0296893aa062efe480ecccc54ab Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Apr 2020 14:14:30 +0200
Subject: [PATCH 20/22] added some documentation

---
 pype/lib.py | 91 +++++++++++++++++++++++++++++++++++++++++++----------
 1 file changed, 74 insertions(+), 17 deletions(-)

diff --git a/pype/lib.py b/pype/lib.py
index e16643a472..965e738d1f 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -910,8 +910,8 @@ class BuildWorkfile:
         return per_task_preset
 
     @classmethod
-    def _filter_workfile_profiles(cls, workfile_profiles, loaders_by_name):
-        """ Filter and prepare workfile presets by available loaders.
+    def _filter_build_profiles(cls, build_profiles, loaders_by_name):
+        """ Filter build profiles by loaders and prepare process data.
 
         Valid profile must have "loaders", "families" and "repre_names" keys
         with valid values.
@@ -923,14 +923,21 @@
 
         Lowered "families" and "repre_names" are prepared for each profile with
         all required keys.
+
+        :param build_profiles: Profiles for building workfile.
+        :type build_profiles: dict
+        :param loaders_by_name: Available loaders per name.
+        :type loaders_by_name: dict
+        :return: Filtered and prepared profiles.
+        :rtype: list
         """
         valid_profiles = []
-        for profile in workfile_profiles:
+        for profile in build_profiles:
             # Check loaders
             profile_loaders = profile.get("loaders")
             if not profile_loaders:
                 log.warning((
-                    "Workfile profile is missing loaders configuration: {0}"
+                    "Build profile is missing loaders configuration: {0}"
                 ).format(json.dumps(profile, indent=4)))
                 continue
@@ -943,7 +950,7 @@
 
             if not loaders_match:
                 log.warning((
-                    "None of the loaders from Workfile profile are available: {0}"
+                    "None of the loaders from Build profile are available: {0}"
                 ).format(json.dumps(profile, indent=4)))
                 continue
@@ -951,7 +958,7 @@
             profile_families = profile.get("families")
             if not profile_families:
                 log.warning((
-                    "Workfile profile is missing families configuration: {0}"
+                    "Build profile is missing families configuration: {0}"
                 ).format(json.dumps(profile, indent=4)))
                 continue
@@ -959,7 +966,7 @@
             profile_repre_names = profile.get("repre_names")
             if not profile_repre_names:
                 log.warning((
-                    "Workfile profile is missing"
+                    "Build profile is missing"
                     " representation names filtering: {0}"
                 ).format(json.dumps(profile, indent=4)))
                 continue
@@ -978,6 +985,22 @@
 
     @classmethod
     def _prepare_profile_for_subsets(cls, subsets, profiles):
+        """Select profile for each subset by its data.
+
+        Profiles are filtered for each subset individually.
+        Profile is filtered by subset's family, optionally by name regex and
+        representation names set in profile.
+        It is possible that no matching profile is found for a subset. In
+        that case the subset is skipped, and it is possible that none of the
+        subsets has a matching profile.
+
+        :param subsets: Subset documents.
+        :type subsets: list
+        :param profiles: Build profiles.
+        :type profiles: dict
+        :return: Profile by subset's id.
+        :rtype: dict
+        """
         # Prepare subsets
         subsets_by_family = map_subsets_by_family(subsets)
 
@@ -1020,13 +1043,26 @@
     def load_containers_by_asset_data(
         cls, asset_entity_data, build_profiles, loaders_by_name
     ):
+        """Load containers for entered asset entity by Build profiles.
+
+        :param asset_entity_data: Prepared data with subsets, last version
+            and representations for specific asset.
+        :type asset_entity_data: dict
+        :param build_profiles: Build profiles.
+        :type build_profiles: dict
+        :param loaders_by_name: Available loaders per name.
+        :type loaders_by_name: dict
+        :return: Output contains asset document and loaded containers.
+        :rtype: dict
+        """
+
         # Make sure all data are not empty
         if not asset_entity_data or not build_profiles or not loaders_by_name:
             return
 
         asset_entity = asset_entity_data["asset_entity"]
 
-        valid_profiles = cls._filter_workfile_profiles(
+        valid_profiles = cls._filter_build_profiles(
             build_profiles, loaders_by_name
         )
         if not valid_profiles:
@@ -1105,6 +1141,29 @@
         cls, repres_by_subset_id, subsets_by_id,
         profiles_per_subset_id, loaders_by_name
     ):
+        """The actual loading of representations happens here.
+
+        Loading of representations happens per subset; each subset loads at
+        most one representation. Representation names are tried in the order
+        defined in the configuration. If the subset has a representation
+        matching the representation name, each loader is tried until one of
+        them succeeds. If none of them succeeds, the next representation
+        name is tried.
+        The subset processing loop ends when a representation is loaded or
+        all matching representations were already tried.
+
+        :param repres_by_subset_id: Available representations mapped
+            by their parent (subset) id.
+        :type repres_by_subset_id: dict
+        :param subsets_by_id: Subset documents mapped by their id.
+        :type subsets_by_id: dict
+        :param profiles_per_subset_id: Build profiles mapped by subset id.
+        :type profiles_per_subset_id: dict
+        :param loaders_by_name: Available loaders per name.
+        :type loaders_by_name: dict
+        :return: Objects of loaded containers.
+        :rtype: list
+        """
         loaded_containers = []
         for subset_id, repres in repres_by_subset_id.items():
             subset_name = subsets_by_id[subset_id]["name"]
@@ -1113,6 +1172,10 @@
             loaders_last_idx = len(profile["loaders"]) - 1
             repre_names_last_idx = len(profile["repre_names_lowered"]) - 1
 
+            repre_by_low_name = {
+                repre["name"].lower(): repre for repre in repres
+            }
+
             is_loaded = False
             for repre_name_idx, profile_repre_name in enumerate(
                 profile["repre_names_lowered"]
@@ -1121,14 +1184,8 @@
                 if is_loaded:
                     break
 
-                found_repre = None
-                for repre in repres:
-                    repre_name = repre["name"].lower()
-                    if repre_name == profile_repre_name:
-                        found_repre = repre
-                        break
-
-                if not found_repre:
+                repre = repre_by_low_name.get(profile_repre_name)
+                if not repre:
                     continue
 
                 for loader_idx, loader_name in enumerate(profile["loaders"]):
@@ -1141,7 +1198,7 @@
                     try:
                         container = avalon.api.load(
                             loader,
-                            found_repre["_id"],
+                            repre["_id"],
                             name=subset_name
                         )
                         loaded_containers.append(container)

From 59e2f2a36d5aeb2ef26f2fac33be78d69209be63 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 7 Apr 2020 15:01:05 +0200
Subject: [PATCH 21/22] classmethods replaced with object methods and added process method where post processing can be implemented by host

---
 pype/lib.py       | 45 ++++++++++++++++++++++++---------------------
 pype/maya/menu.py |  2 +-
 2 files changed, 25 insertions(+), 22 deletions(-)

diff --git a/pype/lib.py b/pype/lib.py
index 965e738d1f..d3ccbc8589 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -736,10 +736,19 @@ class BuildWorkfile:
     are host related, since each host has it's loaders.
     """
 
-    @classmethod
-    def build_workfile(cls):
+    def process(self):
         """Main method of this wrapper.
 
+        Building of the workfile is triggered here and it is possible to
+        implement post processing of loaded containers if necessary.
+        """
+        containers = self.build_workfile()
+
+        return containers
+
+    def build_workfile(self):
+        """Prepare and load containers into workfile.
+
         Loads latest versions of current and linked assets to workfile by logic
         stored in Workfile profiles from presets. Profiles are set by host,
         filtered by current task name and used by families.
@@ -793,7 +802,7 @@
         current_task_name = io.Session["AVALON_TASK"]
 
         # Load workfile presets for task
-        build_presets = cls.get_build_presets(current_task_name)
+        build_presets = self.get_build_presets(current_task_name)
 
         # Skip if there are no presets for the task
         if not build_presets:
@@ -850,14 +859,14 @@
             return
 
         # Prepare entities from database for assets
-        prepared_entities = cls._collect_last_version_repres(assets)
+        prepared_entities = self._collect_last_version_repres(assets)
 
         # Load containers by prepared entities and presets
         loaded_containers = []
         # - Current asset containers
         if current_asset_id and current_asset_id in prepared_entities:
             current_context_data = prepared_entities.pop(current_asset_id)
-            loaded_data = cls.load_containers_by_asset_data(
+            loaded_data = self.load_containers_by_asset_data(
                 current_context_data, current_context_profiles, loaders_by_name
             )
             if loaded_data:
@@ -865,7 +874,7 @@
 
         # - Linked assets containers
         for linked_asset_data in prepared_entities.values():
-            loaded_data = cls.load_containers_by_asset_data(
+            loaded_data = self.load_containers_by_asset_data(
                 linked_asset_data, link_context_profiles, loaders_by_name
             )
             if loaded_data:
@@ -874,8 +883,7 @@
         # Return list of loaded containers
         return loaded_containers
 
-    @classmethod
-    def get_build_presets(cls, task_name):
+    def get_build_presets(self, task_name):
         """ Returns presets to build workfile for task name.
 
@@ -909,8 +917,7 @@
 
         return per_task_preset
 
-    @classmethod
-    def _filter_build_profiles(cls, build_profiles, loaders_by_name):
+    def _filter_build_profiles(self, build_profiles, loaders_by_name):
         """ Filter build profiles by loaders and prepare process data.
 
         Valid profile must have "loaders", "families" and "repre_names" keys
@@ -983,8 +990,7 @@
 
         return valid_profiles
 
-    @classmethod
-    def _prepare_profile_for_subsets(cls, subsets, profiles):
+    def _prepare_profile_for_subsets(self, subsets, profiles):
         """Select profile for each subset by its data.
 
         Profiles are filtered for each subset individually.
@@ -1039,9 +1045,8 @@
             break
         return profiles_per_subset_id
 
-    @classmethod
     def load_containers_by_asset_data(
-        cls, asset_entity_data, build_profiles, loaders_by_name
+        self, asset_entity_data, build_profiles, loaders_by_name
     ):
         """Load containers for entered asset entity by Build profiles.
 
@@ -1062,7 +1067,7 @@
 
         asset_entity = asset_entity_data["asset_entity"]
 
-        valid_profiles = cls._filter_build_profiles(
+        valid_profiles = self._filter_build_profiles(
            build_profiles, loaders_by_name
         )
         if not valid_profiles:
@@ -1093,7 +1098,7 @@
             ))
             return
 
-        profiles_per_subset_id = cls._prepare_profile_for_subsets(
+        profiles_per_subset_id = self._prepare_profile_for_subsets(
             subsets_by_id.values(), valid_profiles
         )
         if not profiles_per_subset_id:
@@ -1126,7 +1131,7 @@
 
         log.debug(msg)
 
-        containers = cls._load_containers(
+        containers = self._load_containers(
             valid_repres_by_subset_id, subsets_by_id,
             profiles_per_subset_id, loaders_by_name
         )
@@ -1136,9 +1141,8 @@
             "containers": containers
         }
 
-    @classmethod
     def _load_containers(
-        cls, repres_by_subset_id, subsets_by_id,
+        self, repres_by_subset_id, subsets_by_id,
         profiles_per_subset_id, loaders_by_name
     ):
         """The actual loading of representations happens here.
@@ -1230,8 +1234,7 @@
 
         return loaded_containers
 
-    @classmethod
-    def _collect_last_version_repres(cls, asset_entities):
+    def _collect_last_version_repres(self, asset_entities):
         """Collect subsets, versions and representations for asset_entities.
 
         :param asset_entities: Asset entities for which to find data
diff --git a/pype/maya/menu.py b/pype/maya/menu.py
index eee4347a72..12eca6d4d9 100644
--- a/pype/maya/menu.py
+++ b/pype/maya/menu.py
@@ -29,7 +29,7 @@ def deferred():
     cmds.menuItem(
         "Build First Workfile",
         parent=pipeline._menu,
-        command=lambda *args: BuildWorkfile.build_workfile()
+        command=lambda *args: BuildWorkfile().build_workfile()
     )
 
     log.info("Attempting to install scripts menu..")

From 1b28e58c62e62ada29a004f60e40edaecf8c6aa1 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 7 Apr 2020 15:10:19 +0200
Subject: [PATCH 22/22] changed method which is called in maya

---
 pype/maya/menu.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/maya/menu.py b/pype/maya/menu.py
index 12eca6d4d9..70df50b9e6 100644
--- a/pype/maya/menu.py
+++ b/pype/maya/menu.py
@@ -29,7 +29,7 @@ def deferred():
     cmds.menuItem(
         "Build First Workfile",
         parent=pipeline._menu,
-        command=lambda *args: BuildWorkfile().build_workfile()
+        command=lambda *args: BuildWorkfile().process()
    )
 
     log.info("Attempting to install scripts menu..")
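
The last two patches make `BuildWorkfile().process()` the entry point that hosts call, and `process()` is explicitly the place where a host can hook in post processing of the loaded containers. Below is a minimal sketch of such a host-side hook; the `MayaBuildWorkfile` subclass and its log message are hypothetical illustrations of the extension point, not part of these patches.

```
import logging

from pype.lib import BuildWorkfile

log = logging.getLogger(__name__)


class MayaBuildWorkfile(BuildWorkfile):
    """Hypothetical host subclass using the new `process` entry point."""

    def process(self):
        # Build the workfile first. The result is a list of dicts with
        # "asset_entity" and "containers" keys, one item per asset, or
        # None when the build was skipped (missing presets, no assets).
        loaded_containers = self.build_workfile()

        # Host specific post processing of loaded containers goes here.
        for item in loaded_containers or []:
            asset_name = item["asset_entity"]["name"]
            log.info("Loaded {} container(s) for asset `{}`".format(
                len(item["containers"]), asset_name
            ))

        return loaded_containers
```

The menu item would then point at the subclass, mirroring the change in PATCH 22: `command=lambda *args: MayaBuildWorkfile().process()`.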