Merge pull request #3578 from pypeclub/feature/maya-build-from-template

Maya: Build workfile by template
This commit is contained in:
Jakub Trllo 2022-08-18 10:36:39 +02:00 committed by GitHub
commit 93c9fb2309
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 1374 additions and 26 deletions

View file

@ -6,6 +6,7 @@ that has project name as a context (e.g. on 'ProjectEntity'?).
+ We will need more specific functions doing very specific queries really fast.
"""
import re
import collections
import six
@ -1009,17 +1010,70 @@ def get_representation_by_name(
return conn.find_one(query_filter, _prepare_fields(fields))
def _flatten_dict(data):
flatten_queue = collections.deque()
flatten_queue.append(data)
output = {}
while flatten_queue:
item = flatten_queue.popleft()
for key, value in item.items():
if not isinstance(value, dict):
output[key] = value
continue
tmp = {}
for subkey, subvalue in value.items():
new_key = "{}.{}".format(key, subkey)
tmp[new_key] = subvalue
flatten_queue.append(tmp)
return output
def _regex_filters(filters):
output = []
for key, value in filters.items():
regexes = []
a_values = []
if isinstance(value, re.Pattern):
regexes.append(value)
elif isinstance(value, (list, tuple, set)):
for item in value:
if isinstance(item, re.Pattern):
regexes.append(item)
else:
a_values.append(item)
else:
a_values.append(value)
key_filters = []
if len(a_values) == 1:
key_filters.append({key: a_values[0]})
elif a_values:
key_filters.append({key: {"$in": a_values}})
for regex in regexes:
key_filters.append({key: {"$regex": regex}})
if len(key_filters) == 1:
output.append(key_filters[0])
else:
output.append({"$or": key_filters})
return output
def _get_representations(
project_name,
representation_ids,
representation_names,
version_ids,
extensions,
context_filters,
names_by_version_ids,
standard,
archived,
fields
):
default_output = []
repre_types = []
if standard:
repre_types.append("representation")
@ -1027,7 +1081,7 @@ def _get_representations(
repre_types.append("archived_representation")
if not repre_types:
return []
return default_output
if len(repre_types) == 1:
query_filter = {"type": repre_types[0]}
@ -1037,25 +1091,21 @@ def _get_representations(
if representation_ids is not None:
representation_ids = _convert_ids(representation_ids)
if not representation_ids:
return []
return default_output
query_filter["_id"] = {"$in": representation_ids}
if representation_names is not None:
if not representation_names:
return []
return default_output
query_filter["name"] = {"$in": list(representation_names)}
if version_ids is not None:
version_ids = _convert_ids(version_ids)
if not version_ids:
return []
return default_output
query_filter["parent"] = {"$in": version_ids}
if extensions is not None:
if not extensions:
return []
query_filter["context.ext"] = {"$in": list(extensions)}
or_queries = []
if names_by_version_ids is not None:
or_query = []
for version_id, names in names_by_version_ids.items():
@ -1065,8 +1115,36 @@ def _get_representations(
"name": {"$in": list(names)}
})
if not or_query:
return default_output
or_queries.append(or_query)
if context_filters is not None:
if not context_filters:
return []
query_filter["$or"] = or_query
_flatten_filters = _flatten_dict(context_filters)
flatten_filters = {}
for key, value in _flatten_filters.items():
if not key.startswith("context"):
key = "context.{}".format(key)
flatten_filters[key] = value
for item in _regex_filters(flatten_filters):
for key, value in item.items():
if key != "$or":
query_filter[key] = value
elif value:
or_queries.append(value)
if len(or_queries) == 1:
query_filter["$or"] = or_queries[0]
elif or_queries:
and_query = []
for or_query in or_queries:
if isinstance(or_query, list):
or_query = {"$or": or_query}
and_query.append(or_query)
query_filter["$and"] = and_query
conn = get_project_connection(project_name)
@ -1078,7 +1156,7 @@ def get_representations(
representation_ids=None,
representation_names=None,
version_ids=None,
extensions=None,
context_filters=None,
names_by_version_ids=None,
archived=False,
standard=True,
@ -1096,8 +1174,8 @@ def get_representations(
as filter. Filter ignored if 'None' is passed.
version_ids (Iterable[str]): Subset ids used as parent filter. Filter
ignored if 'None' is passed.
extensions (Iterable[str]): Filter by extension of main representation
file (without dot).
context_filters (Dict[str, List[Union[str, re.Pattern]]]): Filter by
representation context fields.
names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
using version ids and list of names under the version.
archived (bool): Output will also contain archived representations.
@ -1113,7 +1191,7 @@ def get_representations(
representation_ids=representation_ids,
representation_names=representation_names,
version_ids=version_ids,
extensions=extensions,
context_filters=context_filters,
names_by_version_ids=names_by_version_ids,
standard=True,
archived=archived,
@ -1126,7 +1204,7 @@ def get_archived_representations(
representation_ids=None,
representation_names=None,
version_ids=None,
extensions=None,
context_filters=None,
names_by_version_ids=None,
fields=None
):
@ -1142,8 +1220,8 @@ def get_archived_representations(
as filter. Filter ignored if 'None' is passed.
version_ids (Iterable[str]): Subset ids used as parent filter. Filter
ignored if 'None' is passed.
extensions (Iterable[str]): Filter by extension of main representation
file (without dot).
context_filters (Dict[str, List[Union[str, re.Pattern]]]): Filter by
representation context fields.
names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
using version ids and list of names under the version.
fields (Iterable[str]): Fields that should be returned. All fields are
@ -1158,7 +1236,7 @@ def get_archived_representations(
representation_ids=representation_ids,
representation_names=representation_names,
version_ids=version_ids,
extensions=extensions,
context_filters=context_filters,
names_by_version_ids=names_by_version_ids,
standard=False,
archived=True,
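For reference, a minimal, hedged sketch of how the new `context_filters` argument can be used; the project, asset and family names below are purely illustrative:

```python
import re

from openpype.client import get_representations

# Hypothetical project and filter values, for illustration only.
project_name = "demo_project"
repre_docs = get_representations(
    project_name,
    context_filters={
        # Plain strings become equality/"$in" filters on the representation
        # context, 're.Pattern' values become "$regex" filters.
        "asset": ["characterA"],
        "subset": [re.compile(r"^model.*")],
        "family": ["model"],
    },
)
for repre_doc in repre_docs:
    print(repre_doc["context"]["asset"], repre_doc["name"])
```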

View file

@ -0,0 +1,253 @@
import json
from collections import OrderedDict
import maya.cmds as cmds
import qargparse
from openpype.tools.utils.widgets import OptionDialog
from .lib import get_main_window, imprint
# TODO: change to an enum
build_types = ["context_asset", "linked_asset", "all_assets"]
def get_placeholder_attributes(node):
return {
attr: cmds.getAttr("{}.{}".format(node, attr))
for attr in cmds.listAttr(node, userDefined=True)}
def delete_placeholder_attributes(node):
'''Delete all extra placeholder attributes from the given node.'''
extra_attributes = get_placeholder_attributes(node)
for attribute in extra_attributes:
cmds.deleteAttr(node + '.' + attribute)
def create_placeholder():
args = placeholder_window()
if not args:
return # operation canceled, no locator created
# custom arg parse to force empty data query
# and still imprint them on placeholder
# and getting items when arg is of type Enumerator
options = create_options(args)
# create placeholder name dynamically from args and options
placeholder_name = create_placeholder_name(args, options)
selection = cmds.ls(selection=True)
if not selection:
raise ValueError("Nothing is selected")
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
# get the long name of the placeholder (with the groups)
placeholder_full_name = cmds.ls(selection[0], long=True)[
0] + '|' + placeholder.replace('|', '')
if selection:
cmds.parent(placeholder, selection[0])
imprint(placeholder_full_name, options)
# Some tweaks because imprint forces enums to their default value, so we
# read the args back and force them onto the attributes
imprint_enum(placeholder_full_name, args)
# Add helper attributes to keep placeholder info
cmds.addAttr(
placeholder_full_name,
longName="parent",
hidden=True,
dataType="string"
)
cmds.addAttr(
placeholder_full_name,
longName="index",
hidden=True,
attributeType="short",
defaultValue=-1
)
cmds.setAttr(placeholder_full_name + '.parent', "", type="string")
def create_placeholder_name(args, options):
placeholder_builder_type = [
arg.read() for arg in args if 'builder_type' in str(arg)
][0]
placeholder_family = options['family']
placeholder_name = placeholder_builder_type.split('_')
# add family if any
if placeholder_family:
placeholder_name.insert(1, placeholder_family)
# add loader arguments if any
if options['loader_args']:
pos = 2
loader_args = options['loader_args'].replace('\'', '\"')
loader_args = json.loads(loader_args)
values = [v for v in loader_args.values()]
for i in range(len(values)):
placeholder_name.insert(i + pos, values[i])
placeholder_name = '_'.join(placeholder_name)
return placeholder_name.capitalize()
def update_placeholder():
placeholder = cmds.ls(selection=True)
if len(placeholder) == 0:
raise ValueError("No node selected")
if len(placeholder) > 1:
raise ValueError("Too many selected nodes")
placeholder = placeholder[0]
args = placeholder_window(get_placeholder_attributes(placeholder))
if not args:
return # operation canceled
# delete placeholder attributes
delete_placeholder_attributes(placeholder)
options = create_options(args)
imprint(placeholder, options)
imprint_enum(placeholder, args)
cmds.addAttr(
placeholder,
longName="parent",
hidden=True,
dataType="string"
)
cmds.addAttr(
placeholder,
longName="index",
hidden=True,
attributeType="short",
defaultValue=-1
)
cmds.setAttr(placeholder + '.parent', '', type="string")
def create_options(args):
options = OrderedDict()
for arg in args:
if not type(arg) == qargparse.Separator:
options[str(arg)] = arg._data.get("items") or arg.read()
return options
def imprint_enum(placeholder, args):
"""
Imprint method doesn't act properly with enums.
Replacing the functionality with this for now.
"""
enum_values = {str(arg): arg.read()
for arg in args if arg._data.get("items")}
string_to_value_enum_table = {
build: i for i, build
in enumerate(build_types)}
for key, value in enum_values.items():
cmds.setAttr(
placeholder + "." + key,
string_to_value_enum_table[value])
def placeholder_window(options=None):
options = options or dict()
dialog = OptionDialog(parent=get_main_window())
dialog.setWindowTitle("Create Placeholder")
args = [
qargparse.Separator("Main attributes"),
qargparse.Enum(
"builder_type",
label="Asset Builder Type",
default=options.get("builder_type", 0),
items=build_types,
help="""Asset Builder Type
Builder type describes what the template loader will look for.
context_asset : Template loader will look for subsets of
current context asset (e.g. asset "bob" will find subsets of "bob")
linked_asset : Template loader will look for assets linked
to current context asset.
Linked assets are looked up in the avalon database under the field "inputLinks"
"""
),
qargparse.String(
"family",
default=options.get("family", ""),
label="OpenPype Family",
placeholder="ex: model, look ..."),
qargparse.String(
"representation",
default=options.get("representation", ""),
label="OpenPype Representation",
placeholder="ex: ma, abc ..."),
qargparse.String(
"loader",
default=options.get("loader", ""),
label="Loader",
placeholder="ex: ReferenceLoader, LightLoader ...",
help="""Loader
Defines what OpenPype loader will be used to load assets.
Usable loaders depend on the current host's loader list.
Field is case sensitive.
"""),
qargparse.String(
"loader_args",
default=options.get("loader_args", ""),
label="Loader Arguments",
placeholder='ex: {"camera":"persp", "lights":True}',
help="""Loader
Defines a dictionnary of arguments used to load assets.
Useable arguments depend on current placeholder Loader.
Field should be a valid python dict. Anything else will be ignored.
"""),
qargparse.Integer(
"order",
default=options.get("order", 0),
min=0,
max=999,
label="Order",
placeholder="ex: 0, 100 ... (smallest order loaded first)",
help="""Order
Order defines asset loading priority (0 to 999)
Priority rule is : "lowest is first to load"."""),
qargparse.Separator(
"Optional attributes"),
qargparse.String(
"asset",
default=options.get("asset", ""),
label="Asset filter",
placeholder="regex filtering by asset name",
help="Filtering assets by matching field regex to asset's name"),
qargparse.String(
"subset",
default=options.get("subset", ""),
label="Subset filter",
placeholder="regex filtering by subset name",
help="Filtering assets by matching field regex to subset's name"),
qargparse.String(
"hierarchy",
default=options.get("hierarchy", ""),
label="Hierarchy filter",
placeholder="regex filtering by asset's hierarchy",
help="Filtering assets by matching field asset's hierarchy")
]
dialog.create(args)
if not dialog.exec_():
return None
return args
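For orientation, a hedged sketch of how the imprinted data of a placeholder locator can be read back with the helper above; the attribute values in the comment are only an example of what the dialog may produce:

```python
import maya.cmds as cmds

# Select a placeholder locator created by "Create Placeholder" first.
node = cmds.ls(selection=True, long=True)[0]
attributes = get_placeholder_attributes(node)

# Example of what could be returned (values depend on the dialog input;
# 'builder_type' is stored as the enum index, see imprint_enum above):
# {
#     "builder_type": 0,            # "context_asset"
#     "family": "model",
#     "representation": "ma",
#     "loader": "ReferenceLoader",
#     "loader_args": "",
#     "order": 0,
#     "asset": "", "subset": "", "hierarchy": "",
#     "parent": "", "index": -1,
# }
print(attributes)
```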

View file

@ -9,10 +9,15 @@ import maya.cmds as cmds
from openpype.settings import get_project_settings
from openpype.pipeline import legacy_io
from openpype.pipeline.workfile import BuildWorkfile
from openpype.pipeline.workfile.build_template import (
build_workfile_template,
update_workfile_template
)
from openpype.tools.utils import host_tools
from openpype.hosts.maya.api import lib, lib_rendersettings
from .lib import get_main_window, IS_HEADLESS
from .commands import reset_frame_range
from .lib_template_builder import create_placeholder, update_placeholder
log = logging.getLogger(__name__)
@ -147,6 +152,34 @@ def install():
parent_widget
)
)
builder_menu = cmds.menuItem(
"Template Builder",
subMenu=True,
tearOff=True,
parent=MENU_NAME
)
cmds.menuItem(
"Create Placeholder",
parent=builder_menu,
command=lambda *args: create_placeholder()
)
cmds.menuItem(
"Update Placeholder",
parent=builder_menu,
command=lambda *args: update_placeholder()
)
cmds.menuItem(
"Build Workfile from template",
parent=builder_menu,
command=build_workfile_template
)
cmds.menuItem(
"Update Workfile from template",
parent=builder_menu,
command=update_workfile_template
)
cmds.setParent(MENU_NAME, menu=True)
def add_scripts_menu():

View file

@ -0,0 +1,252 @@
import re
from maya import cmds
from openpype.client import get_representations
from openpype.pipeline import legacy_io
from openpype.pipeline.workfile.abstract_template_loader import (
AbstractPlaceholder,
AbstractTemplateLoader
)
from openpype.pipeline.workfile.build_template_exceptions import (
TemplateAlreadyImported
)
PLACEHOLDER_SET = 'PLACEHOLDERS_SET'
class MayaTemplateLoader(AbstractTemplateLoader):
"""Concrete implementation of AbstractTemplateLoader for maya
"""
def import_template(self, path):
"""Import template into current scene.
Block if a template is already loaded.
Args:
path (str): A path to current template (usually given by
get_template_path implementation)
Returns:
bool: Whether the template was successfully imported or not
"""
if cmds.objExists(PLACEHOLDER_SET):
raise TemplateAlreadyImported(
"Build template already loaded\n"
"Clean scene if needed (File > New Scene)")
cmds.sets(name=PLACEHOLDER_SET, empty=True)
self.new_nodes = cmds.file(path, i=True, returnNewNodes=True)
cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True)
for set in cmds.listSets(allSets=True):
if (cmds.objExists(set) and
cmds.attributeQuery('id', node=set, exists=True) and
cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'):
if cmds.attributeQuery('asset', node=set, exists=True):
cmds.setAttr(
set + '.asset',
legacy_io.Session['AVALON_ASSET'], type='string'
)
return True
def template_already_imported(self, err_msg):
clearButton = "Clear scene and build"
updateButton = "Update template"
abortButton = "Abort"
title = "Scene already builded"
message = (
"It's seems a template was already build for this scene.\n"
"Error message reveived :\n\n\"{}\"".format(err_msg))
buttons = [clearButton, updateButton, abortButton]
defaultButton = clearButton
cancelButton = abortButton
dismissString = abortButton
answer = cmds.confirmDialog(
t=title,
m=message,
b=buttons,
db=defaultButton,
cb=cancelButton,
ds=dismissString)
if answer == clearButton:
cmds.file(newFile=True, force=True)
self.import_template(self.template_path)
self.populate_template()
elif answer == updateButton:
self.update_missing_containers()
elif answer == abortButton:
return
@staticmethod
def get_template_nodes():
attributes = cmds.ls('*.builder_type', long=True)
return [attribute.rpartition('.')[0] for attribute in attributes]
def get_loaded_containers_by_id(self):
try:
containers = cmds.sets("AVALON_CONTAINERS", q=True)
except ValueError:
return None
return [
cmds.getAttr(container + '.representation')
for container in containers]
class MayaPlaceholder(AbstractPlaceholder):
"""Concrete implementation of AbstractPlaceholder for maya
"""
optional_keys = {'asset', 'subset', 'hierarchy'}
def get_data(self, node):
user_data = dict()
for attr in self.required_keys.union(self.optional_keys):
attribute_name = '{}.{}'.format(node, attr)
if not cmds.attributeQuery(attr, node=node, exists=True):
print("{} not found".format(attribute_name))
continue
user_data[attr] = cmds.getAttr(
attribute_name,
asString=True)
user_data['parent'] = (
cmds.getAttr(node + '.parent', asString=True)
or node.rpartition('|')[0]
or ""
)
user_data['node'] = node
if user_data['parent']:
siblings = cmds.listRelatives(user_data['parent'], children=True)
else:
siblings = cmds.ls(assemblies=True)
node_shortname = user_data['node'].rpartition('|')[2]
current_index = cmds.getAttr(node + '.index', asString=True)
user_data['index'] = (
current_index if current_index >= 0
else siblings.index(node_shortname))
self.data = user_data
def parent_in_hierarchy(self, containers):
"""Parent loaded container to placeholder's parent
i.e. set loaded content as placeholder's sibling
Args:
containers (String): Placeholder loaded containers
"""
if not containers:
return
roots = cmds.sets(containers, q=True)
nodes_to_parent = []
for root in roots:
if root.endswith("_RN"):
refRoot = cmds.referenceQuery(root, n=True)[0]
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
nodes_to_parent.extend(refRoot)
elif root in cmds.listSets(allSets=True):
if not cmds.sets(root, q=True):
return
else:
continue
else:
nodes_to_parent.append(root)
if self.data['parent']:
cmds.parent(nodes_to_parent, self.data['parent'])
# Move loaded nodes to correct index in outliner hierarchy
placeholder_node = self.data['node']
placeholder_form = cmds.xform(
placeholder_node,
q=True,
matrix=True,
worldSpace=True
)
for node in set(nodes_to_parent):
cmds.reorder(node, front=True)
cmds.reorder(node, relative=self.data['index'])
cmds.xform(node, matrix=placeholder_form, ws=True)
holding_sets = cmds.listSets(object=placeholder_node)
if not holding_sets:
return
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)
def clean(self):
"""Hide placeholder, parent them to root
add them to placeholder set and register placeholder's parent
to keep placeholder info available for future use
"""
node = self.data['node']
if self.data['parent']:
cmds.setAttr(node + '.parent', self.data['parent'], type='string')
if cmds.getAttr(node + '.index') < 0:
cmds.setAttr(node + '.index', self.data['index'])
holding_sets = cmds.listSets(object=node)
if holding_sets:
for set in holding_sets:
cmds.sets(node, remove=set)
if cmds.listRelatives(node, p=True):
node = cmds.parent(node, world=True)[0]
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr(node + '.hiddenInOutliner', True)
def get_representations(self, current_asset_doc, linked_asset_docs):
project_name = legacy_io.active_project()
builder_type = self.data["builder_type"]
if builder_type == "context_asset":
context_filters = {
"asset": [current_asset_doc["name"]],
"subset": [re.compile(self.data["subset"])],
"hierarchy": [re.compile(self.data["hierarchy"])],
"representations": [self.data["representation"]],
"family": [self.data["family"]]
}
elif builder_type != "linked_asset":
context_filters = {
"asset": [re.compile(self.data["asset"])],
"subset": [re.compile(self.data["subset"])],
"hierarchy": [re.compile(self.data["hierarchy"])],
"representation": [self.data["representation"]],
"family": [self.data["family"]]
}
else:
asset_regex = re.compile(self.data["asset"])
linked_asset_names = []
for asset_doc in linked_asset_docs:
asset_name = asset_doc["name"]
if asset_regex.match(asset_name):
linked_asset_names.append(asset_name)
context_filters = {
"asset": linked_asset_names,
"subset": [re.compile(self.data["subset"])],
"hierarchy": [re.compile(self.data["hierarchy"])],
"representation": [self.data["representation"]],
"family": [self.data["family"]],
}
return list(get_representations(
project_name,
context_filters=context_filters
))
def err_message(self):
return (
"Error while trying to load a representation.\n"
"Either the subset wasn't published or the template is malformed."
"\n\n"
"Builder was looking for :\n{attributes}".format(
attributes="\n".join([
"{}: {}".format(key.title(), value)
for key, value in self.data.items()]
)
)
)

View file

@ -1,11 +1,9 @@
"""Should be used only inside of hosts."""
import os
import json
import re
import copy
import platform
import logging
import collections
import functools
import warnings
@ -13,13 +11,9 @@ from openpype.client import (
get_project,
get_assets,
get_asset_by_name,
get_subsets,
get_last_versions,
get_last_version_by_subset_name,
get_representations,
get_workfile_info,
)
from openpype.settings import get_project_settings
from .profiles_filtering import filter_profiles
from .events import emit_event
from .path_templates import StringTemplate

View file

@ -16,6 +16,7 @@ from .utils import (
switch_container,
get_loader_identifier,
get_loaders_by_name,
get_representation_path_from_context,
get_representation_path,
@ -61,6 +62,7 @@ __all__ = (
"switch_container",
"get_loader_identifier",
"get_loaders_by_name",
"get_representation_path_from_context",
"get_representation_path",

View file

@ -369,6 +369,20 @@ def get_loader_identifier(loader):
return loader.__name__
def get_loaders_by_name():
from .plugins import discover_loader_plugins
loaders_by_name = {}
for loader in discover_loader_plugins():
loader_name = loader.__name__
if loader_name in loaders_by_name:
raise KeyError(
"Duplicated loader name {} !".format(loader_name)
)
loaders_by_name[loader_name] = loader
return loaders_by_name
def _get_container_loader(container):
"""Return the Loader corresponding to the container"""
from .plugins import discover_loader_plugins

View file

@ -0,0 +1,526 @@
import os
from abc import ABCMeta, abstractmethod
import six
import logging
from functools import reduce
from openpype.client import get_asset_by_name
from openpype.settings import get_project_settings
from openpype.lib import (
StringTemplate,
Logger,
filter_profiles,
get_linked_assets,
)
from openpype.pipeline import legacy_io, Anatomy
from openpype.pipeline.load import (
get_loaders_by_name,
get_representation_context,
load_with_repre_context,
)
from .build_template_exceptions import (
TemplateAlreadyImported,
TemplateLoadingFailed,
TemplateProfileNotFound,
TemplateNotFound
)
log = logging.getLogger(__name__)
def update_representations(entities, entity):
if entity['context']['subset'] not in entities:
entities[entity['context']['subset']] = entity
else:
current = entities[entity['context']['subset']]
incoming = entity
entities[entity['context']['subset']] = max(
current, incoming,
key=lambda entity: entity["context"].get("version", -1))
return entities
def parse_loader_args(loader_args):
if not loader_args:
return dict()
try:
parsed_args = eval(loader_args)
if not isinstance(parsed_args, dict):
return dict()
else:
return parsed_args
except Exception as err:
print(
"Error while parsing loader arguments '{}'.\n{}: {}\n\n"
"Continuing with default arguments. . .".format(
loader_args,
err.__class__.__name__,
err))
return dict()
@six.add_metaclass(ABCMeta)
class AbstractTemplateLoader:
"""
Abstraction of Template Loader.
Properties:
template_path : property to get current template path
Methods:
import_template : Abstract Method. Used to load template,
depending on current host
get_template_nodes : Abstract Method. Used to query nodes acting
as placeholders. Depending on current host
"""
_log = None
def __init__(self, placeholder_class):
# TODO template loader should expect host as an argument
# - host have all responsibility for most of code (also provide
# placeholder class)
# - also have responsibility for current context
# - this won't work in DCCs where multiple workfiles with
# different contexts can be opened at the same time
# - template loader should have ability to change context
project_name = legacy_io.active_project()
asset_name = legacy_io.Session["AVALON_ASSET"]
self.loaders_by_name = get_loaders_by_name()
self.current_asset = asset_name
self.project_name = project_name
self.host_name = legacy_io.Session["AVALON_APP"]
self.task_name = legacy_io.Session["AVALON_TASK"]
self.placeholder_class = placeholder_class
self.current_asset_doc = get_asset_by_name(project_name, asset_name)
self.task_type = (
self.current_asset_doc
.get("data", {})
.get("tasks", {})
.get(self.task_name, {})
.get("type")
)
self.log.info(
"BUILDING ASSET FROM TEMPLATE :\n"
"Starting templated build for {asset} in {project}\n\n"
"Asset : {asset}\n"
"Task : {task_name} ({task_type})\n"
"Host : {host}\n"
"Project : {project}\n".format(
asset=self.current_asset,
host=self.host_name,
project=self.project_name,
task_name=self.task_name,
task_type=self.task_type
))
# Skip if there is no loader
if not self.loaders_by_name:
self.log.warning(
"There is no registered loaders. No assets will be loaded")
return
@property
def log(self):
if self._log is None:
self._log = Logger.get_logger(self.__class__.__name__)
return self._log
def template_already_imported(self, err_msg):
"""In case template was already loaded.
Raise the error as a default action.
Override this method in your template loader implementation
to manage this case."""
self.log.error("{}: {}".format(
err_msg.__class__.__name__,
err_msg))
raise TemplateAlreadyImported(err_msg)
def template_loading_failed(self, err_msg):
"""In case template loading failed
Raise the error as a default action.
Override this method in your template loader implementation
to manage this case.
"""
self.log.error("{}: {}".format(
err_msg.__class__.__name__,
err_msg))
raise TemplateLoadingFailed(err_msg)
@property
def template_path(self):
"""
Property returning the template path. No setter is provided.
The template path is taken from OpenPype settings based on the current
avalon session, solving the path variables if needed.
Returns:
str: Solved template path
Raises:
TemplateProfileNotFound: No profile found from settings for
current avalon session
KeyError: Could not solve path because a key does not exist
in avalon context
TemplateNotFound: Solved path does not exist on current filesystem
"""
project_name = self.project_name
host_name = self.host_name
task_name = self.task_name
task_type = self.task_type
anatomy = Anatomy(project_name)
project_settings = get_project_settings(project_name)
build_info = project_settings[host_name]["templated_workfile_build"]
profile = filter_profiles(
build_info["profiles"],
{
"task_types": task_type,
"tasks": task_name
}
)
if not profile:
raise TemplateProfileNotFound(
"No matching profile found for task '{}' of type '{}' "
"with host '{}'".format(task_name, task_type, host_name)
)
path = profile["path"]
if not path:
raise TemplateLoadingFailed(
"Template path is not set.\n"
"Path need to be set in {}\\Template Workfile Build "
"Settings\\Profiles".format(host_name.title()))
# Try fill path with environments and anatomy roots
fill_data = {
key: value
for key, value in os.environ.items()
}
fill_data["root"] = anatomy.roots
result = StringTemplate.format_template(path, fill_data)
if result.solved:
path = result.normalized()
if path and os.path.exists(path):
self.log.info("Found template at: '{}'".format(path))
return path
solved_path = None
while True:
try:
solved_path = anatomy.path_remapper(path)
except KeyError as missing_key:
raise KeyError(
"Could not solve key '{}' in template path '{}'".format(
missing_key, path))
if solved_path is None:
solved_path = path
if solved_path == path:
break
path = solved_path
solved_path = os.path.normpath(solved_path)
if not os.path.exists(solved_path):
raise TemplateNotFound(
"Template found in openPype settings for task '{}' with host "
"'{}' does not exists. (Not found : {})".format(
task_name, host_name, solved_path))
self.log.info("Found template at: '{}'".format(solved_path))
return solved_path
def populate_template(self, ignored_ids=None):
"""
Use template placeholders to load assets and parent them in hierarchy
Args:
ignored_ids (Iterable[str]): Representation ids to skip, e.g. for
containers that are already loaded.
Returns:
None
"""
loaders_by_name = self.loaders_by_name
current_asset_doc = self.current_asset_doc
linked_assets = get_linked_assets(current_asset_doc)
ignored_ids = ignored_ids or []
placeholders = self.get_placeholders()
self.log.debug("Placeholders found in template: {}".format(
[placeholder.name for placeholder in placeholders]
))
for placeholder in placeholders:
self.log.debug("Start to processing placeholder {}".format(
placeholder.name
))
placeholder_representations = self.get_placeholder_representations(
placeholder,
current_asset_doc,
linked_assets
)
if not placeholder_representations:
self.log.info(
"There's no representation for this placeholder: "
"{}".format(placeholder.name)
)
continue
for representation in placeholder_representations:
self.preload(placeholder, loaders_by_name, representation)
if self.load_data_is_incorrect(
placeholder,
representation,
ignored_ids):
continue
self.log.info(
"Loading {}_{} with loader {}\n"
"Loader arguments used : {}".format(
representation['context']['asset'],
representation['context']['subset'],
placeholder.loader_name,
placeholder.loader_args))
try:
container = self.load(
placeholder, loaders_by_name, representation)
except Exception:
self.load_failed(placeholder, representation)
else:
self.load_succeed(placeholder, container)
finally:
self.postload(placeholder)
def get_placeholder_representations(
self, placeholder, current_asset_doc, linked_asset_docs
):
placeholder_representations = placeholder.get_representations(
current_asset_doc,
linked_asset_docs
)
for repre_doc in reduce(
update_representations,
placeholder_representations,
dict()
).values():
yield repre_doc
def load_data_is_incorrect(
self, placeholder, last_representation, ignored_ids):
if not last_representation:
self.log.warning(placeholder.err_message())
return True
if (str(last_representation['_id']) in ignored_ids):
print("Ignoring : ", last_representation['_id'])
return True
return False
def preload(self, placeholder, loaders_by_name, last_representation):
pass
def load(self, placeholder, loaders_by_name, last_representation):
repre = get_representation_context(last_representation)
return load_with_repre_context(
loaders_by_name[placeholder.loader_name],
repre,
options=parse_loader_args(placeholder.loader_args))
def load_succeed(self, placeholder, container):
placeholder.parent_in_hierarchy(container)
def load_failed(self, placeholder, last_representation):
self.log.warning(
"Got error trying to load {}:{} with {}".format(
last_representation['context']['asset'],
last_representation['context']['subset'],
placeholder.loader_name
),
exc_info=True
)
def postload(self, placeholder):
placeholder.clean()
def update_missing_containers(self):
loaded_containers_ids = self.get_loaded_containers_by_id()
self.populate_template(ignored_ids=loaded_containers_ids)
def get_placeholders(self):
placeholders = map(self.placeholder_class, self.get_template_nodes())
valid_placeholders = filter(
lambda i: i.is_valid,
placeholders
)
sorted_placeholders = list(sorted(
valid_placeholders,
key=lambda i: i.order
))
return sorted_placeholders
@abstractmethod
def get_loaded_containers_by_id(self):
"""
Collect already loaded containers for updating scene
Return:
dict (string, node): A dictionary with id as key
and container as value
"""
pass
@abstractmethod
def import_template(self, template_path):
"""
Import template in current host
Args:
template_path (str): full path to current task and
host's template file
Return:
None
"""
pass
@abstractmethod
def get_template_nodes(self):
"""
Return a list of nodes acting as host placeholders for
templating. The data representation is up to the host implementation.
AbstractTemplateLoader won't directly manipulate these nodes.
Returns:
list: Nodes acting as placeholders in the current scene
"""
pass
@six.add_metaclass(ABCMeta)
class AbstractPlaceholder:
"""Abstraction of placeholders logic.
Properties:
required_keys: A list of mandatory keys to describe placeholder
and assets to load.
optional_keys: A list of optional keys to describe
placeholder and assets to load
loader_name: Name of linked loader to use while loading assets
Args:
identifier (str): Placeholder identifier. Should be possible to be
used as identifier in "a scene" (e.g. unique node name).
"""
required_keys = {
"builder_type",
"family",
"representation",
"order",
"loader",
"loader_args"
}
optional_keys = set()
def __init__(self, identifier):
self._log = None
self._name = identifier
self.get_data(identifier)
@property
def log(self):
if self._log is None:
self._log = Logger.get_logger(repr(self))
return self._log
def __repr__(self):
return "< {} {} >".format(self.__class__.__name__, self.name)
@property
def name(self):
return self._name
@property
def loader_args(self):
return self.data["loader_args"]
@property
def builder_type(self):
return self.data["builder_type"]
@property
def order(self):
return self.data["order"]
@property
def loader_name(self):
"""Return placeholder loader name.
Returns:
str: Loader name that will be used to load placeholder
representations.
"""
return self.data["loader"]
@property
def is_valid(self):
"""Test validity of placeholder.
i.e.: every required key exists in placeholder data
Returns:
bool: True if every key is in data
"""
if set(self.required_keys).issubset(self.data.keys()):
self.log.debug("Valid placeholder : {}".format(self.name))
return True
self.log.info("Placeholder is not valid : {}".format(self.name))
return False
@abstractmethod
def parent_in_hierarchy(self, container):
"""Place loaded container in correct hierarchy given by placeholder
Args:
container (Dict[str, Any]): Loaded container created by loader.
"""
pass
@abstractmethod
def clean(self):
"""Clean placeholder from hierarchy after loading assets."""
pass
@abstractmethod
def get_representations(self, current_asset_doc, linked_asset_docs):
"""Query representations based on placeholder data.
Args:
current_asset_doc (Dict[str, Any]): Document of current
context asset.
linked_asset_docs (List[Dict[str, Any]]): Documents of assets
linked to current context asset.
Returns:
Iterable[Dict[str, Any]]: Representations that are matching
placeholder filters.
"""
pass
@abstractmethod
def get_data(self, identifier):
"""Collect information about placeholder by identifier.
Args:
identifier (str): A unique placeholder identifier defined by
implementation.
"""
pass
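As a quick illustration of the version handling above, a hedged sketch of how `reduce(update_representations, ...)` (used in `get_placeholder_representations`) keeps only the highest version per subset; the documents below are fabricated and the function is assumed to be in scope from this module:

```python
from functools import reduce

# Fabricated minimal representation documents, for illustration only.
repre_docs = [
    {"name": "ma", "context": {"subset": "modelMain", "version": 1}},
    {"name": "ma", "context": {"subset": "modelMain", "version": 3}},
    {"name": "abc", "context": {"subset": "modelProxy", "version": 2}},
]

latest_by_subset = reduce(update_representations, repre_docs, dict())
for subset_name, repre_doc in latest_by_subset.items():
    # modelMain -> version 3, modelProxy -> version 2
    print(subset_name, repre_doc["context"]["version"])
```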

View file

@ -0,0 +1,68 @@
from importlib import import_module
from openpype.lib import classes_from_module
from openpype.host import HostBase
from openpype.pipeline import registered_host
from .abstract_template_loader import (
AbstractPlaceholder,
AbstractTemplateLoader)
from .build_template_exceptions import (
TemplateLoadingFailed,
TemplateAlreadyImported,
MissingHostTemplateModule,
MissingTemplatePlaceholderClass,
MissingTemplateLoaderClass
)
_module_path_format = 'openpype.hosts.{host}.api.template_loader'
def build_workfile_template(*args):
template_loader = build_template_loader()
try:
template_loader.import_template(template_loader.template_path)
except TemplateAlreadyImported as err:
template_loader.template_already_imported(err)
except TemplateLoadingFailed as err:
template_loader.template_loading_failed(err)
else:
template_loader.populate_template()
def update_workfile_template(args):
template_loader = build_template_loader()
template_loader.update_missing_containers()
def build_template_loader():
# TODO refactor to take advantage of 'HostBase' and avoid dynamic imports
# - hosts should have methods that gives option to return builders
host = registered_host()
if isinstance(host, HostBase):
host_name = host.name
else:
host_name = host.__name__.partition('.')[2]
module_path = _module_path_format.format(host=host_name)
module = import_module(module_path)
if not module:
raise MissingHostTemplateModule(
"No template loader found for host {}".format(host_name))
template_loader_class = classes_from_module(
AbstractTemplateLoader,
module
)
template_placeholder_class = classes_from_module(
AbstractPlaceholder,
module
)
if not template_loader_class:
raise MissingTemplateLoaderClass()
template_loader_class = template_loader_class[0]
if not template_placeholder_class:
raise MissingTemplatePlaceholderClass()
template_placeholder_class = template_placeholder_class[0]
return template_loader_class(template_placeholder_class)
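The dynamic import above expects each host package to provide a module at `openpype.hosts.<host>.api.template_loader` defining one subclass of each abstract class. A minimal, hedged skeleton of such a module (method bodies are placeholders, not a working implementation) could look like this:

```python
from openpype.pipeline.workfile.abstract_template_loader import (
    AbstractPlaceholder,
    AbstractTemplateLoader,
)


class MyHostTemplateLoader(AbstractTemplateLoader):
    def import_template(self, template_path):
        """Host specific import of the template file."""

    def get_template_nodes(self):
        """Host specific query of placeholder nodes."""
        return []

    def get_loaded_containers_by_id(self):
        """Host specific lookup of already loaded containers."""
        return None


class MyHostPlaceholder(AbstractPlaceholder):
    def get_data(self, identifier):
        self.data = {}  # host specific read of placeholder attributes

    def get_representations(self, current_asset_doc, linked_asset_docs):
        return []  # host specific filters passed to get_representations

    def parent_in_hierarchy(self, containers):
        """Host specific re-parenting of loaded content."""

    def clean(self):
        """Host specific cleanup of the placeholder node."""
```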

View file

@ -0,0 +1,35 @@
class MissingHostTemplateModule(Exception):
"""Error raised when expected module does not exists"""
pass
class MissingTemplatePlaceholderClass(Exception):
"""Error raised when module doesn't implement a placeholder class"""
pass
class MissingTemplateLoaderClass(Exception):
"""Error raised when module doesn't implement a template loader class"""
pass
class TemplateNotFound(Exception):
"""Exception raised when template does not exist."""
pass
class TemplateProfileNotFound(Exception):
"""Exception raised when current profile
doesn't match any template profile"""
pass
class TemplateAlreadyImported(Exception):
"""Error raised when Template was already imported by host for
this session"""
pass
class TemplateLoadingFailed(Exception):
"""Error raised whend Template loader was unable to load the template"""
pass

View file

@ -967,6 +967,9 @@
}
]
},
"templated_workfile_build": {
"profiles": []
},
"filters": {
"preset 1": {
"ValidateNoAnimation": false,
@ -976,4 +979,4 @@
"ValidateNoAnimation": false
}
}
}
}

View file

@ -77,6 +77,10 @@
"type": "schema",
"name": "schema_workfile_build"
},
{
"type": "schema",
"name": "schema_templated_workfile_build"
},
{
"type": "schema",
"name": "schema_publish_gui_filter"

View file

@ -0,0 +1,35 @@
{
"type": "dict",
"collapsible": true,
"key": "templated_workfile_build",
"label": "Templated Workfile Build Settings",
"children": [
{
"type": "list",
"key": "profiles",
"label": "Profiles",
"object_type": {
"type": "dict",
"children": [
{
"key": "task_types",
"label": "Task types",
"type": "task-types-enum"
},
{
"key": "tasks",
"label": "Task names",
"type": "list",
"object_type": "text"
},
{
"key": "path",
"label": "Path to template",
"type": "text",
"object_type": "text"
}
]
}
}
]
}

View file

@ -120,3 +120,54 @@ raw json.
You can configure path mapping using Maya `dirmap` command. This will add bi-directional mapping between
list of paths specified in **Settings**. You can find it in **Settings -> Project Settings -> Maya -> Maya Directory Mapping**
![Dirmap settings](assets/maya-admin_dirmap_settings.png)
## Templated Build Workfile
Build a workfile from a template designed by users. This helps ensure a homogeneous subset hierarchy and consistent imports. The template is stored as a file, which makes it easy to define, change and customize for production needs.
**1. Make a template**
Make your template. Add families and everything needed for your tasks. Here is an example template for the modeling task using a placeholder to import a gauge.
![maya outliner](assets/maya-workfile-outliner.png)
If needed, you can add placeholders when the template needs to load some assets. **OpenPype > Template Builder > Create Placeholder**
![create placeholder](assets/maya-create_placeholder.png)
- **Configure placeholders**
Fill in the necessary fields (the optional fields are regex filters)
![new place holder](assets/maya-placeholder_new.png)
- Builder type: Whether the placeholder should load representations of the current asset or of linked assets
- Representation: Representation that will be loaded (ex: ma, abc, png, etc...)
- Family: Family of the representation to load (model, look, image, etc.)
- Loader: Placeholder loader name that will be used to load corresponding representations
- Order: Priority for current placeholder loader (priority is lowest first, highest last)
- **Save your template**
**2. Configure Template**
- **Go to Studio settings > Project > Your DCC > Templated Build Settings**
- Add a profile for your task and enter path to your template
![setting build template](assets/settings/template_build_workfile.png)
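Internally the profile is read from project settings; a hedged sketch of how it can be inspected from Python (the project name, task type and template path below are hypothetical examples):

```python
from openpype.settings import get_project_settings

settings = get_project_settings("demo_project")
profiles = settings["maya"]["templated_workfile_build"]["profiles"]

# A configured profile could look roughly like this:
# {
#     "task_types": ["Modeling"],
#     "tasks": ["modeling"],
#     "path": "{root[work]}/templates/modeling_template.ma"
# }
print(profiles)
```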
**3. Build your workfile**
- Open maya
- Build your workfile
![maya build template](assets/maya-build_workfile_from_template.png)

5 binary image files added (documentation assets, not shown).