Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-25 05:14:40 +01:00

Commit 2d8a6e17d4: [Automated] Merged develop into main

38 changed files with 2596 additions and 1864 deletions
@@ -5,6 +5,7 @@ from .host import (
 from .interfaces import (
     IWorkfileHost,
     ILoadHost,
+    IPublishHost,
     INewPublisher,
 )

@@ -16,6 +17,7 @@ __all__ = (

     "IWorkfileHost",
     "ILoadHost",
+    "IPublishHost",
     "INewPublisher",

     "HostDirmap",

@@ -282,7 +282,7 @@ class IWorkfileHost:
         return self.workfile_has_unsaved_changes()


-class INewPublisher:
+class IPublishHost:
     """Functions related to new creation system in new publisher.

     New publisher is not storing information only about each created instance

@@ -306,7 +306,7 @@ class INewPublisher:
                 workflow.
         """

-        if isinstance(host, INewPublisher):
+        if isinstance(host, IPublishHost):
             return []

         required = [

@@ -330,7 +330,7 @@ class INewPublisher:
             MissingMethodsError: If there are missing methods on host
                 implementation.
         """
-        missing = INewPublisher.get_missing_publish_methods(host)
+        missing = IPublishHost.get_missing_publish_methods(host)
         if missing:
             raise MissingMethodsError(host, missing)


@@ -368,3 +368,17 @@ class INewPublisher:
         """

         pass
+
+
+class INewPublisher(IPublishHost):
+    """Legacy interface replaced by 'IPublishHost'.
+
+    Deprecated:
+        'INewPublisher' is replaced by 'IPublishHost', please change your
+        imports.
+        There is no "reasonable" way to mark these classes as deprecated
+        to show warning of wrong import. Deprecated since 3.14.*, will be
+        removed in 3.15.*
+    """
+
+    pass

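The compatibility shim added above means code that still inherits or imports 'INewPublisher' keeps satisfying the renamed 'IPublishHost' checks. A minimal sketch of that relationship, assuming the package import path implied by the hunk above ('openpype.host'); the LegacyHost class is illustrative only, not part of this commit:

    from openpype.host import IPublishHost, INewPublisher

    # The deprecated name is now just a thin subclass of the new interface.
    assert issubclass(INewPublisher, IPublishHost)


    class LegacyHost(INewPublisher):
        """Illustrative host that has not updated its imports yet."""
        pass


    # Checks updated in this commit (isinstance(host, IPublishHost))
    # still accept hosts that derive from the deprecated class.
    assert issubclass(LegacyHost, IPublishHost)
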
@@ -1,10 +1,10 @@
 import os
 import pyblish.api
-import openpype.api
 import opentimelineio as otio
+from openpype.pipeline import publish


-class ExtractOTIOFile(openpype.api.Extractor):
+class ExtractOTIOFile(publish.Extractor):
     """
     Extractor export OTIO file
     """

@@ -4,7 +4,8 @@ import tempfile
 from copy import deepcopy

 import pyblish.api
-import openpype.api
+
+from openpype.pipeline import publish
 from openpype.hosts.flame import api as opfapi
 from openpype.hosts.flame.api import MediaInfoFile
 from openpype.pipeline.editorial import (

@@ -14,7 +15,7 @@ from openpype.pipeline.editorial import (
 import flame


-class ExtractSubsetResources(openpype.api.Extractor):
+class ExtractSubsetResources(publish.Extractor):
     """
     Extractor for transcoding files from Flame clip
     """

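Both Flame plugins above only swap the base class: 'openpype.api.Extractor' is dropped in favour of 'publish.Extractor' from 'openpype.pipeline'. A hedged sketch of the same migration for any other extractor plugin; the class name, label and logging below are illustrative, not part of this commit:

    import pyblish.api

    from openpype.pipeline import publish


    class ExtractExampleResource(publish.Extractor):
        """Illustrative extractor using the relocated base class."""

        order = pyblish.api.ExtractorOrder
        label = "Extract Example Resource"

        def process(self, instance):
            # The Extractor helpers (e.g. staging_dir) behave the same;
            # only the import location of the base class changed.
            staging_dir = self.staging_dir(instance)
            self.log.debug("Staging to {}".format(staging_dir))
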
@ -1,253 +0,0 @@
|
|||
import json
|
||||
from collections import OrderedDict
|
||||
import maya.cmds as cmds
|
||||
|
||||
import qargparse
|
||||
from openpype.tools.utils.widgets import OptionDialog
|
||||
from .lib import get_main_window, imprint
|
||||
|
||||
# To change as enum
|
||||
build_types = ["context_asset", "linked_asset", "all_assets"]
|
||||
|
||||
|
||||
def get_placeholder_attributes(node):
|
||||
return {
|
||||
attr: cmds.getAttr("{}.{}".format(node, attr))
|
||||
for attr in cmds.listAttr(node, userDefined=True)}
|
||||
|
||||
|
||||
def delete_placeholder_attributes(node):
|
||||
'''
|
||||
function to delete all extra placeholder attributes
|
||||
'''
|
||||
extra_attributes = get_placeholder_attributes(node)
|
||||
for attribute in extra_attributes:
|
||||
cmds.deleteAttr(node + '.' + attribute)
|
||||
|
||||
|
||||
def create_placeholder():
|
||||
args = placeholder_window()
|
||||
|
||||
if not args:
|
||||
return # operation canceled, no locator created
|
||||
|
||||
# custom arg parse to force empty data query
|
||||
# and still imprint them on placeholder
|
||||
# and getting items when arg is of type Enumerator
|
||||
options = create_options(args)
|
||||
|
||||
# create placeholder name dynamically from args and options
|
||||
placeholder_name = create_placeholder_name(args, options)
|
||||
|
||||
selection = cmds.ls(selection=True)
|
||||
if not selection:
|
||||
raise ValueError("Nothing is selected")
|
||||
|
||||
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
|
||||
|
||||
# get the long name of the placeholder (with the groups)
|
||||
placeholder_full_name = cmds.ls(selection[0], long=True)[
|
||||
0] + '|' + placeholder.replace('|', '')
|
||||
|
||||
if selection:
|
||||
cmds.parent(placeholder, selection[0])
|
||||
|
||||
imprint(placeholder_full_name, options)
|
||||
|
||||
# Some tweaks because imprint force enums to to default value so we get
|
||||
# back arg read and force them to attributes
|
||||
imprint_enum(placeholder_full_name, args)
|
||||
|
||||
# Add helper attributes to keep placeholder info
|
||||
cmds.addAttr(
|
||||
placeholder_full_name,
|
||||
longName="parent",
|
||||
hidden=True,
|
||||
dataType="string"
|
||||
)
|
||||
cmds.addAttr(
|
||||
placeholder_full_name,
|
||||
longName="index",
|
||||
hidden=True,
|
||||
attributeType="short",
|
||||
defaultValue=-1
|
||||
)
|
||||
|
||||
cmds.setAttr(placeholder_full_name + '.parent', "", type="string")
|
||||
|
||||
|
||||
def create_placeholder_name(args, options):
|
||||
placeholder_builder_type = [
|
||||
arg.read() for arg in args if 'builder_type' in str(arg)
|
||||
][0]
|
||||
placeholder_family = options['family']
|
||||
placeholder_name = placeholder_builder_type.split('_')
|
||||
|
||||
# add family if any
|
||||
if placeholder_family:
|
||||
placeholder_name.insert(1, placeholder_family)
|
||||
|
||||
# add loader arguments if any
|
||||
if options['loader_args']:
|
||||
pos = 2
|
||||
loader_args = options['loader_args'].replace('\'', '\"')
|
||||
loader_args = json.loads(loader_args)
|
||||
values = [v for v in loader_args.values()]
|
||||
for i in range(len(values)):
|
||||
placeholder_name.insert(i + pos, values[i])
|
||||
|
||||
placeholder_name = '_'.join(placeholder_name)
|
||||
|
||||
return placeholder_name.capitalize()
|
||||
|
||||
|
||||
def update_placeholder():
|
||||
placeholder = cmds.ls(selection=True)
|
||||
if len(placeholder) == 0:
|
||||
raise ValueError("No node selected")
|
||||
if len(placeholder) > 1:
|
||||
raise ValueError("Too many selected nodes")
|
||||
placeholder = placeholder[0]
|
||||
|
||||
args = placeholder_window(get_placeholder_attributes(placeholder))
|
||||
|
||||
if not args:
|
||||
return # operation canceled
|
||||
|
||||
# delete placeholder attributes
|
||||
delete_placeholder_attributes(placeholder)
|
||||
|
||||
options = create_options(args)
|
||||
|
||||
imprint(placeholder, options)
|
||||
imprint_enum(placeholder, args)
|
||||
|
||||
cmds.addAttr(
|
||||
placeholder,
|
||||
longName="parent",
|
||||
hidden=True,
|
||||
dataType="string"
|
||||
)
|
||||
cmds.addAttr(
|
||||
placeholder,
|
||||
longName="index",
|
||||
hidden=True,
|
||||
attributeType="short",
|
||||
defaultValue=-1
|
||||
)
|
||||
|
||||
cmds.setAttr(placeholder + '.parent', '', type="string")
|
||||
|
||||
|
||||
def create_options(args):
|
||||
options = OrderedDict()
|
||||
for arg in args:
|
||||
if not type(arg) == qargparse.Separator:
|
||||
options[str(arg)] = arg._data.get("items") or arg.read()
|
||||
return options
|
||||
|
||||
|
||||
def imprint_enum(placeholder, args):
|
||||
"""
|
||||
Imprint method doesn't act properly with enums.
|
||||
Replacing the functionnality with this for now
|
||||
"""
|
||||
enum_values = {str(arg): arg.read()
|
||||
for arg in args if arg._data.get("items")}
|
||||
string_to_value_enum_table = {
|
||||
build: i for i, build
|
||||
in enumerate(build_types)}
|
||||
for key, value in enum_values.items():
|
||||
cmds.setAttr(
|
||||
placeholder + "." + key,
|
||||
string_to_value_enum_table[value])
|
||||
|
||||
|
||||
def placeholder_window(options=None):
|
||||
options = options or dict()
|
||||
dialog = OptionDialog(parent=get_main_window())
|
||||
dialog.setWindowTitle("Create Placeholder")
|
||||
|
||||
args = [
|
||||
qargparse.Separator("Main attributes"),
|
||||
qargparse.Enum(
|
||||
"builder_type",
|
||||
label="Asset Builder Type",
|
||||
default=options.get("builder_type", 0),
|
||||
items=build_types,
|
||||
help="""Asset Builder Type
|
||||
Builder type describe what template loader will look for.
|
||||
context_asset : Template loader will look for subsets of
|
||||
current context asset (Asset bob will find asset)
|
||||
linked_asset : Template loader will look for assets linked
|
||||
to current context asset.
|
||||
Linked asset are looked in avalon database under field "inputLinks"
|
||||
"""
|
||||
),
|
||||
qargparse.String(
|
||||
"family",
|
||||
default=options.get("family", ""),
|
||||
label="OpenPype Family",
|
||||
placeholder="ex: model, look ..."),
|
||||
qargparse.String(
|
||||
"representation",
|
||||
default=options.get("representation", ""),
|
||||
label="OpenPype Representation",
|
||||
placeholder="ex: ma, abc ..."),
|
||||
qargparse.String(
|
||||
"loader",
|
||||
default=options.get("loader", ""),
|
||||
label="Loader",
|
||||
placeholder="ex: ReferenceLoader, LightLoader ...",
|
||||
help="""Loader
|
||||
Defines what openpype loader will be used to load assets.
|
||||
Useable loader depends on current host's loader list.
|
||||
Field is case sensitive.
|
||||
"""),
|
||||
qargparse.String(
|
||||
"loader_args",
|
||||
default=options.get("loader_args", ""),
|
||||
label="Loader Arguments",
|
||||
placeholder='ex: {"camera":"persp", "lights":True}',
|
||||
help="""Loader
|
||||
Defines a dictionnary of arguments used to load assets.
|
||||
Useable arguments depend on current placeholder Loader.
|
||||
Field should be a valid python dict. Anything else will be ignored.
|
||||
"""),
|
||||
qargparse.Integer(
|
||||
"order",
|
||||
default=options.get("order", 0),
|
||||
min=0,
|
||||
max=999,
|
||||
label="Order",
|
||||
placeholder="ex: 0, 100 ... (smallest order loaded first)",
|
||||
help="""Order
|
||||
Order defines asset loading priority (0 to 999)
|
||||
Priority rule is : "lowest is first to load"."""),
|
||||
qargparse.Separator(
|
||||
"Optional attributes"),
|
||||
qargparse.String(
|
||||
"asset",
|
||||
default=options.get("asset", ""),
|
||||
label="Asset filter",
|
||||
placeholder="regex filtering by asset name",
|
||||
help="Filtering assets by matching field regex to asset's name"),
|
||||
qargparse.String(
|
||||
"subset",
|
||||
default=options.get("subset", ""),
|
||||
label="Subset filter",
|
||||
placeholder="regex filtering by subset name",
|
||||
help="Filtering assets by matching field regex to subset's name"),
|
||||
qargparse.String(
|
||||
"hierarchy",
|
||||
default=options.get("hierarchy", ""),
|
||||
label="Hierarchy filter",
|
||||
placeholder="regex filtering by asset's hierarchy",
|
||||
help="Filtering assets by matching field asset's hierarchy")
|
||||
]
|
||||
dialog.create(args)
|
||||
|
||||
if not dialog.exec_():
|
||||
return None
|
||||
|
||||
return args
|
||||
|
|
@@ -9,16 +9,17 @@ import maya.cmds as cmds
 from openpype.settings import get_project_settings
 from openpype.pipeline import legacy_io
 from openpype.pipeline.workfile import BuildWorkfile
-from openpype.pipeline.workfile.build_template import (
-    build_workfile_template,
-    update_workfile_template
-)
 from openpype.tools.utils import host_tools
 from openpype.hosts.maya.api import lib, lib_rendersettings
 from .lib import get_main_window, IS_HEADLESS
 from .commands import reset_frame_range
-from .lib_template_builder import create_placeholder, update_placeholder
+from .workfile_template_builder import (
+    create_placeholder,
+    update_placeholder,
+    build_workfile_template,
+    update_workfile_template,
+)

 log = logging.getLogger(__name__)


@@ -161,12 +162,12 @@ def install():
     cmds.menuItem(
         "Create Placeholder",
         parent=builder_menu,
-        command=lambda *args: create_placeholder()
+        command=create_placeholder
     )
     cmds.menuItem(
         "Update Placeholder",
         parent=builder_menu,
-        command=lambda *args: update_placeholder()
+        command=update_placeholder
     )
     cmds.menuItem(
         "Build Workfile from template",

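The lambda wrappers become unnecessary because the new 'create_placeholder' and 'update_placeholder' are module-level functions declared with '*args' (see the new workfile_template_builder module later in this diff), so whatever extra arguments Maya passes to a menu command are simply absorbed. A small illustrative sketch of the pattern outside OpenPype:

    from maya import cmds


    def create_placeholder(*args):
        # Maya appends its own state arguments to menu callbacks;
        # *args swallows them, so no lambda wrapper is needed.
        print("create placeholder called with", args)


    cmds.menuItem(
        "Create Placeholder",
        parent="exampleBuilderMenu",  # stand-in for builder_menu
        command=create_placeholder
    )
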
@@ -42,6 +42,7 @@ from openpype.hosts.maya import MAYA_ROOT_DIR
 from openpype.hosts.maya.lib import create_workspace_mel

 from . import menu, lib
+from .workfile_template_builder import MayaPlaceholderLoadPlugin
 from .workio import (
     open_file,
     save_file,

@@ -135,6 +136,11 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost):
     def get_containers(self):
         return ls()

+    def get_workfile_build_placeholder_plugins(self):
+        return [
+            MayaPlaceholderLoadPlugin
+        ]
+
     @contextlib.contextmanager
     def maintained_selection(self):
         with lib.maintained_selection():

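'get_workfile_build_placeholder_plugins' is the hook the workfile template builder calls to discover which placeholder plugin classes a host offers; MayaHost now returns 'MayaPlaceholderLoadPlugin', defined in the new module later in this diff. A hedged sketch of a consumer of that hook; the helper function is illustrative and not part of the builder API:

    def list_placeholder_plugin_labels(host):
        """Illustrative helper: collect labels of a host's placeholder plugins."""
        getter = getattr(host, "get_workfile_build_placeholder_plugins", None)
        plugin_classes = getter() if getter else []
        return [
            getattr(cls, "label", cls.__name__)
            for cls in plugin_classes
        ]

    # With the MayaHost change above this would yield ["Maya load"].
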
@@ -1,252 +0,0 @@
|
|||
import re
|
||||
from maya import cmds
|
||||
|
||||
from openpype.client import get_representations
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.pipeline.workfile.abstract_template_loader import (
|
||||
AbstractPlaceholder,
|
||||
AbstractTemplateLoader
|
||||
)
|
||||
from openpype.pipeline.workfile.build_template_exceptions import (
|
||||
TemplateAlreadyImported
|
||||
)
|
||||
|
||||
PLACEHOLDER_SET = 'PLACEHOLDERS_SET'
|
||||
|
||||
|
||||
class MayaTemplateLoader(AbstractTemplateLoader):
|
||||
"""Concrete implementation of AbstractTemplateLoader for maya
|
||||
"""
|
||||
|
||||
def import_template(self, path):
|
||||
"""Import template into current scene.
|
||||
Block if a template is already loaded.
|
||||
Args:
|
||||
path (str): A path to current template (usually given by
|
||||
get_template_path implementation)
|
||||
Returns:
|
||||
bool: Wether the template was succesfully imported or not
|
||||
"""
|
||||
if cmds.objExists(PLACEHOLDER_SET):
|
||||
raise TemplateAlreadyImported(
|
||||
"Build template already loaded\n"
|
||||
"Clean scene if needed (File > New Scene)")
|
||||
|
||||
cmds.sets(name=PLACEHOLDER_SET, empty=True)
|
||||
self.new_nodes = cmds.file(path, i=True, returnNewNodes=True)
|
||||
cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True)
|
||||
|
||||
for set in cmds.listSets(allSets=True):
|
||||
if (cmds.objExists(set) and
|
||||
cmds.attributeQuery('id', node=set, exists=True) and
|
||||
cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'):
|
||||
if cmds.attributeQuery('asset', node=set, exists=True):
|
||||
cmds.setAttr(
|
||||
set + '.asset',
|
||||
legacy_io.Session['AVALON_ASSET'], type='string'
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def template_already_imported(self, err_msg):
|
||||
clearButton = "Clear scene and build"
|
||||
updateButton = "Update template"
|
||||
abortButton = "Abort"
|
||||
|
||||
title = "Scene already builded"
|
||||
message = (
|
||||
"It's seems a template was already build for this scene.\n"
|
||||
"Error message reveived :\n\n\"{}\"".format(err_msg))
|
||||
buttons = [clearButton, updateButton, abortButton]
|
||||
defaultButton = clearButton
|
||||
cancelButton = abortButton
|
||||
dismissString = abortButton
|
||||
answer = cmds.confirmDialog(
|
||||
t=title,
|
||||
m=message,
|
||||
b=buttons,
|
||||
db=defaultButton,
|
||||
cb=cancelButton,
|
||||
ds=dismissString)
|
||||
|
||||
if answer == clearButton:
|
||||
cmds.file(newFile=True, force=True)
|
||||
self.import_template(self.template_path)
|
||||
self.populate_template()
|
||||
elif answer == updateButton:
|
||||
self.update_missing_containers()
|
||||
elif answer == abortButton:
|
||||
return
|
||||
|
||||
@staticmethod
|
||||
def get_template_nodes():
|
||||
attributes = cmds.ls('*.builder_type', long=True)
|
||||
return [attribute.rpartition('.')[0] for attribute in attributes]
|
||||
|
||||
def get_loaded_containers_by_id(self):
|
||||
try:
|
||||
containers = cmds.sets("AVALON_CONTAINERS", q=True)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
return [
|
||||
cmds.getAttr(container + '.representation')
|
||||
for container in containers]
|
||||
|
||||
|
||||
class MayaPlaceholder(AbstractPlaceholder):
|
||||
"""Concrete implementation of AbstractPlaceholder for maya
|
||||
"""
|
||||
|
||||
optional_keys = {'asset', 'subset', 'hierarchy'}
|
||||
|
||||
def get_data(self, node):
|
||||
user_data = dict()
|
||||
for attr in self.required_keys.union(self.optional_keys):
|
||||
attribute_name = '{}.{}'.format(node, attr)
|
||||
if not cmds.attributeQuery(attr, node=node, exists=True):
|
||||
print("{} not found".format(attribute_name))
|
||||
continue
|
||||
user_data[attr] = cmds.getAttr(
|
||||
attribute_name,
|
||||
asString=True)
|
||||
user_data['parent'] = (
|
||||
cmds.getAttr(node + '.parent', asString=True)
|
||||
or node.rpartition('|')[0]
|
||||
or ""
|
||||
)
|
||||
user_data['node'] = node
|
||||
if user_data['parent']:
|
||||
siblings = cmds.listRelatives(user_data['parent'], children=True)
|
||||
else:
|
||||
siblings = cmds.ls(assemblies=True)
|
||||
node_shortname = user_data['node'].rpartition('|')[2]
|
||||
current_index = cmds.getAttr(node + '.index', asString=True)
|
||||
user_data['index'] = (
|
||||
current_index if current_index >= 0
|
||||
else siblings.index(node_shortname))
|
||||
|
||||
self.data = user_data
|
||||
|
||||
def parent_in_hierarchy(self, containers):
|
||||
"""Parent loaded container to placeholder's parent
|
||||
ie : Set loaded content as placeholder's sibling
|
||||
Args:
|
||||
containers (String): Placeholder loaded containers
|
||||
"""
|
||||
if not containers:
|
||||
return
|
||||
|
||||
roots = cmds.sets(containers, q=True)
|
||||
nodes_to_parent = []
|
||||
for root in roots:
|
||||
if root.endswith("_RN"):
|
||||
refRoot = cmds.referenceQuery(root, n=True)[0]
|
||||
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
|
||||
nodes_to_parent.extend(refRoot)
|
||||
elif root in cmds.listSets(allSets=True):
|
||||
if not cmds.sets(root, q=True):
|
||||
return
|
||||
else:
|
||||
continue
|
||||
else:
|
||||
nodes_to_parent.append(root)
|
||||
|
||||
if self.data['parent']:
|
||||
cmds.parent(nodes_to_parent, self.data['parent'])
|
||||
# Move loaded nodes to correct index in outliner hierarchy
|
||||
placeholder_node = self.data['node']
|
||||
placeholder_form = cmds.xform(
|
||||
placeholder_node,
|
||||
q=True,
|
||||
matrix=True,
|
||||
worldSpace=True
|
||||
)
|
||||
for node in set(nodes_to_parent):
|
||||
cmds.reorder(node, front=True)
|
||||
cmds.reorder(node, relative=self.data['index'])
|
||||
cmds.xform(node, matrix=placeholder_form, ws=True)
|
||||
|
||||
holding_sets = cmds.listSets(object=placeholder_node)
|
||||
if not holding_sets:
|
||||
return
|
||||
for holding_set in holding_sets:
|
||||
cmds.sets(roots, forceElement=holding_set)
|
||||
|
||||
def clean(self):
|
||||
"""Hide placeholder, parent them to root
|
||||
add them to placeholder set and register placeholder's parent
|
||||
to keep placeholder info available for future use
|
||||
"""
|
||||
node = self.data['node']
|
||||
if self.data['parent']:
|
||||
cmds.setAttr(node + '.parent', self.data['parent'], type='string')
|
||||
if cmds.getAttr(node + '.index') < 0:
|
||||
cmds.setAttr(node + '.index', self.data['index'])
|
||||
|
||||
holding_sets = cmds.listSets(object=node)
|
||||
if holding_sets:
|
||||
for set in holding_sets:
|
||||
cmds.sets(node, remove=set)
|
||||
|
||||
if cmds.listRelatives(node, p=True):
|
||||
node = cmds.parent(node, world=True)[0]
|
||||
cmds.sets(node, addElement=PLACEHOLDER_SET)
|
||||
cmds.hide(node)
|
||||
cmds.setAttr(node + '.hiddenInOutliner', True)
|
||||
|
||||
def get_representations(self, current_asset_doc, linked_asset_docs):
|
||||
project_name = legacy_io.active_project()
|
||||
|
||||
builder_type = self.data["builder_type"]
|
||||
if builder_type == "context_asset":
|
||||
context_filters = {
|
||||
"asset": [current_asset_doc["name"]],
|
||||
"subset": [re.compile(self.data["subset"])],
|
||||
"hierarchy": [re.compile(self.data["hierarchy"])],
|
||||
"representations": [self.data["representation"]],
|
||||
"family": [self.data["family"]]
|
||||
}
|
||||
|
||||
elif builder_type != "linked_asset":
|
||||
context_filters = {
|
||||
"asset": [re.compile(self.data["asset"])],
|
||||
"subset": [re.compile(self.data["subset"])],
|
||||
"hierarchy": [re.compile(self.data["hierarchy"])],
|
||||
"representation": [self.data["representation"]],
|
||||
"family": [self.data["family"]]
|
||||
}
|
||||
|
||||
else:
|
||||
asset_regex = re.compile(self.data["asset"])
|
||||
linked_asset_names = []
|
||||
for asset_doc in linked_asset_docs:
|
||||
asset_name = asset_doc["name"]
|
||||
if asset_regex.match(asset_name):
|
||||
linked_asset_names.append(asset_name)
|
||||
|
||||
context_filters = {
|
||||
"asset": linked_asset_names,
|
||||
"subset": [re.compile(self.data["subset"])],
|
||||
"hierarchy": [re.compile(self.data["hierarchy"])],
|
||||
"representation": [self.data["representation"]],
|
||||
"family": [self.data["family"]],
|
||||
}
|
||||
|
||||
return list(get_representations(
|
||||
project_name,
|
||||
context_filters=context_filters
|
||||
))
|
||||
|
||||
def err_message(self):
|
||||
return (
|
||||
"Error while trying to load a representation.\n"
|
||||
"Either the subset wasn't published or the template is malformed."
|
||||
"\n\n"
|
||||
"Builder was looking for :\n{attributes}".format(
|
||||
attributes="\n".join([
|
||||
"{}: {}".format(key.title(), value)
|
||||
for key, value in self.data.items()]
|
||||
)
|
||||
)
|
||||
)
|
||||
openpype/hosts/maya/api/workfile_template_builder.py (new file, 330 lines)
|
|
@@ -0,0 +1,330 @@
|
|||
import json
|
||||
|
||||
from maya import cmds
|
||||
|
||||
from openpype.pipeline import registered_host
|
||||
from openpype.pipeline.workfile.workfile_template_builder import (
|
||||
TemplateAlreadyImported,
|
||||
AbstractTemplateBuilder,
|
||||
PlaceholderPlugin,
|
||||
LoadPlaceholderItem,
|
||||
PlaceholderLoadMixin,
|
||||
)
|
||||
from openpype.tools.workfile_template_build import (
|
||||
WorkfileBuildPlaceholderDialog,
|
||||
)
|
||||
|
||||
from .lib import read, imprint
|
||||
|
||||
PLACEHOLDER_SET = "PLACEHOLDERS_SET"
|
||||
|
||||
|
||||
class MayaTemplateBuilder(AbstractTemplateBuilder):
|
||||
"""Concrete implementation of AbstractTemplateBuilder for maya"""
|
||||
|
||||
def import_template(self, path):
|
||||
"""Import template into current scene.
|
||||
Block if a template is already loaded.
|
||||
|
||||
Args:
|
||||
path (str): A path to current template (usually given by
|
||||
get_template_path implementation)
|
||||
|
||||
Returns:
|
||||
bool: Whether the template was successfully imported or not
|
||||
"""
|
||||
|
||||
if cmds.objExists(PLACEHOLDER_SET):
|
||||
raise TemplateAlreadyImported((
|
||||
"Build template already loaded\n"
|
||||
"Clean scene if needed (File > New Scene)"
|
||||
))
|
||||
|
||||
cmds.sets(name=PLACEHOLDER_SET, empty=True)
|
||||
cmds.file(path, i=True, returnNewNodes=True)
|
||||
|
||||
cmds.setAttr(PLACEHOLDER_SET + ".hiddenInOutliner", True)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
|
||||
identifier = "maya.load"
|
||||
label = "Maya load"
|
||||
|
||||
def _collect_scene_placeholders(self):
|
||||
# Cache placeholder data to shared data
|
||||
placeholder_nodes = self.builder.get_shared_populate_data(
|
||||
"placeholder_nodes"
|
||||
)
|
||||
if placeholder_nodes is None:
|
||||
attributes = cmds.ls("*.plugin_identifier", long=True)
|
||||
placeholder_nodes = {}
|
||||
for attribute in attributes:
|
||||
node_name = attribute.rpartition(".")[0]
|
||||
placeholder_nodes[node_name] = (
|
||||
self._parse_placeholder_node_data(node_name)
|
||||
)
|
||||
|
||||
self.builder.set_shared_populate_data(
|
||||
"placeholder_nodes", placeholder_nodes
|
||||
)
|
||||
return placeholder_nodes
|
||||
|
||||
def _parse_placeholder_node_data(self, node_name):
|
||||
placeholder_data = read(node_name)
|
||||
parent_name = (
|
||||
cmds.getAttr(node_name + ".parent", asString=True)
|
||||
or node_name.rpartition("|")[0]
|
||||
or ""
|
||||
)
|
||||
if parent_name:
|
||||
siblings = cmds.listRelatives(parent_name, children=True)
|
||||
else:
|
||||
siblings = cmds.ls(assemblies=True)
|
||||
node_shortname = node_name.rpartition("|")[2]
|
||||
current_index = cmds.getAttr(node_name + ".index", asString=True)
|
||||
if current_index < 0:
|
||||
current_index = siblings.index(node_shortname)
|
||||
|
||||
placeholder_data.update({
|
||||
"parent": parent_name,
|
||||
"index": current_index
|
||||
})
|
||||
return placeholder_data
|
||||
|
||||
def _create_placeholder_name(self, placeholder_data):
|
||||
placeholder_name_parts = placeholder_data["builder_type"].split("_")
|
||||
|
||||
pos = 1
|
||||
# add family if any
|
||||
placeholder_family = placeholder_data["family"]
|
||||
if placeholder_family:
|
||||
placeholder_name_parts.insert(pos, placeholder_family)
|
||||
pos += 1
|
||||
|
||||
# add loader arguments if any
|
||||
loader_args = placeholder_data["loader_args"]
|
||||
if loader_args:
|
||||
loader_args = json.loads(loader_args.replace('\'', '\"'))
|
||||
values = [v for v in loader_args.values()]
|
||||
for value in values:
|
||||
placeholder_name_parts.insert(pos, value)
|
||||
pos += 1
|
||||
|
||||
placeholder_name = "_".join(placeholder_name_parts)
|
||||
|
||||
return placeholder_name.capitalize()
|
||||
|
||||
def _get_loaded_repre_ids(self):
|
||||
loaded_representation_ids = self.builder.get_shared_populate_data(
|
||||
"loaded_representation_ids"
|
||||
)
|
||||
if loaded_representation_ids is None:
|
||||
try:
|
||||
containers = cmds.sets("AVALON_CONTAINERS", q=True)
|
||||
except ValueError:
|
||||
containers = []
|
||||
|
||||
loaded_representation_ids = {
|
||||
cmds.getAttr(container + ".representation")
|
||||
for container in containers
|
||||
}
|
||||
self.builder.set_shared_populate_data(
|
||||
"loaded_representation_ids", loaded_representation_ids
|
||||
)
|
||||
return loaded_representation_ids
|
||||
|
||||
def create_placeholder(self, placeholder_data):
|
||||
selection = cmds.ls(selection=True)
|
||||
if not selection:
|
||||
raise ValueError("Nothing is selected")
|
||||
if len(selection) > 1:
|
||||
raise ValueError("More then one item are selected")
|
||||
|
||||
placeholder_data["plugin_identifier"] = self.identifier
|
||||
|
||||
placeholder_name = self._create_placeholder_name(placeholder_data)
|
||||
|
||||
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
|
||||
# TODO: this can crash if selection can't be used
|
||||
cmds.parent(placeholder, selection[0])
|
||||
|
||||
# get the long name of the placeholder (with the groups)
|
||||
placeholder_full_name = (
|
||||
cmds.ls(selection[0], long=True)[0]
|
||||
+ "|"
|
||||
+ placeholder.replace("|", "")
|
||||
)
|
||||
|
||||
imprint(placeholder_full_name, placeholder_data)
|
||||
|
||||
# Add helper attributes to keep placeholder info
|
||||
cmds.addAttr(
|
||||
placeholder_full_name,
|
||||
longName="parent",
|
||||
hidden=True,
|
||||
dataType="string"
|
||||
)
|
||||
cmds.addAttr(
|
||||
placeholder_full_name,
|
||||
longName="index",
|
||||
hidden=True,
|
||||
attributeType="short",
|
||||
defaultValue=-1
|
||||
)
|
||||
|
||||
cmds.setAttr(placeholder_full_name + ".parent", "", type="string")
|
||||
|
||||
def update_placeholder(self, placeholder_item, placeholder_data):
|
||||
node_name = placeholder_item.scene_identifier
|
||||
new_values = {}
|
||||
for key, value in placeholder_data.items():
|
||||
placeholder_value = placeholder_item.data.get(key)
|
||||
if value != placeholder_value:
|
||||
new_values[key] = value
|
||||
placeholder_item.data[key] = value
|
||||
|
||||
for key in new_values.keys():
|
||||
cmds.deleteAttr(node_name + "." + key)
|
||||
|
||||
imprint(node_name, new_values)
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, placeholder_data in scene_placeholders.items():
|
||||
if placeholder_data.get("plugin_identifier") != self.identifier:
|
||||
continue
|
||||
|
||||
# TODO do data validations and maybe upgrades if they are invalid
|
||||
output.append(
|
||||
LoadPlaceholderItem(node_name, placeholder_data, self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_load_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
repre_ids = self._get_loaded_repre_ids()
|
||||
self.populate_load_placeholder(placeholder, repre_ids)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_load_plugin_options(options)
|
||||
|
||||
def cleanup_placeholder(self, placeholder, failed):
|
||||
"""Hide placeholder, parent them to root
|
||||
add them to placeholder set and register placeholder's parent
|
||||
to keep placeholder info available for future use
|
||||
"""
|
||||
|
||||
node = placeholder._scene_identifier
|
||||
node_parent = placeholder.data["parent"]
|
||||
if node_parent:
|
||||
cmds.setAttr(node + ".parent", node_parent, type="string")
|
||||
|
||||
if cmds.getAttr(node + ".index") < 0:
|
||||
cmds.setAttr(node + ".index", placeholder.data["index"])
|
||||
|
||||
holding_sets = cmds.listSets(object=node)
|
||||
if holding_sets:
|
||||
for set in holding_sets:
|
||||
cmds.sets(node, remove=set)
|
||||
|
||||
if cmds.listRelatives(node, p=True):
|
||||
node = cmds.parent(node, world=True)[0]
|
||||
cmds.sets(node, addElement=PLACEHOLDER_SET)
|
||||
cmds.hide(node)
|
||||
cmds.setAttr(node + ".hiddenInOutliner", True)
|
||||
|
||||
def load_succeed(self, placeholder, container):
|
||||
self._parent_in_hierarchy(placeholder, container)
|
||||
|
||||
def _parent_in_hierarchy(self, placeholder, container):
|
||||
"""Parent loaded container to placeholder's parent.
|
||||
|
||||
ie : Set loaded content as placeholder's sibling
|
||||
|
||||
Args:
|
||||
container (str): Placeholder loaded containers
|
||||
"""
|
||||
|
||||
if not container:
|
||||
return
|
||||
|
||||
roots = cmds.sets(container, q=True)
|
||||
nodes_to_parent = []
|
||||
for root in roots:
|
||||
if root.endswith("_RN"):
|
||||
refRoot = cmds.referenceQuery(root, n=True)[0]
|
||||
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
|
||||
nodes_to_parent.extend(refRoot)
|
||||
elif root not in cmds.listSets(allSets=True):
|
||||
nodes_to_parent.append(root)
|
||||
|
||||
elif not cmds.sets(root, q=True):
|
||||
return
|
||||
|
||||
if placeholder.data["parent"]:
|
||||
cmds.parent(nodes_to_parent, placeholder.data["parent"])
|
||||
# Move loaded nodes to correct index in outliner hierarchy
|
||||
placeholder_form = cmds.xform(
|
||||
placeholder.scene_identifier,
|
||||
q=True,
|
||||
matrix=True,
|
||||
worldSpace=True
|
||||
)
|
||||
for node in set(nodes_to_parent):
|
||||
cmds.reorder(node, front=True)
|
||||
cmds.reorder(node, relative=placeholder.data["index"])
|
||||
cmds.xform(node, matrix=placeholder_form, ws=True)
|
||||
|
||||
holding_sets = cmds.listSets(object=placeholder.scene_identifier)
|
||||
if not holding_sets:
|
||||
return
|
||||
for holding_set in holding_sets:
|
||||
cmds.sets(roots, forceElement=holding_set)
|
||||
|
||||
|
||||
def build_workfile_template(*args):
|
||||
builder = MayaTemplateBuilder(registered_host())
|
||||
builder.build_template()
|
||||
|
||||
|
||||
def update_workfile_template(*args):
|
||||
builder = MayaTemplateBuilder(registered_host())
|
||||
builder.rebuild_template()
|
||||
|
||||
|
||||
def create_placeholder(*args):
|
||||
host = registered_host()
|
||||
builder = MayaTemplateBuilder(host)
|
||||
window = WorkfileBuildPlaceholderDialog(host, builder)
|
||||
window.exec_()
|
||||
|
||||
|
||||
def update_placeholder(*args):
|
||||
host = registered_host()
|
||||
builder = MayaTemplateBuilder(host)
|
||||
placeholder_items_by_id = {
|
||||
placeholder_item.scene_identifier: placeholder_item
|
||||
for placeholder_item in builder.get_placeholders()
|
||||
}
|
||||
placeholder_items = []
|
||||
for node_name in cmds.ls(selection=True, long=True):
|
||||
if node_name in placeholder_items_by_id:
|
||||
placeholder_items.append(placeholder_items_by_id[node_name])
|
||||
|
||||
# TODO show UI at least
|
||||
if len(placeholder_items) == 0:
|
||||
raise ValueError("No node selected")
|
||||
|
||||
if len(placeholder_items) > 1:
|
||||
raise ValueError("Too many selected nodes")
|
||||
|
||||
placeholder_item = placeholder_items[0]
|
||||
window = WorkfileBuildPlaceholderDialog(host, builder)
|
||||
window.set_update_mode(placeholder_item)
|
||||
window.exec_()
|
||||
|
|
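The module-level helpers at the end of the new file are what the Maya menu now calls directly: they resolve the registered host, create a MayaTemplateBuilder and either build/rebuild the template or open the placeholder dialog. A hedged usage sketch from the Maya script editor, assuming a workfile context is already set up:

    from openpype.hosts.maya.api.workfile_template_builder import (
        build_workfile_template,
        update_workfile_template,
        create_placeholder,
        update_placeholder,
    )

    # Build the current workfile from the configured template,
    # then rebuild it after template placeholders changed:
    build_workfile_template()
    update_workfile_template()

    # Dialog-based placeholder management; update_placeholder expects
    # exactly one selected placeholder node:
    create_placeholder()
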
@@ -21,6 +21,8 @@ from .pipeline import (
     containerise,
     parse_container,
     update_container,
+
+    get_workfile_build_placeholder_plugins,
 )
 from .lib import (
     maintained_selection,

@@ -55,6 +57,8 @@ __all__ = (
     "parse_container",
     "update_container",
+
+    "get_workfile_build_placeholder_plugins",

     "maintained_selection",
     "reset_selection",
     "get_view_process_node",

@@ -1,220 +0,0 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
import qargparse
|
||||
|
||||
import nuke
|
||||
|
||||
from openpype.tools.utils.widgets import OptionDialog
|
||||
|
||||
from .lib import imprint, get_main_window
|
||||
|
||||
|
||||
# To change as enum
|
||||
build_types = ["context_asset", "linked_asset", "all_assets"]
|
||||
|
||||
|
||||
def get_placeholder_attributes(node, enumerate=False):
|
||||
list_atts = {
|
||||
"builder_type",
|
||||
"family",
|
||||
"representation",
|
||||
"loader",
|
||||
"loader_args",
|
||||
"order",
|
||||
"asset",
|
||||
"subset",
|
||||
"hierarchy",
|
||||
"siblings",
|
||||
"last_loaded"
|
||||
}
|
||||
attributes = {}
|
||||
for attr in node.knobs().keys():
|
||||
if attr in list_atts:
|
||||
if enumerate:
|
||||
try:
|
||||
attributes[attr] = node.knob(attr).values()
|
||||
except AttributeError:
|
||||
attributes[attr] = node.knob(attr).getValue()
|
||||
else:
|
||||
attributes[attr] = node.knob(attr).getValue()
|
||||
|
||||
return attributes
|
||||
|
||||
|
||||
def delete_placeholder_attributes(node):
|
||||
"""Delete all extra placeholder attributes."""
|
||||
|
||||
extra_attributes = get_placeholder_attributes(node)
|
||||
for attribute in extra_attributes.keys():
|
||||
try:
|
||||
node.removeKnob(node.knob(attribute))
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
|
||||
def hide_placeholder_attributes(node):
|
||||
"""Hide all extra placeholder attributes."""
|
||||
|
||||
extra_attributes = get_placeholder_attributes(node)
|
||||
for attribute in extra_attributes.keys():
|
||||
try:
|
||||
node.knob(attribute).setVisible(False)
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
|
||||
def create_placeholder():
|
||||
args = placeholder_window()
|
||||
if not args:
|
||||
# operation canceled, no locator created
|
||||
return
|
||||
|
||||
placeholder = nuke.nodes.NoOp()
|
||||
placeholder.setName("PLACEHOLDER")
|
||||
placeholder.knob("tile_color").setValue(4278190335)
|
||||
|
||||
# custom arg parse to force empty data query
|
||||
# and still imprint them on placeholder
|
||||
# and getting items when arg is of type Enumerator
|
||||
options = OrderedDict()
|
||||
for arg in args:
|
||||
if not type(arg) == qargparse.Separator:
|
||||
options[str(arg)] = arg._data.get("items") or arg.read()
|
||||
imprint(placeholder, options)
|
||||
imprint(placeholder, {"is_placeholder": True})
|
||||
placeholder.knob("is_placeholder").setVisible(False)
|
||||
|
||||
|
||||
def update_placeholder():
|
||||
placeholder = nuke.selectedNodes()
|
||||
if not placeholder:
|
||||
raise ValueError("No node selected")
|
||||
if len(placeholder) > 1:
|
||||
raise ValueError("Too many selected nodes")
|
||||
placeholder = placeholder[0]
|
||||
|
||||
args = placeholder_window(get_placeholder_attributes(placeholder))
|
||||
if not args:
|
||||
return # operation canceled
|
||||
# delete placeholder attributes
|
||||
delete_placeholder_attributes(placeholder)
|
||||
|
||||
options = OrderedDict()
|
||||
for arg in args:
|
||||
if not type(arg) == qargparse.Separator:
|
||||
options[str(arg)] = arg._data.get("items") or arg.read()
|
||||
imprint(placeholder, options)
|
||||
|
||||
|
||||
def imprint_enum(placeholder, args):
|
||||
"""
|
||||
Imprint method doesn't act properly with enums.
|
||||
Replacing the functionnality with this for now
|
||||
"""
|
||||
|
||||
enum_values = {
|
||||
str(arg): arg.read()
|
||||
for arg in args
|
||||
if arg._data.get("items")
|
||||
}
|
||||
string_to_value_enum_table = {
|
||||
build: idx
|
||||
for idx, build in enumerate(build_types)
|
||||
}
|
||||
attrs = {}
|
||||
for key, value in enum_values.items():
|
||||
attrs[key] = string_to_value_enum_table[value]
|
||||
|
||||
|
||||
def placeholder_window(options=None):
|
||||
options = options or dict()
|
||||
dialog = OptionDialog(parent=get_main_window())
|
||||
dialog.setWindowTitle("Create Placeholder")
|
||||
|
||||
args = [
|
||||
qargparse.Separator("Main attributes"),
|
||||
qargparse.Enum(
|
||||
"builder_type",
|
||||
label="Asset Builder Type",
|
||||
default=options.get("builder_type", 0),
|
||||
items=build_types,
|
||||
help="""Asset Builder Type
|
||||
Builder type describe what template loader will look for.
|
||||
|
||||
context_asset : Template loader will look for subsets of
|
||||
current context asset (Asset bob will find asset)
|
||||
|
||||
linked_asset : Template loader will look for assets linked
|
||||
to current context asset.
|
||||
Linked asset are looked in OpenPype database under field "inputLinks"
|
||||
"""
|
||||
),
|
||||
qargparse.String(
|
||||
"family",
|
||||
default=options.get("family", ""),
|
||||
label="OpenPype Family",
|
||||
placeholder="ex: image, plate ..."),
|
||||
qargparse.String(
|
||||
"representation",
|
||||
default=options.get("representation", ""),
|
||||
label="OpenPype Representation",
|
||||
placeholder="ex: mov, png ..."),
|
||||
qargparse.String(
|
||||
"loader",
|
||||
default=options.get("loader", ""),
|
||||
label="Loader",
|
||||
placeholder="ex: LoadClip, LoadImage ...",
|
||||
help="""Loader
|
||||
|
||||
Defines what openpype loader will be used to load assets.
|
||||
Useable loader depends on current host's loader list.
|
||||
Field is case sensitive.
|
||||
"""),
|
||||
qargparse.String(
|
||||
"loader_args",
|
||||
default=options.get("loader_args", ""),
|
||||
label="Loader Arguments",
|
||||
placeholder='ex: {"camera":"persp", "lights":True}',
|
||||
help="""Loader
|
||||
|
||||
Defines a dictionnary of arguments used to load assets.
|
||||
Useable arguments depend on current placeholder Loader.
|
||||
Field should be a valid python dict. Anything else will be ignored.
|
||||
"""),
|
||||
qargparse.Integer(
|
||||
"order",
|
||||
default=options.get("order", 0),
|
||||
min=0,
|
||||
max=999,
|
||||
label="Order",
|
||||
placeholder="ex: 0, 100 ... (smallest order loaded first)",
|
||||
help="""Order
|
||||
|
||||
Order defines asset loading priority (0 to 999)
|
||||
Priority rule is : "lowest is first to load"."""),
|
||||
qargparse.Separator(
|
||||
"Optional attributes "),
|
||||
qargparse.String(
|
||||
"asset",
|
||||
default=options.get("asset", ""),
|
||||
label="Asset filter",
|
||||
placeholder="regex filtering by asset name",
|
||||
help="Filtering assets by matching field regex to asset's name"),
|
||||
qargparse.String(
|
||||
"subset",
|
||||
default=options.get("subset", ""),
|
||||
label="Subset filter",
|
||||
placeholder="regex filtering by subset name",
|
||||
help="Filtering assets by matching field regex to subset's name"),
|
||||
qargparse.String(
|
||||
"hierarchy",
|
||||
default=options.get("hierarchy", ""),
|
||||
label="Hierarchy filter",
|
||||
placeholder="regex filtering by asset's hierarchy",
|
||||
help="Filtering assets by matching field asset's hierarchy")
|
||||
]
|
||||
dialog.create(args)
|
||||
if not dialog.exec_():
|
||||
return None
|
||||
|
||||
return args
|
||||
|
|
@@ -22,10 +22,6 @@ from openpype.pipeline import (
     AVALON_CONTAINER_ID,
 )
 from openpype.pipeline.workfile import BuildWorkfile
-from openpype.pipeline.workfile.build_template import (
-    build_workfile_template,
-    update_workfile_template
-)
 from openpype.tools.utils import host_tools

 from .command import viewer_update_and_undo_stop

@@ -40,8 +36,12 @@ from .lib import (
     set_avalon_knob_data,
     read_avalon_data,
 )
-from .lib_template_builder import (
-    create_placeholder, update_placeholder
+from .workfile_template_builder import (
+    NukePlaceholderLoadPlugin,
+    build_workfile_template,
+    update_workfile_template,
+    create_placeholder,
+    update_placeholder,
 )

 log = Logger.get_logger(__name__)

@@ -141,6 +141,12 @@ def _show_workfiles():
     host_tools.show_workfiles(parent=None, on_top=False)


+def get_workfile_build_placeholder_plugins():
+    return [
+        NukePlaceholderLoadPlugin
+    ]
+
+
 def _install_menu():
     # uninstall original avalon menu
     main_window = get_main_window()

@@ -1,13 +1,16 @@
|
|||
import re
|
||||
import collections
|
||||
|
||||
import nuke
|
||||
|
||||
from openpype.client import get_representations
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.pipeline.workfile.abstract_template_loader import (
|
||||
AbstractPlaceholder,
|
||||
AbstractTemplateLoader,
|
||||
from openpype.pipeline import registered_host
|
||||
from openpype.pipeline.workfile.workfile_template_builder import (
|
||||
AbstractTemplateBuilder,
|
||||
PlaceholderPlugin,
|
||||
LoadPlaceholderItem,
|
||||
PlaceholderLoadMixin,
|
||||
)
|
||||
from openpype.tools.workfile_template_build import (
|
||||
WorkfileBuildPlaceholderDialog,
|
||||
)
|
||||
|
||||
from .lib import (
|
||||
|
|
@@ -25,19 +28,11 @@ from .lib import (
|
|||
node_tempfile,
|
||||
)
|
||||
|
||||
from .lib_template_builder import (
|
||||
delete_placeholder_attributes,
|
||||
get_placeholder_attributes,
|
||||
hide_placeholder_attributes
|
||||
)
|
||||
|
||||
PLACEHOLDER_SET = "PLACEHOLDERS_SET"
|
||||
|
||||
|
||||
class NukeTemplateLoader(AbstractTemplateLoader):
|
||||
"""Concrete implementation of AbstractTemplateLoader for Nuke
|
||||
|
||||
"""
|
||||
class NukeTemplateBuilder(AbstractTemplateBuilder):
|
||||
"""Concrete implementation of AbstractTemplateBuilder for maya"""
|
||||
|
||||
def import_template(self, path):
|
||||
"""Import template into current scene.
|
||||
|
|
@ -58,224 +53,255 @@ class NukeTemplateLoader(AbstractTemplateLoader):
|
|||
|
||||
return True
|
||||
|
||||
def preload(self, placeholder, loaders_by_name, last_representation):
|
||||
placeholder.data["nodes_init"] = nuke.allNodes()
|
||||
placeholder.data["last_repre_id"] = str(last_representation["_id"])
|
||||
|
||||
def populate_template(self, ignored_ids=None):
|
||||
processed_key = "_node_processed"
|
||||
class NukePlaceholderPlugin(PlaceholderPlugin):
|
||||
node_color = 4278190335
|
||||
|
||||
processed_nodes = []
|
||||
nodes = self.get_template_nodes()
|
||||
while nodes:
|
||||
# Mark nodes as processed so they're not re-executed
|
||||
# - that can happen if processing of placeholder node fails
|
||||
for node in nodes:
|
||||
imprint(node, {processed_key: True})
|
||||
processed_nodes.append(node)
|
||||
def _collect_scene_placeholders(self):
|
||||
# Cache placeholder data to shared data
|
||||
placeholder_nodes = self.builder.get_shared_populate_data(
|
||||
"placeholder_nodes"
|
||||
)
|
||||
if placeholder_nodes is None:
|
||||
placeholder_nodes = {}
|
||||
all_groups = collections.deque()
|
||||
all_groups.append(nuke.thisGroup())
|
||||
while all_groups:
|
||||
group = all_groups.popleft()
|
||||
for node in group.nodes():
|
||||
if isinstance(node, nuke.Group):
|
||||
all_groups.append(node)
|
||||
|
||||
super(NukeTemplateLoader, self).populate_template(ignored_ids)
|
||||
node_knobs = node.knobs()
|
||||
if (
|
||||
"builder_type" not in node_knobs
|
||||
or "is_placeholder" not in node_knobs
|
||||
or not node.knob("is_placeholder").value()
|
||||
):
|
||||
continue
|
||||
|
||||
# Recollect nodes to repopulate
|
||||
nodes = []
|
||||
for node in self.get_template_nodes():
|
||||
# Skip already processed nodes
|
||||
if (
|
||||
processed_key in node.knobs()
|
||||
and node.knob(processed_key).value()
|
||||
):
|
||||
continue
|
||||
nodes.append(node)
|
||||
if "empty" in node_knobs and node.knob("empty").value():
|
||||
continue
|
||||
|
||||
for node in processed_nodes:
|
||||
knob = node.knob(processed_key)
|
||||
placeholder_nodes[node.fullName()] = node
|
||||
|
||||
self.builder.set_shared_populate_data(
|
||||
"placeholder_nodes", placeholder_nodes
|
||||
)
|
||||
return placeholder_nodes
|
||||
|
||||
def create_placeholder(self, placeholder_data):
|
||||
placeholder_data["plugin_identifier"] = self.identifier
|
||||
|
||||
placeholder = nuke.nodes.NoOp()
|
||||
placeholder.setName("PLACEHOLDER")
|
||||
placeholder.knob("tile_color").setValue(self.node_color)
|
||||
|
||||
imprint(placeholder, placeholder_data)
|
||||
imprint(placeholder, {"is_placeholder": True})
|
||||
placeholder.knob("is_placeholder").setVisible(False)
|
||||
|
||||
def update_placeholder(self, placeholder_item, placeholder_data):
|
||||
node = nuke.toNode(placeholder_item.scene_identifier)
|
||||
imprint(node, placeholder_data)
|
||||
|
||||
def _parse_placeholder_node_data(self, node):
|
||||
placeholder_data = {}
|
||||
for key in self.get_placeholder_keys():
|
||||
knob = node.knob(key)
|
||||
value = None
|
||||
if knob is not None:
|
||||
node.removeKnob(knob)
|
||||
|
||||
@staticmethod
|
||||
def get_template_nodes():
|
||||
placeholders = []
|
||||
all_groups = collections.deque()
|
||||
all_groups.append(nuke.thisGroup())
|
||||
while all_groups:
|
||||
group = all_groups.popleft()
|
||||
for node in group.nodes():
|
||||
if isinstance(node, nuke.Group):
|
||||
all_groups.append(node)
|
||||
|
||||
node_knobs = node.knobs()
|
||||
if (
|
||||
"builder_type" not in node_knobs
|
||||
or "is_placeholder" not in node_knobs
|
||||
or not node.knob("is_placeholder").value()
|
||||
):
|
||||
continue
|
||||
|
||||
if "empty" in node_knobs and node.knob("empty").value():
|
||||
continue
|
||||
|
||||
placeholders.append(node)
|
||||
|
||||
return placeholders
|
||||
|
||||
def update_missing_containers(self):
|
||||
nodes_by_id = collections.defaultdict(list)
|
||||
|
||||
for node in nuke.allNodes():
|
||||
node_knobs = node.knobs().keys()
|
||||
if "repre_id" in node_knobs:
|
||||
repre_id = node.knob("repre_id").getValue()
|
||||
nodes_by_id[repre_id].append(node.name())
|
||||
|
||||
if "empty" in node_knobs:
|
||||
node.removeKnob(node.knob("empty"))
|
||||
imprint(node, {"empty": False})
|
||||
|
||||
for node_names in nodes_by_id.values():
|
||||
node = None
|
||||
for node_name in node_names:
|
||||
node_by_name = nuke.toNode(node_name)
|
||||
if "builder_type" in node_by_name.knobs().keys():
|
||||
node = node_by_name
|
||||
break
|
||||
|
||||
if node is None:
|
||||
continue
|
||||
|
||||
placeholder = nuke.nodes.NoOp()
|
||||
placeholder.setName("PLACEHOLDER")
|
||||
placeholder.knob("tile_color").setValue(4278190335)
|
||||
attributes = get_placeholder_attributes(node, enumerate=True)
|
||||
imprint(placeholder, attributes)
|
||||
pos_x = int(node.knob("x").getValue())
|
||||
pos_y = int(node.knob("y").getValue())
|
||||
placeholder.setXYpos(pos_x, pos_y)
|
||||
imprint(placeholder, {"nb_children": 1})
|
||||
refresh_node(placeholder)
|
||||
|
||||
self.populate_template(self.get_loaded_containers_by_id())
|
||||
|
||||
def get_loaded_containers_by_id(self):
|
||||
repre_ids = set()
|
||||
for node in nuke.allNodes():
|
||||
if "repre_id" in node.knobs():
|
||||
repre_ids.add(node.knob("repre_id").getValue())
|
||||
|
||||
# Removes duplicates in the list
|
||||
return list(repre_ids)
|
||||
|
||||
def delete_placeholder(self, placeholder):
|
||||
placeholder_node = placeholder.data["node"]
|
||||
last_loaded = placeholder.data["last_loaded"]
|
||||
if not placeholder.data["delete"]:
|
||||
if "empty" in placeholder_node.knobs().keys():
|
||||
placeholder_node.removeKnob(placeholder_node.knob("empty"))
|
||||
imprint(placeholder_node, {"empty": True})
|
||||
return
|
||||
|
||||
if not last_loaded:
|
||||
nuke.delete(placeholder_node)
|
||||
return
|
||||
|
||||
if "last_loaded" in placeholder_node.knobs().keys():
|
||||
for node_name in placeholder_node.knob("last_loaded").values():
|
||||
node = nuke.toNode(node_name)
|
||||
try:
|
||||
delete_placeholder_attributes(node)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
last_loaded_names = [
|
||||
loaded_node.name()
|
||||
for loaded_node in last_loaded
|
||||
]
|
||||
imprint(placeholder_node, {"last_loaded": last_loaded_names})
|
||||
|
||||
for node in last_loaded:
|
||||
refresh_node(node)
|
||||
refresh_node(placeholder_node)
|
||||
if "builder_type" not in node.knobs().keys():
|
||||
attributes = get_placeholder_attributes(placeholder_node, True)
|
||||
imprint(node, attributes)
|
||||
imprint(node, {"is_placeholder": False})
|
||||
hide_placeholder_attributes(node)
|
||||
node.knob("is_placeholder").setVisible(False)
|
||||
imprint(
|
||||
node,
|
||||
{
|
||||
"x": placeholder_node.xpos(),
|
||||
"y": placeholder_node.ypos()
|
||||
}
|
||||
)
|
||||
node.knob("x").setVisible(False)
|
||||
node.knob("y").setVisible(False)
|
||||
nuke.delete(placeholder_node)
|
||||
value = knob.getValue()
|
||||
placeholder_data[key] = value
|
||||
return placeholder_data
|
||||
|
||||
|
||||
class NukePlaceholder(AbstractPlaceholder):
|
||||
"""Concrete implementation of AbstractPlaceholder for Nuke"""
|
||||
class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
|
||||
identifier = "nuke.load"
|
||||
label = "Nuke load"
|
||||
|
||||
optional_keys = {"asset", "subset", "hierarchy"}
|
||||
def _parse_placeholder_node_data(self, node):
|
||||
placeholder_data = super(
|
||||
NukePlaceholderLoadPlugin, self
|
||||
)._parse_placeholder_node_data(node)
|
||||
|
||||
def get_data(self, node):
|
||||
user_data = dict()
|
||||
node_knobs = node.knobs()
|
||||
for attr in self.required_keys.union(self.optional_keys):
|
||||
if attr in node_knobs:
|
||||
user_data[attr] = node_knobs[attr].getValue()
|
||||
user_data["node"] = node
|
||||
|
||||
nb_children = 0
|
||||
if "nb_children" in node_knobs:
|
||||
nb_children = int(node_knobs["nb_children"].getValue())
|
||||
user_data["nb_children"] = nb_children
|
||||
placeholder_data["nb_children"] = nb_children
|
||||
|
||||
siblings = []
|
||||
if "siblings" in node_knobs:
|
||||
siblings = node_knobs["siblings"].values()
|
||||
user_data["siblings"] = siblings
|
||||
placeholder_data["siblings"] = siblings
|
||||
|
||||
node_full_name = node.fullName()
|
||||
user_data["group_name"] = node_full_name.rpartition(".")[0]
|
||||
user_data["last_loaded"] = []
|
||||
user_data["delete"] = False
|
||||
self.data = user_data
|
||||
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
|
||||
placeholder_data["last_loaded"] = []
|
||||
placeholder_data["delete"] = False
|
||||
return placeholder_data
|
||||
|
||||
def parent_in_hierarchy(self, containers):
|
||||
return
|
||||
def _get_loaded_repre_ids(self):
|
||||
loaded_representation_ids = self.builder.get_shared_populate_data(
|
||||
"loaded_representation_ids"
|
||||
)
|
||||
if loaded_representation_ids is None:
|
||||
loaded_representation_ids = set()
|
||||
for node in nuke.allNodes():
|
||||
if "repre_id" in node.knobs():
|
||||
loaded_representation_ids.add(
|
||||
node.knob("repre_id").getValue()
|
||||
)
|
||||
|
||||
def create_sib_copies(self):
|
||||
""" creating copies of the palce_holder siblings (the ones who were
|
||||
loaded with it) for the new nodes added
|
||||
self.builder.set_shared_populate_data(
|
||||
"loaded_representation_ids", loaded_representation_ids
|
||||
)
|
||||
return loaded_representation_ids
|
||||
|
||||
def _before_repre_load(self, placeholder, representation):
|
||||
placeholder.data["nodes_init"] = nuke.allNodes()
|
||||
placeholder.data["last_repre_id"] = str(representation["_id"])
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, node in scene_placeholders.items():
|
||||
plugin_identifier_knob = node.knob("plugin_identifier")
|
||||
if (
|
||||
plugin_identifier_knob is None
|
||||
or plugin_identifier_knob.getValue() != self.identifier
|
||||
):
|
||||
continue
|
||||
|
||||
placeholder_data = self._parse_placeholder_node_data(node)
|
||||
# TODO do data validations and maybe upgrades if they are invalid
|
||||
output.append(
|
||||
LoadPlaceholderItem(node_name, placeholder_data, self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_load_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
repre_ids = self._get_loaded_repre_ids()
|
||||
self.populate_load_placeholder(placeholder, repre_ids)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_load_plugin_options(options)
|
||||
|
||||
def cleanup_placeholder(self, placeholder, failed):
|
||||
# deselect all selected nodes
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
# getting the latest nodes added
|
||||
# TODO get from shared populate data!
|
||||
nodes_init = placeholder.data["nodes_init"]
|
||||
nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
|
||||
self.log.debug("Loaded nodes: {}".format(nodes_loaded))
|
||||
if not nodes_loaded:
|
||||
return
|
||||
|
||||
placeholder.data["delete"] = True
|
||||
|
||||
nodes_loaded = self._move_to_placeholder_group(
|
||||
placeholder, nodes_loaded
|
||||
)
|
||||
placeholder.data["last_loaded"] = nodes_loaded
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# positioning of the loaded nodes
|
||||
min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
|
||||
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
|
||||
node.setXYpos(xpos, ypos)
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# fix the problem of z_order for backdrops
|
||||
self._fix_z_order(placeholder)
|
||||
self._imprint_siblings(placeholder)
|
||||
|
||||
if placeholder.data["nb_children"] == 0:
|
||||
# save initial nodes postions and dimensions, update them
|
||||
# and set inputs and outputs of loaded nodes
|
||||
|
||||
self._imprint_inits()
|
||||
self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded)
|
||||
self._set_loaded_connections(placeholder)
|
||||
|
||||
elif placeholder.data["siblings"]:
|
||||
# create copies of placeholder siblings for the new loaded nodes,
|
||||
# set their inputs and outpus and update all nodes positions and
|
||||
# dimensions and siblings names
|
||||
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
refresh_nodes(siblings)
|
||||
copies = self._create_sib_copies(placeholder)
|
||||
new_nodes = list(copies.values()) # copies nodes
|
||||
self._update_nodes(new_nodes, nodes_loaded)
|
||||
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
|
||||
new_nodes_name = get_names_from_nodes(new_nodes)
|
||||
imprint(placeholder_node, {"siblings": new_nodes_name})
|
||||
self._set_copies_connections(placeholder, copies)
|
||||
|
||||
self._update_nodes(
|
||||
nuke.allNodes(),
|
||||
new_nodes + nodes_loaded,
|
||||
20
|
||||
)
|
||||
|
||||
new_siblings = get_names_from_nodes(new_nodes)
|
||||
placeholder.data["siblings"] = new_siblings
|
||||
|
||||
else:
|
||||
# if the placeholder doesn't have siblings, the loaded
|
||||
# nodes will be placed in a free space
|
||||
|
||||
xpointer, ypointer = find_free_space_to_paste_nodes(
|
||||
nodes_loaded, direction="bottom", offset=200
|
||||
)
|
||||
node = nuke.createNode("NoOp")
|
||||
reset_selection()
|
||||
nuke.delete(node)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + xpointer
|
||||
ypos = (node.ypos() - min_y) + ypointer
|
||||
node.setXYpos(xpos, ypos)
|
||||
|
||||
placeholder.data["nb_children"] += 1
|
||||
reset_selection()
|
||||
# go back to root group
|
||||
nuke.root().begin()
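# Note (added commentary, not part of the commit): 'cleanup_placeholder' is
# the per-placeholder post-load step. It diffs nuke.allNodes() against the
# 'nodes_init' snapshot to find what the loader created, moves those nodes
# into the placeholder's group, lines them up against the placeholder
# position and imprints the sibling / initial-position knobs that later
# repopulation passes rely on.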
|
||||
|
||||
def _move_to_placeholder_group(self, placeholder, nodes_loaded):
|
||||
"""
|
||||
opening the placeholder's group and copying loaded nodes in it.
|
||||
|
||||
Returns :
|
||||
copies (dict) : with copied nodes names and their copies
|
||||
nodes_loaded (list): the new list of pasted nodes
|
||||
"""
|
||||
|
||||
copies = {}
|
||||
siblings = get_nodes_by_names(self.data["siblings"])
|
||||
for node in siblings:
|
||||
new_node = duplicate_node(node)
|
||||
groups_name = placeholder.data["group_name"]
|
||||
reset_selection()
|
||||
select_nodes(nodes_loaded)
|
||||
if groups_name:
|
||||
with node_tempfile() as filepath:
|
||||
nuke.nodeCopy(filepath)
|
||||
for node in nuke.selectedNodes():
|
||||
nuke.delete(node)
|
||||
group = nuke.toNode(groups_name)
|
||||
group.begin()
|
||||
nuke.nodePaste(filepath)
|
||||
nodes_loaded = nuke.selectedNodes()
|
||||
return nodes_loaded
|
||||
|
||||
x_init = int(new_node.knob("x_init").getValue())
|
||||
y_init = int(new_node.knob("y_init").getValue())
|
||||
new_node.setXYpos(x_init, y_init)
|
||||
if isinstance(new_node, nuke.BackdropNode):
|
||||
w_init = new_node.knob("w_init").getValue()
|
||||
h_init = new_node.knob("h_init").getValue()
|
||||
new_node.knob("bdwidth").setValue(w_init)
|
||||
new_node.knob("bdheight").setValue(h_init)
|
||||
refresh_node(node)
|
||||
|
||||
if "repre_id" in node.knobs().keys():
|
||||
node.removeKnob(node.knob("repre_id"))
|
||||
copies[node.name()] = new_node
|
||||
return copies
|
||||
|
||||
def fix_z_order(self):
|
||||
def _fix_z_order(self, placeholder):
|
||||
"""Fix the problem of z_order when a backdrop is loaded."""
|
||||
|
||||
nodes_loaded = self.data["last_loaded"]
|
||||
nodes_loaded = placeholder.data["last_loaded"]
|
||||
loaded_backdrops = []
|
||||
bd_orders = set()
|
||||
for node in nodes_loaded:
|
||||
|
|
@ -287,7 +313,7 @@ class NukePlaceholder(AbstractPlaceholder):
|
|||
return
|
||||
|
||||
sib_orders = set()
|
||||
for node_name in self.data["siblings"]:
|
||||
for node_name in placeholder.data["siblings"]:
|
||||
node = nuke.toNode(node_name)
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
sib_orders.add(node.knob("z_order").getValue())
|
||||
|
|
@ -302,7 +328,56 @@ class NukePlaceholder(AbstractPlaceholder):
|
|||
backdrop_node.knob("z_order").setValue(
|
||||
z_order + max_order - min_order + 1)
|
||||
|
||||
def update_nodes(self, nodes, considered_nodes, offset_y=None):
|
||||
def _imprint_siblings(self, placeholder):
|
||||
"""
|
||||
- add siblings names to placeholder attributes (nodes loaded with it)
|
||||
- add Id to the attributes of all the other nodes
|
||||
"""
|
||||
|
||||
loaded_nodes = placeholder.data["last_loaded"]
|
||||
loaded_nodes_set = set(loaded_nodes)
|
||||
data = {"repre_id": str(placeholder.data["last_repre_id"])}
|
||||
|
||||
for node in loaded_nodes:
|
||||
node_knobs = node.knobs()
|
||||
if "builder_type" not in node_knobs:
|
||||
# save the id of representation for all imported nodes
|
||||
imprint(node, data)
|
||||
node.knob("repre_id").setVisible(False)
|
||||
refresh_node(node)
|
||||
continue
|
||||
|
||||
if (
|
||||
"is_placeholder" not in node_knobs
|
||||
or (
|
||||
"is_placeholder" in node_knobs
|
||||
and node.knob("is_placeholder").value()
|
||||
)
|
||||
):
|
||||
siblings = list(loaded_nodes_set - {node})
|
||||
siblings_name = get_names_from_nodes(siblings)
|
||||
siblings = {"siblings": siblings_name}
|
||||
imprint(node, siblings)
|
||||
|
||||
def _imprint_inits(self):
|
||||
"""Add initial positions and dimensions to the attributes"""
|
||||
|
||||
for node in nuke.allNodes():
|
||||
refresh_node(node)
|
||||
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
|
||||
node.knob("x_init").setVisible(False)
|
||||
node.knob("y_init").setVisible(False)
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
if "bdwidth" in node.knobs():
|
||||
imprint(node, {"w_init": width, "h_init": height})
|
||||
node.knob("w_init").setVisible(False)
|
||||
node.knob("h_init").setVisible(False)
|
||||
refresh_node(node)
|
||||
|
||||
def _update_nodes(
|
||||
self, placeholder, nodes, considered_nodes, offset_y=None
|
||||
):
|
||||
"""Adjust backdrop nodes dimensions and positions.
|
||||
|
||||
Considering some nodes sizes.
|
||||
|
|
@ -314,7 +389,7 @@ class NukePlaceholder(AbstractPlaceholder):
|
|||
offset (int): distance between copies
|
||||
"""
|
||||
|
||||
placeholder_node = self.data["node"]
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
|
||||
|
||||
|
|
@ -330,7 +405,7 @@ class NukePlaceholder(AbstractPlaceholder):
|
|||
min_x = placeholder_node.xpos()
|
||||
min_y = placeholder_node.ypos()
|
||||
else:
|
||||
siblings = get_nodes_by_names(self.data["siblings"])
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
minX, _, maxX, _ = get_extreme_positions(siblings)
|
||||
diff_y = max_y - min_y + 20
|
||||
diff_x = abs(max_x - min_x - maxX + minX)
|
||||
|
|
@ -369,59 +444,14 @@ class NukePlaceholder(AbstractPlaceholder):
|
|||
|
||||
refresh_node(node)
|
||||
|
||||
def imprint_inits(self):
|
||||
"""Add initial positions and dimensions to the attributes"""
|
||||
|
||||
for node in nuke.allNodes():
|
||||
refresh_node(node)
|
||||
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
|
||||
node.knob("x_init").setVisible(False)
|
||||
node.knob("y_init").setVisible(False)
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
if "bdwidth" in node.knobs():
|
||||
imprint(node, {"w_init": width, "h_init": height})
|
||||
node.knob("w_init").setVisible(False)
|
||||
node.knob("h_init").setVisible(False)
|
||||
refresh_node(node)
|
||||
|
||||
def imprint_siblings(self):
|
||||
"""
|
||||
- add siblings names to placeholder attributes (nodes loaded with it)
|
||||
- add Id to the attributes of all the other nodes
|
||||
"""
|
||||
|
||||
loaded_nodes = self.data["last_loaded"]
|
||||
loaded_nodes_set = set(loaded_nodes)
|
||||
data = {"repre_id": str(self.data["last_repre_id"])}
|
||||
|
||||
for node in loaded_nodes:
|
||||
node_knobs = node.knobs()
|
||||
if "builder_type" not in node_knobs:
|
||||
# save the id of representation for all imported nodes
|
||||
imprint(node, data)
|
||||
node.knob("repre_id").setVisible(False)
|
||||
refresh_node(node)
|
||||
continue
|
||||
|
||||
if (
|
||||
"is_placeholder" not in node_knobs
|
||||
or (
|
||||
"is_placeholder" in node_knobs
|
||||
and node.knob("is_placeholder").value()
|
||||
)
|
||||
):
|
||||
siblings = list(loaded_nodes_set - {node})
|
||||
siblings_name = get_names_from_nodes(siblings)
|
||||
siblings = {"siblings": siblings_name}
|
||||
imprint(node, siblings)
|
||||
|
||||
def set_loaded_connections(self):
|
||||
def _set_loaded_connections(self, placeholder):
|
||||
"""
|
||||
set inputs and outputs of loaded nodes"""
|
||||
|
||||
placeholder_node = self.data["node"]
|
||||
input_node, output_node = get_group_io_nodes(self.data["last_loaded"])
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
input_node, output_node = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
for node in placeholder_node.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) == placeholder_node:
|
||||
|
|
@ -432,15 +462,45 @@ class NukePlaceholder(AbstractPlaceholder):
|
|||
if placeholder_node.input(idx) == node:
|
||||
input_node.setInput(0, node)
|
||||
|
||||
def set_copies_connections(self, copies):
|
||||
def _create_sib_copies(self, placeholder):
|
||||
""" creating copies of the palce_holder siblings (the ones who were
|
||||
loaded with it) for the new nodes added
|
||||
|
||||
Returns :
|
||||
copies (dict) : with copied nodes names and their copies
|
||||
"""
|
||||
|
||||
copies = {}
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
for node in siblings:
|
||||
new_node = duplicate_node(node)
|
||||
|
||||
x_init = int(new_node.knob("x_init").getValue())
|
||||
y_init = int(new_node.knob("y_init").getValue())
|
||||
new_node.setXYpos(x_init, y_init)
|
||||
if isinstance(new_node, nuke.BackdropNode):
|
||||
w_init = new_node.knob("w_init").getValue()
|
||||
h_init = new_node.knob("h_init").getValue()
|
||||
new_node.knob("bdwidth").setValue(w_init)
|
||||
new_node.knob("bdheight").setValue(h_init)
|
||||
refresh_node(node)
|
||||
|
||||
if "repre_id" in node.knobs().keys():
|
||||
node.removeKnob(node.knob("repre_id"))
|
||||
copies[node.name()] = new_node
|
||||
return copies
|
||||
|
||||
def _set_copies_connections(self, placeholder, copies):
|
||||
"""Set inputs and outputs of the copies.
|
||||
|
||||
Args:
|
||||
copies (dict): Copied nodes by their names.
|
||||
"""
|
||||
|
||||
last_input, last_output = get_group_io_nodes(self.data["last_loaded"])
|
||||
siblings = get_nodes_by_names(self.data["siblings"])
|
||||
last_input, last_output = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
siblings_input, siblings_output = get_group_io_nodes(siblings)
|
||||
copy_input = copies[siblings_input.name()]
|
||||
copy_output = copies[siblings_output.name()]
|
||||
|
|
@ -474,166 +534,45 @@ class NukePlaceholder(AbstractPlaceholder):
|
|||
|
||||
siblings_input.setInput(0, copy_output)
|
||||
|
||||
def move_to_placeholder_group(self, nodes_loaded):
|
||||
"""
|
||||
opening the placeholder's group and copying loaded nodes in it.
|
||||
|
||||
Returns :
|
||||
nodes_loaded (list): the new list of pasted nodes
|
||||
"""
|
||||
def build_workfile_template(*args):
|
||||
builder = NukeTemplateBuilder(registered_host())
|
||||
builder.build_template()
|
||||
|
||||
groups_name = self.data["group_name"]
|
||||
reset_selection()
|
||||
select_nodes(nodes_loaded)
|
||||
if groups_name:
|
||||
with node_tempfile() as filepath:
|
||||
nuke.nodeCopy(filepath)
|
||||
for node in nuke.selectedNodes():
|
||||
nuke.delete(node)
|
||||
group = nuke.toNode(groups_name)
|
||||
group.begin()
|
||||
nuke.nodePaste(filepath)
|
||||
nodes_loaded = nuke.selectedNodes()
|
||||
return nodes_loaded
|
||||
|
||||
def clean(self):
|
||||
# deselect all selected nodes
|
||||
placeholder_node = self.data["node"]
|
||||
def update_workfile_template(*args):
|
||||
builder = NukeTemplateBuilder(registered_host())
|
||||
builder.rebuild_template()
|
||||
|
||||
# getting the latest nodes added
|
||||
nodes_init = self.data["nodes_init"]
|
||||
nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
|
||||
self.log.debug("Loaded nodes: {}".format(nodes_loaded))
|
||||
if not nodes_loaded:
|
||||
return
|
||||
|
||||
self.data["delete"] = True
|
||||
def create_placeholder(*args):
|
||||
host = registered_host()
|
||||
builder = NukeTemplateBuilder(host)
|
||||
window = WorkfileBuildPlaceholderDialog(host, builder)
|
||||
window.exec_()
|
||||
|
||||
nodes_loaded = self.move_to_placeholder_group(nodes_loaded)
|
||||
self.data["last_loaded"] = nodes_loaded
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# positioning of the loaded nodes
|
||||
min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
|
||||
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
|
||||
node.setXYpos(xpos, ypos)
|
||||
refresh_nodes(nodes_loaded)
|
||||
def update_placeholder(*args):
|
||||
host = registered_host()
|
||||
builder = NukeTemplateBuilder(host)
|
||||
placeholder_items_by_id = {
|
||||
placeholder_item.scene_identifier: placeholder_item
|
||||
for placeholder_item in builder.get_placeholders()
|
||||
}
|
||||
placeholder_items = []
|
||||
for node in nuke.selectedNodes():
|
||||
node_name = node.fullName()
|
||||
if node_name in placeholder_items_by_id:
|
||||
placeholder_items.append(placeholder_items_by_id[node_name])
|
||||
|
||||
self.fix_z_order() # fix the problem of z_order for backdrops
|
||||
self.imprint_siblings()
|
||||
# TODO show UI at least
|
||||
if len(placeholder_items) == 0:
|
||||
raise ValueError("No node selected")
|
||||
|
||||
if self.data["nb_children"] == 0:
|
||||
# save initial nodes positions and dimensions, update them
|
||||
# and set inputs and outputs of loaded nodes
|
||||
if len(placeholder_items) > 1:
|
||||
raise ValueError("Too many selected nodes")
|
||||
|
||||
self.imprint_inits()
|
||||
self.update_nodes(nuke.allNodes(), nodes_loaded)
|
||||
self.set_loaded_connections()
|
||||
|
||||
elif self.data["siblings"]:
|
||||
# create copies of placeholder siblings for the new loaded nodes,
|
||||
# set their inputs and outputs and update all nodes positions and
|
||||
# dimensions and siblings names
|
||||
|
||||
siblings = get_nodes_by_names(self.data["siblings"])
|
||||
refresh_nodes(siblings)
|
||||
copies = self.create_sib_copies()
|
||||
new_nodes = list(copies.values()) # copies nodes
|
||||
self.update_nodes(new_nodes, nodes_loaded)
|
||||
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
|
||||
new_nodes_name = get_names_from_nodes(new_nodes)
|
||||
imprint(placeholder_node, {"siblings": new_nodes_name})
|
||||
self.set_copies_connections(copies)
|
||||
|
||||
self.update_nodes(
|
||||
nuke.allNodes(),
|
||||
new_nodes + nodes_loaded,
|
||||
20
|
||||
)
|
||||
|
||||
new_siblings = get_names_from_nodes(new_nodes)
|
||||
self.data["siblings"] = new_siblings
|
||||
|
||||
else:
|
||||
# if the placeholder doesn't have siblings, the loaded
|
||||
# nodes will be placed in a free space
|
||||
|
||||
xpointer, ypointer = find_free_space_to_paste_nodes(
|
||||
nodes_loaded, direction="bottom", offset=200
|
||||
)
|
||||
node = nuke.createNode("NoOp")
|
||||
reset_selection()
|
||||
nuke.delete(node)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + xpointer
|
||||
ypos = (node.ypos() - min_y) + ypointer
|
||||
node.setXYpos(xpos, ypos)
|
||||
|
||||
self.data["nb_children"] += 1
|
||||
reset_selection()
|
||||
# go back to root group
|
||||
nuke.root().begin()
|
||||
|
||||
def get_representations(self, current_asset_doc, linked_asset_docs):
|
||||
project_name = legacy_io.active_project()
|
||||
|
||||
builder_type = self.data["builder_type"]
|
||||
if builder_type == "context_asset":
|
||||
context_filters = {
|
||||
"asset": [re.compile(self.data["asset"])],
|
||||
"subset": [re.compile(self.data["subset"])],
|
||||
"hierarchy": [re.compile(self.data["hierarchy"])],
|
||||
"representations": [self.data["representation"]],
|
||||
"family": [self.data["family"]]
|
||||
}
|
||||
|
||||
elif builder_type != "linked_asset":
|
||||
context_filters = {
|
||||
"asset": [
|
||||
current_asset_doc["name"],
|
||||
re.compile(self.data["asset"])
|
||||
],
|
||||
"subset": [re.compile(self.data["subset"])],
|
||||
"hierarchy": [re.compile(self.data["hierarchy"])],
|
||||
"representation": [self.data["representation"]],
|
||||
"family": [self.data["family"]]
|
||||
}
|
||||
|
||||
else:
|
||||
asset_regex = re.compile(self.data["asset"])
|
||||
linked_asset_names = []
|
||||
for asset_doc in linked_asset_docs:
|
||||
asset_name = asset_doc["name"]
|
||||
if asset_regex.match(asset_name):
|
||||
linked_asset_names.append(asset_name)
|
||||
|
||||
if not linked_asset_names:
|
||||
return []
|
||||
|
||||
context_filters = {
|
||||
"asset": linked_asset_names,
|
||||
"subset": [re.compile(self.data["subset"])],
|
||||
"hierarchy": [re.compile(self.data["hierarchy"])],
|
||||
"representation": [self.data["representation"]],
|
||||
"family": [self.data["family"]],
|
||||
}
|
||||
|
||||
return list(get_representations(
|
||||
project_name,
|
||||
context_filters=context_filters
|
||||
))
|
||||
|
||||
def err_message(self):
|
||||
return (
|
||||
"Error while trying to load a representation.\n"
|
||||
"Either the subset wasn't published or the template is malformed."
|
||||
"\n\n"
|
||||
"Builder was looking for:\n{attributes}".format(
|
||||
attributes="\n".join([
|
||||
"{}: {}".format(key.title(), value)
|
||||
for key, value in self.data.items()]
|
||||
)
|
||||
)
|
||||
)
|
||||
placeholder_item = placeholder_items[0]
|
||||
window = WorkfileBuildPlaceholderDialog(host, builder)
|
||||
window.set_update_mode(placeholder_item)
|
||||
window.exec_()
|
||||
|
|
@ -1,10 +1,11 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
import openpype.api
|
||||
|
||||
from openpype.pipeline import publish
|
||||
from openpype.hosts.resolve.api.lib import get_project_manager
|
||||
|
||||
|
||||
class ExtractWorkfile(openpype.api.Extractor):
|
||||
class ExtractWorkfile(publish.Extractor):
|
||||
"""
|
||||
Extractor export DRP workfile file representation
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ from openpype.pipeline import (
|
|||
register_creator_plugin_path,
|
||||
legacy_io,
|
||||
)
|
||||
from openpype.host import HostBase, INewPublisher
|
||||
from openpype.host import HostBase, IPublishHost
|
||||
|
||||
|
||||
ROOT_DIR = os.path.dirname(os.path.dirname(
|
||||
|
|
@ -19,7 +19,7 @@ PUBLISH_PATH = os.path.join(ROOT_DIR, "plugins", "publish")
|
|||
CREATE_PATH = os.path.join(ROOT_DIR, "plugins", "create")
|
||||
|
||||
|
||||
class TrayPublisherHost(HostBase, INewPublisher):
|
||||
class TrayPublisherHost(HostBase, IPublishHost):
|
||||
name = "traypublisher"
|
||||
|
||||
def install(self):
|
||||
|
|
|
|||
|
|
@ -6,10 +6,10 @@ import unreal
|
|||
from unreal import EditorAssetLibrary as eal
|
||||
from unreal import EditorLevelLibrary as ell
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import publish
|
||||
|
||||
|
||||
class ExtractCamera(openpype.api.Extractor):
|
||||
class ExtractCamera(publish.Extractor):
|
||||
"""Extract a camera."""
|
||||
|
||||
label = "Extract Camera"
|
||||
|
|
|
|||
|
|
@ -3,18 +3,15 @@ import os
|
|||
import json
|
||||
import math
|
||||
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
import unreal
|
||||
from unreal import EditorLevelLibrary as ell
|
||||
from unreal import EditorAssetLibrary as eal
|
||||
|
||||
from openpype.client import get_representation_by_name
|
||||
import openpype.api
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.pipeline import legacy_io, publish
|
||||
|
||||
|
||||
class ExtractLayout(openpype.api.Extractor):
|
||||
class ExtractLayout(publish.Extractor):
|
||||
"""Extract a layout."""
|
||||
|
||||
label = "Extract Layout"
|
||||
|
|
|
|||
|
|
@ -5,10 +5,10 @@ import os
|
|||
import unreal
|
||||
from unreal import MaterialEditingLibrary as mat_lib
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import publish
|
||||
|
||||
|
||||
class ExtractLook(openpype.api.Extractor):
|
||||
class ExtractLook(publish.Extractor):
|
||||
"""Extract look."""
|
||||
|
||||
label = "Extract Look"
|
||||
|
|
|
|||
|
|
@ -2,10 +2,10 @@ from pathlib import Path
|
|||
|
||||
import unreal
|
||||
|
||||
import openpype.api
|
||||
from openpype.pipeline import publish
|
||||
|
||||
|
||||
class ExtractRender(openpype.api.Extractor):
|
||||
class ExtractRender(publish.Extractor):
|
||||
"""Extract render."""
|
||||
|
||||
label = "Extract Render"
|
||||
|
|
|
|||
|
|
@ -9,6 +9,49 @@ import six
|
|||
import clique
|
||||
|
||||
|
||||
def get_attributes_keys(attribute_definitions):
|
||||
"""Collect keys from list of attribute definitions.
|
||||
|
||||
Args:
|
||||
attribute_definitions (List[AbstractAttrDef]): Objects of attribute
|
||||
definitions.
|
||||
|
||||
Returns:
|
||||
Set[str]: Keys that will be created using passed attribute definitions.
|
||||
"""
|
||||
|
||||
keys = set()
|
||||
if not attribute_definitions:
|
||||
return keys
|
||||
|
||||
for attribute_def in attribute_definitions:
|
||||
if not isinstance(attribute_def, UIDef):
|
||||
keys.add(attribute_def.key)
|
||||
return keys
|
||||
|
||||
|
||||
def get_default_values(attribute_definitions):
|
||||
"""Receive default values for attribute definitions.
|
||||
|
||||
Args:
|
||||
attribute_definitions (List[AbstractAttrDef]): Attribute definitions for
|
||||
which default values should be collected.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Default values for passed attribute definitions.
|
||||
"""
|
||||
|
||||
output = {}
|
||||
if not attribute_definitions:
|
||||
return output
|
||||
|
||||
for attr_def in attribute_definitions:
|
||||
# Skip UI definitions
|
||||
if not isinstance(attr_def, UIDef):
|
||||
output[attr_def.key] = attr_def.default
|
||||
return output
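# Hedged usage sketch (not part of the commit) for the two helpers above.
# '_DemoDef' is a stand-in for any concrete attribute definition exposing
# 'key' and 'default'; real code would use the AbstractAttrDef subclasses
# defined in this module.
def _demo_attribute_helpers():
    class _DemoDef:
        def __init__(self, key, default):
            self.key = key
            self.default = default

    attr_defs = [_DemoDef("review", True), _DemoDef("frame_start", 1001)]
    assert get_attributes_keys(attr_defs) == {"review", "frame_start"}
    assert get_default_values(attr_defs) == {
        "review": True, "frame_start": 1001
    }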
|
||||
|
||||
|
||||
class AbstractAttrDefMeta(ABCMeta):
|
||||
"""Meta class to validate existence of 'key' attribute.
|
||||
|
||||
|
|
|
|||
|
|
@ -21,6 +21,9 @@ from openpype.pipeline import AvalonMongoDB
|
|||
from openpype.settings import get_project_settings
|
||||
from openpype.modules.kitsu.utils.credentials import validate_credentials
|
||||
|
||||
from openpype.lib import Logger
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
# Accepted naming pattern for OP
|
||||
naming_pattern = re.compile("^[a-zA-Z0-9_.]*$")
|
||||
|
|
@ -230,7 +233,6 @@ def update_op_assets(
|
|||
},
|
||||
)
|
||||
)
|
||||
|
||||
return assets_with_update
|
||||
|
||||
|
||||
|
|
@ -248,7 +250,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
|
|||
project_name = project["name"]
|
||||
project_doc = get_project(project_name)
|
||||
if not project_doc:
|
||||
print(f"Creating project '{project_name}'")
|
||||
log.info(f"Creating project '{project_name}'")
|
||||
project_doc = create_project(project_name, project_name)
|
||||
|
||||
# Project data and tasks
|
||||
|
|
@ -268,12 +270,18 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
|
|||
{
|
||||
"code": project_code,
|
||||
"fps": float(project["fps"]),
|
||||
"resolutionWidth": int(project["resolution"].split("x")[0]),
|
||||
"resolutionHeight": int(project["resolution"].split("x")[1]),
|
||||
"zou_id": project["id"],
|
||||
}
|
||||
)
|
||||
|
||||
match_res = re.match(r"(\d+)x(\d+)", project["resolution"])
|
||||
if match_res:
|
||||
project_data['resolutionWidth'] = int(match_res.group(1))
|
||||
project_data['resolutionHeight'] = int(match_res.group(2))
|
||||
else:
|
||||
log.warning(f"\'{project['resolution']}\' does not match the expected"
|
||||
" format for the resolution, for example: 1920x1080")
|
||||
|
||||
return UpdateOne(
|
||||
{"_id": project_doc["_id"]},
|
||||
{
|
||||
|
|
@ -334,7 +342,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
|
|||
if not project:
|
||||
project = gazu.project.get_project_by_name(project["name"])
|
||||
|
||||
print(f"Synchronizing {project['name']}...")
|
||||
log.info(f"Synchronizing {project['name']}...")
|
||||
|
||||
# Get all assets from zou
|
||||
all_assets = gazu.asset.all_assets_for_project(project)
|
||||
|
|
|
|||
|
|
@ -7,7 +7,11 @@ from uuid import uuid4
|
|||
from contextlib import contextmanager
|
||||
|
||||
from openpype.client import get_assets
|
||||
from openpype.host import INewPublisher
|
||||
from openpype.settings import (
|
||||
get_system_settings,
|
||||
get_project_settings
|
||||
)
|
||||
from openpype.host import IPublishHost
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.pipeline.mongodb import (
|
||||
AvalonMongoDB,
|
||||
|
|
@ -20,11 +24,6 @@ from .creator_plugins import (
|
|||
discover_creator_plugins,
|
||||
)
|
||||
|
||||
from openpype.api import (
|
||||
get_system_settings,
|
||||
get_project_settings
|
||||
)
|
||||
|
||||
UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"])
|
||||
|
||||
|
||||
|
|
@ -402,8 +401,12 @@ class CreatedInstance:
|
|||
self.creator = creator
|
||||
|
||||
# Instance members may have actions on them
|
||||
# TODO implement members logic
|
||||
self._members = []
|
||||
|
||||
# Data that can be used for lifetime of object
|
||||
self._transient_data = {}
|
||||
|
||||
# Create a copy of passed data to avoid changing them on the fly
|
||||
data = copy.deepcopy(data or {})
|
||||
# Store original value of passed data
|
||||
|
|
@ -596,6 +599,26 @@ class CreatedInstance:
|
|||
|
||||
return self
|
||||
|
||||
@property
|
||||
def transient_data(self):
|
||||
"""Data stored for lifetime of instance object.
|
||||
|
||||
This data is not stored to the scene and will be lost on object
|
||||
deletion.
|
||||
|
||||
Can be used to store objects. In some host implementations it is not
|
||||
possible to reference an object in the scene with a unique identifier
|
||||
(e.g. a node in Fusion). In that case it is handy to store the object
|
||||
here. Should be used that way only if instance data are stored on the
|
||||
node itself.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Dictionary object where you can store data related
|
||||
to instance for lifetime of instance object.
|
||||
"""
|
||||
|
||||
return self._transient_data
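# Hedged usage sketch (not part of the commit); a host Creator might keep a
# live scene object here for the duration of publishing. The names below are
# illustrative only:
#
#     instance = CreatedInstance(self.family, subset_name, data, self)
#     instance.transient_data["node"] = node  # never serialized to the scene
#
# The collector updated in this commit then copies the dictionary to
# instance.data["transientData"] for pyblish plugins.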
|
||||
|
||||
def changes(self):
|
||||
"""Calculate and return changes."""
|
||||
|
||||
|
|
@ -771,7 +794,7 @@ class CreateContext:
|
|||
"""
|
||||
|
||||
missing = set(
|
||||
INewPublisher.get_missing_publish_methods(host)
|
||||
IPublishHost.get_missing_publish_methods(host)
|
||||
)
|
||||
return missing
|
||||
|
||||
|
|
|
|||
|
|
@ -81,6 +81,13 @@ class BaseCreator:
|
|||
# - we may use UI inside processing this attribute should be checked
|
||||
self.headless = headless
|
||||
|
||||
self.apply_settings(project_settings, system_settings)
|
||||
|
||||
def apply_settings(self, project_settings, system_settings):
|
||||
"""Method called on initialization of plugin to apply settings."""
|
||||
|
||||
pass
|
||||
|
||||
@property
|
||||
def identifier(self):
|
||||
"""Identifier of creator (must be unique).
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ from .utils import (
|
|||
InvalidRepresentationContext,
|
||||
|
||||
get_repres_contexts,
|
||||
get_contexts_for_repre_docs,
|
||||
get_subset_contexts,
|
||||
get_representation_context,
|
||||
|
||||
|
|
@ -54,6 +55,7 @@ __all__ = (
|
|||
"InvalidRepresentationContext",
|
||||
|
||||
"get_repres_contexts",
|
||||
"get_contexts_for_repre_docs",
|
||||
"get_subset_contexts",
|
||||
"get_representation_context",
|
||||
|
||||
|
|
|
|||
|
|
@ -87,13 +87,20 @@ def get_repres_contexts(representation_ids, dbcon=None):
|
|||
if not dbcon:
|
||||
dbcon = legacy_io
|
||||
|
||||
contexts = {}
|
||||
if not representation_ids:
|
||||
return contexts
|
||||
return {}
|
||||
|
||||
project_name = dbcon.active_project()
|
||||
repre_docs = get_representations(project_name, representation_ids)
|
||||
|
||||
return get_contexts_for_repre_docs(project_name, repre_docs)
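# Note (added commentary, not part of the commit): the lookup is split so
# callers that already hold representation documents can build contexts
# directly through get_contexts_for_repre_docs() without another database
# round-trip.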
|
||||
|
||||
|
||||
def get_contexts_for_repre_docs(project_name, repre_docs):
|
||||
contexts = {}
|
||||
if not repre_docs:
|
||||
return contexts
|
||||
|
||||
repre_docs_by_id = {}
|
||||
version_ids = set()
|
||||
for repre_doc in repre_docs:
|
||||
|
|
|
|||
|
|
@ -1,528 +0,0 @@
|
|||
import os
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
import six
|
||||
import logging
|
||||
from functools import reduce
|
||||
|
||||
from openpype.client import (
|
||||
get_asset_by_name,
|
||||
get_linked_assets,
|
||||
)
|
||||
from openpype.settings import get_project_settings
|
||||
from openpype.lib import (
|
||||
StringTemplate,
|
||||
Logger,
|
||||
filter_profiles,
|
||||
)
|
||||
from openpype.pipeline import legacy_io, Anatomy
|
||||
from openpype.pipeline.load import (
|
||||
get_loaders_by_name,
|
||||
get_representation_context,
|
||||
load_with_repre_context,
|
||||
)
|
||||
|
||||
from .build_template_exceptions import (
|
||||
TemplateAlreadyImported,
|
||||
TemplateLoadingFailed,
|
||||
TemplateProfileNotFound,
|
||||
TemplateNotFound
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def update_representations(entities, entity):
|
||||
if entity['context']['subset'] not in entities:
|
||||
entities[entity['context']['subset']] = entity
|
||||
else:
|
||||
current = entities[entity['context']['subset']]
|
||||
incomming = entity
|
||||
entities[entity['context']['subset']] = max(
|
||||
current, incomming,
|
||||
key=lambda entity: entity["context"].get("version", -1))
|
||||
|
||||
return entities
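# Illustration (added commentary, not part of the original file) of how this
# reducer behaves when applied with functools.reduce below: only the highest
# version per subset survives.
#
#     repres = [
#         {"context": {"subset": "modelMain", "version": 1}},
#         {"context": {"subset": "modelMain", "version": 3}},
#         {"context": {"subset": "lookMain", "version": 2}},
#     ]
#     latest = reduce(update_representations, repres, {})
#     latest["modelMain"]["context"]["version"]  # -> 3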
|
||||
|
||||
|
||||
def parse_loader_args(loader_args):
|
||||
if not loader_args:
|
||||
return dict()
|
||||
try:
|
||||
parsed_args = eval(loader_args)
|
||||
if not isinstance(parsed_args, dict):
|
||||
return dict()
|
||||
else:
|
||||
return parsed_args
|
||||
except Exception as err:
|
||||
print(
|
||||
"Error while parsing loader arguments '{}'.\n{}: {}\n\n"
|
||||
"Continuing with default arguments. . .".format(
|
||||
loader_args,
|
||||
err.__class__.__name__,
|
||||
err))
|
||||
return dict()
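# Illustration (added commentary, not part of the original file): loader args
# are stored as a string on the placeholder and evaluated into a dict;
# anything that is not a dict, or fails to evaluate, falls back to {}.
#
#     parse_loader_args('{"namespace": "hero"}')  # -> {"namespace": "hero"}
#     parse_loader_args("")                       # -> {}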
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class AbstractTemplateLoader:
|
||||
"""
|
||||
Abstraction of Template Loader.
|
||||
Properties:
|
||||
template_path : property to get current template path
|
||||
Methods:
|
||||
import_template : Abstract Method. Used to load template,
|
||||
depending on current host
|
||||
get_template_nodes : Abstract Method. Used to query nodes acting
|
||||
as placeholders. Depending on current host
|
||||
"""
|
||||
|
||||
_log = None
|
||||
|
||||
def __init__(self, placeholder_class):
|
||||
# TODO template loader should expect host as and argument
|
||||
# - host have all responsibility for most of code (also provide
|
||||
# placeholder class)
|
||||
# - also have responsibility for current context
|
||||
# - this won't work in DCCs where multiple workfiles with
|
||||
# different contexts can be opened at single time
|
||||
# - template loader should have ability to change context
|
||||
project_name = legacy_io.active_project()
|
||||
asset_name = legacy_io.Session["AVALON_ASSET"]
|
||||
|
||||
self.loaders_by_name = get_loaders_by_name()
|
||||
self.current_asset = asset_name
|
||||
self.project_name = project_name
|
||||
self.host_name = legacy_io.Session["AVALON_APP"]
|
||||
self.task_name = legacy_io.Session["AVALON_TASK"]
|
||||
self.placeholder_class = placeholder_class
|
||||
self.current_asset_doc = get_asset_by_name(project_name, asset_name)
|
||||
self.task_type = (
|
||||
self.current_asset_doc
|
||||
.get("data", {})
|
||||
.get("tasks", {})
|
||||
.get(self.task_name, {})
|
||||
.get("type")
|
||||
)
|
||||
|
||||
self.log.info(
|
||||
"BUILDING ASSET FROM TEMPLATE :\n"
|
||||
"Starting templated build for {asset} in {project}\n\n"
|
||||
"Asset : {asset}\n"
|
||||
"Task : {task_name} ({task_type})\n"
|
||||
"Host : {host}\n"
|
||||
"Project : {project}\n".format(
|
||||
asset=self.current_asset,
|
||||
host=self.host_name,
|
||||
project=self.project_name,
|
||||
task_name=self.task_name,
|
||||
task_type=self.task_type
|
||||
))
|
||||
# Skip if there is no loader
|
||||
if not self.loaders_by_name:
|
||||
self.log.warning(
|
||||
"There is no registered loaders. No assets will be loaded")
|
||||
return
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
def template_already_imported(self, err_msg):
|
||||
"""In case template was already loaded.
|
||||
Raise the error as a default action.
|
||||
Override this method in your template loader implementation
|
||||
to manage this case."""
|
||||
self.log.error("{}: {}".format(
|
||||
err_msg.__class__.__name__,
|
||||
err_msg))
|
||||
raise TemplateAlreadyImported(err_msg)
|
||||
|
||||
def template_loading_failed(self, err_msg):
|
||||
"""In case template loading failed
|
||||
Raise the error as a default action.
|
||||
Override this method in your template loader implementation
|
||||
to manage this case.
|
||||
"""
|
||||
self.log.error("{}: {}".format(
|
||||
err_msg.__class__.__name__,
|
||||
err_msg))
|
||||
raise TemplateLoadingFailed(err_msg)
|
||||
|
||||
@property
|
||||
def template_path(self):
|
||||
"""
|
||||
Property returning template path. Avoiding setter.
|
||||
Getting template path from open pype settings based on current avalon
|
||||
session and solving the path variables if needed.
|
||||
Returns:
|
||||
str: Solved template path
|
||||
Raises:
|
||||
TemplateProfileNotFound: No profile found from settings for
|
||||
current avalon session
|
||||
KeyError: Could not solve path because a key does not exists
|
||||
in avalon context
|
||||
TemplateNotFound: Solved path does not exists on current filesystem
|
||||
"""
|
||||
project_name = self.project_name
|
||||
host_name = self.host_name
|
||||
task_name = self.task_name
|
||||
task_type = self.task_type
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
project_settings = get_project_settings(project_name)
|
||||
|
||||
build_info = project_settings[host_name]["templated_workfile_build"]
|
||||
profile = filter_profiles(
|
||||
build_info["profiles"],
|
||||
{
|
||||
"task_types": task_type,
|
||||
"task_names": task_name
|
||||
}
|
||||
)
|
||||
|
||||
if not profile:
|
||||
raise TemplateProfileNotFound(
|
||||
"No matching profile found for task '{}' of type '{}' "
|
||||
"with host '{}'".format(task_name, task_type, host_name)
|
||||
)
|
||||
|
||||
path = profile["path"]
|
||||
if not path:
|
||||
raise TemplateLoadingFailed(
|
||||
"Template path is not set.\n"
|
||||
"Path need to be set in {}\\Template Workfile Build "
|
||||
"Settings\\Profiles".format(host_name.title()))
|
||||
|
||||
# Try fill path with environments and anatomy roots
|
||||
fill_data = {
|
||||
key: value
|
||||
for key, value in os.environ.items()
|
||||
}
|
||||
fill_data["root"] = anatomy.roots
|
||||
result = StringTemplate.format_template(path, fill_data)
|
||||
if result.solved:
|
||||
path = result.normalized()
|
||||
|
||||
if path and os.path.exists(path):
|
||||
self.log.info("Found template at: '{}'".format(path))
|
||||
return path
|
||||
|
||||
solved_path = None
|
||||
while True:
|
||||
try:
|
||||
solved_path = anatomy.path_remapper(path)
|
||||
except KeyError as missing_key:
|
||||
raise KeyError(
|
||||
"Could not solve key '{}' in template path '{}'".format(
|
||||
missing_key, path))
|
||||
|
||||
if solved_path is None:
|
||||
solved_path = path
|
||||
if solved_path == path:
|
||||
break
|
||||
path = solved_path
|
||||
|
||||
solved_path = os.path.normpath(solved_path)
|
||||
if not os.path.exists(solved_path):
|
||||
raise TemplateNotFound(
|
||||
"Template found in openPype settings for task '{}' with host "
|
||||
"'{}' does not exists. (Not found : {})".format(
|
||||
task_name, host_name, solved_path))
|
||||
|
||||
self.log.info("Found template at: '{}'".format(solved_path))
|
||||
|
||||
return solved_path
|
||||
|
||||
def populate_template(self, ignored_ids=None):
|
||||
"""
|
||||
Use template placeholders to load assets and parent them in hierarchy
|
||||
Arguments :
|
||||
ignored_ids (list): Representation ids that should be skipped.
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
loaders_by_name = self.loaders_by_name
|
||||
current_asset_doc = self.current_asset_doc
|
||||
linked_assets = get_linked_assets(current_asset_doc)
|
||||
|
||||
ignored_ids = ignored_ids or []
|
||||
placeholders = self.get_placeholders()
|
||||
self.log.debug("Placeholders found in template: {}".format(
|
||||
[placeholder.name for placeholder in placeholders]
|
||||
))
|
||||
for placeholder in placeholders:
|
||||
self.log.debug("Start to processing placeholder {}".format(
|
||||
placeholder.name
|
||||
))
|
||||
placeholder_representations = self.get_placeholder_representations(
|
||||
placeholder,
|
||||
current_asset_doc,
|
||||
linked_assets
|
||||
)
|
||||
|
||||
if not placeholder_representations:
|
||||
self.log.info(
|
||||
"There's no representation for this placeholder: "
|
||||
"{}".format(placeholder.name)
|
||||
)
|
||||
continue
|
||||
|
||||
for representation in placeholder_representations:
|
||||
self.preload(placeholder, loaders_by_name, representation)
|
||||
|
||||
if self.load_data_is_incorrect(
|
||||
placeholder,
|
||||
representation,
|
||||
ignored_ids):
|
||||
continue
|
||||
|
||||
self.log.info(
|
||||
"Loading {}_{} with loader {}\n"
|
||||
"Loader arguments used : {}".format(
|
||||
representation['context']['asset'],
|
||||
representation['context']['subset'],
|
||||
placeholder.loader_name,
|
||||
placeholder.loader_args))
|
||||
|
||||
try:
|
||||
container = self.load(
|
||||
placeholder, loaders_by_name, representation)
|
||||
except Exception:
|
||||
self.load_failed(placeholder, representation)
|
||||
else:
|
||||
self.load_succeed(placeholder, container)
|
||||
finally:
|
||||
self.postload(placeholder)
|
||||
|
||||
def get_placeholder_representations(
|
||||
self, placeholder, current_asset_doc, linked_asset_docs
|
||||
):
|
||||
placeholder_representations = placeholder.get_representations(
|
||||
current_asset_doc,
|
||||
linked_asset_docs
|
||||
)
|
||||
for repre_doc in reduce(
|
||||
update_representations,
|
||||
placeholder_representations,
|
||||
dict()
|
||||
).values():
|
||||
yield repre_doc
|
||||
|
||||
def load_data_is_incorrect(
|
||||
self, placeholder, last_representation, ignored_ids):
|
||||
if not last_representation:
|
||||
self.log.warning(placeholder.err_message())
|
||||
return True
|
||||
if (str(last_representation['_id']) in ignored_ids):
|
||||
print("Ignoring : ", last_representation['_id'])
|
||||
return True
|
||||
return False
|
||||
|
||||
def preload(self, placeholder, loaders_by_name, last_representation):
|
||||
pass
|
||||
|
||||
def load(self, placeholder, loaders_by_name, last_representation):
|
||||
repre = get_representation_context(last_representation)
|
||||
return load_with_repre_context(
|
||||
loaders_by_name[placeholder.loader_name],
|
||||
repre,
|
||||
options=parse_loader_args(placeholder.loader_args))
|
||||
|
||||
def load_succeed(self, placeholder, container):
|
||||
placeholder.parent_in_hierarchy(container)
|
||||
|
||||
def load_failed(self, placeholder, last_representation):
|
||||
self.log.warning(
|
||||
"Got error trying to load {}:{} with {}".format(
|
||||
last_representation['context']['asset'],
|
||||
last_representation['context']['subset'],
|
||||
placeholder.loader_name
|
||||
),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
def postload(self, placeholder):
|
||||
placeholder.clean()
|
||||
|
||||
def update_missing_containers(self):
|
||||
loaded_containers_ids = self.get_loaded_containers_by_id()
|
||||
self.populate_template(ignored_ids=loaded_containers_ids)
|
||||
|
||||
def get_placeholders(self):
|
||||
placeholders = map(self.placeholder_class, self.get_template_nodes())
|
||||
valid_placeholders = filter(
|
||||
lambda i: i.is_valid,
|
||||
placeholders
|
||||
)
|
||||
sorted_placeholders = list(sorted(
|
||||
valid_placeholders,
|
||||
key=lambda i: i.order
|
||||
))
|
||||
return sorted_placeholders
|
||||
|
||||
@abstractmethod
|
||||
def get_loaded_containers_by_id(self):
|
||||
"""
|
||||
Collect already loaded containers for updating scene
|
||||
Return:
|
||||
dict (string, node): A dictionary with id as key
|
||||
and containers as value
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def import_template(self, template_path):
|
||||
"""
|
||||
Import template in current host
|
||||
Args:
|
||||
template_path (str): fullpath to current task and
|
||||
host's template file
|
||||
Return:
|
||||
None
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_template_nodes(self):
|
||||
"""
|
||||
Returning a list of nodes acting as host placeholders for
|
||||
templating. The data representation is defined by the host implementation.
|
||||
AbstractLoadTemplate (and LoadTemplate) won't directly manipulate nodes
|
||||
Args :
|
||||
None
|
||||
Returns:
|
||||
list(AnyNode): Nodes acting as placeholders
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class AbstractPlaceholder:
|
||||
"""Abstraction of placeholders logic.
|
||||
|
||||
Properties:
|
||||
required_keys: A list of mandatory keys to describe placeholder
|
||||
and assets to load.
|
||||
optional_keys: A list of optional keys to describe
|
||||
placeholder and assets to load
|
||||
loader_name: Name of linked loader to use while loading assets
|
||||
|
||||
Args:
|
||||
identifier (str): Placeholder identifier. Should be possible to be
|
||||
used as identifier in "a scene" (e.g. unique node name).
|
||||
"""
|
||||
|
||||
required_keys = {
|
||||
"builder_type",
|
||||
"family",
|
||||
"representation",
|
||||
"order",
|
||||
"loader",
|
||||
"loader_args"
|
||||
}
|
||||
optional_keys = {}
|
||||
|
||||
def __init__(self, identifier):
|
||||
self._log = None
|
||||
self._name = identifier
|
||||
self.get_data(identifier)
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(repr(self))
|
||||
return self._log
|
||||
|
||||
def __repr__(self):
|
||||
return "< {} {} >".format(self.__class__.__name__, self.name)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def loader_args(self):
|
||||
return self.data["loader_args"]
|
||||
|
||||
@property
|
||||
def builder_type(self):
|
||||
return self.data["builder_type"]
|
||||
|
||||
@property
|
||||
def order(self):
|
||||
return self.data["order"]
|
||||
|
||||
@property
|
||||
def loader_name(self):
|
||||
"""Return placeholder loader name.
|
||||
|
||||
Returns:
|
||||
str: Loader name that will be used to load placeholder
|
||||
representations.
|
||||
"""
|
||||
|
||||
return self.data["loader"]
|
||||
|
||||
@property
|
||||
def is_valid(self):
|
||||
"""Test validity of placeholder.
|
||||
|
||||
i.e.: every required key exists in placeholder data
|
||||
|
||||
Returns:
|
||||
bool: True if every key is in data
|
||||
"""
|
||||
|
||||
if set(self.required_keys).issubset(self.data.keys()):
|
||||
self.log.debug("Valid placeholder : {}".format(self.name))
|
||||
return True
|
||||
self.log.info("Placeholder is not valid : {}".format(self.name))
|
||||
return False
|
||||
|
||||
@abstractmethod
|
||||
def parent_in_hierarchy(self, container):
|
||||
"""Place loaded container in correct hierarchy given by placeholder
|
||||
|
||||
Args:
|
||||
container (Dict[str, Any]): Loaded container created by loader.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def clean(self):
|
||||
"""Clean placeholder from hierarchy after loading assets."""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_representations(self, current_asset_doc, linked_asset_docs):
|
||||
"""Query representations based on placeholder data.
|
||||
|
||||
Args:
|
||||
current_asset_doc (Dict[str, Any]): Document of current
|
||||
context asset.
|
||||
linked_asset_docs (List[Dict[str, Any]]): Documents of assets
|
||||
linked to current context asset.
|
||||
|
||||
Returns:
|
||||
Iterable[Dict[str, Any]]: Representations that are matching
|
||||
placeholder filters.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_data(self, identifier):
|
||||
"""Collect information about placeholder by identifier.
|
||||
|
||||
Args:
|
||||
identifier (str): A unique placeholder identifier defined by
|
||||
implementation.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
|
@ -1,72 +0,0 @@
|
|||
import os
|
||||
from importlib import import_module
|
||||
from openpype.lib import classes_from_module
|
||||
from openpype.host import HostBase
|
||||
from openpype.pipeline import registered_host
|
||||
|
||||
from .abstract_template_loader import (
|
||||
AbstractPlaceholder,
|
||||
AbstractTemplateLoader)
|
||||
|
||||
from .build_template_exceptions import (
|
||||
TemplateLoadingFailed,
|
||||
TemplateAlreadyImported,
|
||||
MissingHostTemplateModule,
|
||||
MissingTemplatePlaceholderClass,
|
||||
MissingTemplateLoaderClass
|
||||
)
|
||||
|
||||
_module_path_format = 'openpype.hosts.{host}.api.template_loader'
|
||||
|
||||
|
||||
def build_workfile_template(*args):
|
||||
template_loader = build_template_loader()
|
||||
try:
|
||||
template_loader.import_template(template_loader.template_path)
|
||||
except TemplateAlreadyImported as err:
|
||||
template_loader.template_already_imported(err)
|
||||
except TemplateLoadingFailed as err:
|
||||
template_loader.template_loading_failed(err)
|
||||
else:
|
||||
template_loader.populate_template()
|
||||
|
||||
|
||||
def update_workfile_template(*args):
|
||||
template_loader = build_template_loader()
|
||||
template_loader.update_missing_containers()
|
||||
|
||||
|
||||
def build_template_loader():
|
||||
# TODO refactor to use advantage of 'HostBase' and don't import dynamically
|
||||
# - hosts should have methods that gives option to return builders
|
||||
host = registered_host()
|
||||
if isinstance(host, HostBase):
|
||||
host_name = host.name
|
||||
else:
|
||||
host_name = os.environ.get("AVALON_APP")
|
||||
if not host_name:
|
||||
host_name = host.__name__.split(".")[-2]
|
||||
|
||||
module_path = _module_path_format.format(host=host_name)
|
||||
module = import_module(module_path)
|
||||
if not module:
|
||||
raise MissingHostTemplateModule(
|
||||
"No template loader found for host {}".format(host_name))
|
||||
|
||||
template_loader_class = classes_from_module(
|
||||
AbstractTemplateLoader,
|
||||
module
|
||||
)
|
||||
template_placeholder_class = classes_from_module(
|
||||
AbstractPlaceholder,
|
||||
module
|
||||
)
|
||||
|
||||
if not template_loader_class:
|
||||
raise MissingTemplateLoaderClass()
|
||||
template_loader_class = template_loader_class[0]
|
||||
|
||||
if not template_placeholder_class:
|
||||
raise MissingTemplatePlaceholderClass()
|
||||
template_placeholder_class = template_placeholder_class[0]
|
||||
return template_loader_class(template_placeholder_class)
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
class MissingHostTemplateModule(Exception):
|
||||
"""Error raised when expected module does not exists"""
|
||||
pass
|
||||
|
||||
|
||||
class MissingTemplatePlaceholderClass(Exception):
|
||||
"""Error raised when module doesn't implement a placeholder class"""
|
||||
pass
|
||||
|
||||
|
||||
class MissingTemplateLoaderClass(Exception):
|
||||
"""Error raised when module doesn't implement a template loader class"""
|
||||
pass
|
||||
|
||||
|
||||
class TemplateNotFound(Exception):
|
||||
"""Exception raised when template does not exist."""
|
||||
pass
|
||||
|
||||
|
||||
class TemplateProfileNotFound(Exception):
|
||||
"""Exception raised when current profile
|
||||
doesn't match any template profile"""
|
||||
pass
|
||||
|
||||
|
||||
class TemplateAlreadyImported(Exception):
|
||||
"""Error raised when Template was already imported by host for
|
||||
this session"""
|
||||
pass
|
||||
|
||||
|
||||
class TemplateLoadingFailed(Exception):
|
||||
"""Error raised whend Template loader was unable to load the template"""
|
||||
pass
|
||||
|
|
@ -1,3 +1,14 @@
|
|||
"""Workfile build based on settings.
|
||||
|
||||
Workfile builder will build the workfile based on project settings. The advantage is that
|
||||
it needs only access to settings. The disadvantage is that it is hard to focus the
|
||||
build per context and to be explicit about loaded content.
|
||||
|
||||
A more explicit workfile build is available through 'AbstractTemplateBuilder'
|
||||
from '~/openpype/pipeline/workfile/workfile_template_builder', which gives
|
||||
more control over how the build happens but requires more code to achieve it.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import collections
|
||||
|
|
|
|||
1451 openpype/pipeline/workfile/workfile_template_builder.py (new file; diff suppressed because it is too large)
|
|
@ -25,7 +25,9 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
|
|||
for created_instance in create_context.instances:
|
||||
instance_data = created_instance.data_to_store()
|
||||
if instance_data["active"]:
|
||||
self.create_instance(context, instance_data)
|
||||
self.create_instance(
|
||||
context, instance_data, created_instance.transient_data
|
||||
)
|
||||
|
||||
# Update global data to context
|
||||
context.data.update(create_context.context_data_to_store())
|
||||
|
|
@ -37,7 +39,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
|
|||
legacy_io.Session[key] = value
|
||||
os.environ[key] = value
|
||||
|
||||
def create_instance(self, context, in_data):
|
||||
def create_instance(self, context, in_data, transient_data):
|
||||
subset = in_data["subset"]
|
||||
# If instance data already contain families then use it
|
||||
instance_families = in_data.get("families") or []
|
||||
|
|
@ -56,5 +58,8 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
|
|||
for key, value in in_data.items():
|
||||
if key not in instance.data:
|
||||
instance.data[key] = value
|
||||
|
||||
instance.data["transientData"] = transient_data
|
||||
|
||||
self.log.info("collected instance: {}".format(instance.data))
|
||||
self.log.info("parsing data: {}".format(in_data))
|
||||
|
|
|
|||
|
|
@ -296,7 +296,7 @@ class HSLColor:
|
|||
if "%" in sat_str:
|
||||
sat = float(sat_str.rstrip("%")) / 100
|
||||
else:
|
||||
sat = float(sat)
|
||||
sat = float(sat_str)
|
||||
|
||||
if "%" in light_str:
|
||||
light = float(light_str.rstrip("%")) / 100
|
||||
|
|
@ -337,8 +337,8 @@ class HSLAColor:
|
|||
as float (0-1 range).
|
||||
|
||||
Examples:
|
||||
"hsl(27, 0.7, 0.3)"
|
||||
"hsl(27, 70%, 30%)"
|
||||
"hsla(27, 0.7, 0.3, 0.5)"
|
||||
"hsla(27, 70%, 30%, 0.5)"
|
||||
"""
|
||||
def __init__(self, value):
|
||||
modified_color = value.lower().strip()
|
||||
|
|
@ -350,7 +350,7 @@ class HSLAColor:
|
|||
if "%" in sat_str:
|
||||
sat = float(sat_str.rstrip("%")) / 100
|
||||
else:
|
||||
sat = float(sat)
|
||||
sat = float(sat_str)
|
||||
|
||||
if "%" in light_str:
|
||||
light = float(light_str.rstrip("%")) / 100
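# Added commentary (not part of the commit): in both HSL and HSLA parsing the
# non-percentage branch previously did 'sat = float(sat)', converting a name
# that does not appear to be assigned at that point, instead of the parsed
# component. It now converts the parsed string, so a value such as "0.7"
# resolves as:
#     sat = float("0.7")  # -> 0.7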
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@
|
|||
"color": {
|
||||
"font": "#D3D8DE",
|
||||
"font-hover": "#F0F2F5",
|
||||
"font-disabled": "#99A3B2",
|
||||
"font-disabled": "#5b6779",
|
||||
"font-view-selection": "#ffffff",
|
||||
"font-view-hover": "#F0F2F5",
|
||||
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ from openpype.tools.utils import (
|
|||
BaseClickableFrame,
|
||||
set_style_property,
|
||||
)
|
||||
from openpype.style import get_objected_colors
|
||||
from openpype.pipeline.create import (
|
||||
SUBSET_NAME_ALLOWED_SYMBOLS,
|
||||
TaskNotSetError,
|
||||
|
|
@ -125,28 +126,21 @@ class PublishIconBtn(IconButton):
|
|||
def __init__(self, pixmap_path, *args, **kwargs):
|
||||
super(PublishIconBtn, self).__init__(*args, **kwargs)
|
||||
|
||||
loaded_image = QtGui.QImage(pixmap_path)
|
||||
colors = get_objected_colors()
|
||||
icon = self.generate_icon(
|
||||
pixmap_path,
|
||||
enabled_color=colors["font"].get_qcolor(),
|
||||
disabled_color=colors["font-disabled"].get_qcolor())
|
||||
self.setIcon(icon)
|
||||
|
||||
pixmap = self.paint_image_with_color(loaded_image, QtCore.Qt.white)
|
||||
|
||||
self._base_image = loaded_image
|
||||
self._enabled_icon = QtGui.QIcon(pixmap)
|
||||
self._disabled_icon = None
|
||||
|
||||
self.setIcon(self._enabled_icon)
|
||||
|
||||
def get_enabled_icon(self):
|
||||
"""Enabled icon."""
|
||||
return self._enabled_icon
|
||||
|
||||
def get_disabled_icon(self):
|
||||
"""Disabled icon."""
|
||||
if self._disabled_icon is None:
|
||||
pixmap = self.paint_image_with_color(
|
||||
self._base_image, QtCore.Qt.gray
|
||||
)
|
||||
self._disabled_icon = QtGui.QIcon(pixmap)
|
||||
return self._disabled_icon
|
||||
def generate_icon(self, pixmap_path, enabled_color, disabled_color):
|
||||
icon = QtGui.QIcon()
|
||||
image = QtGui.QImage(pixmap_path)
|
||||
enabled_pixmap = self.paint_image_with_color(image, enabled_color)
|
||||
icon.addPixmap(enabled_pixmap, icon.Normal)
|
||||
disabled_pixmap = self.paint_image_with_color(image, disabled_color)
|
||||
icon.addPixmap(disabled_pixmap, icon.Disabled)
|
||||
return icon
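# Added commentary (not part of the commit): building one QIcon with both
# QIcon.Normal and QIcon.Disabled pixmaps lets Qt swap the artwork
# automatically when the button is disabled, which is why the explicit
# setEnabled() override further below is removed.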
|
||||
|
||||
@staticmethod
|
||||
def paint_image_with_color(image, color):
|
||||
|
|
@ -187,13 +181,6 @@ class PublishIconBtn(IconButton):
|
|||
|
||||
return pixmap
|
||||
|
||||
def setEnabled(self, enabled):
|
||||
super(PublishIconBtn, self).setEnabled(enabled)
|
||||
if self.isEnabled():
|
||||
self.setIcon(self.get_enabled_icon())
|
||||
else:
|
||||
self.setIcon(self.get_disabled_icon())
|
||||
|
||||
|
||||
class ResetBtn(PublishIconBtn):
|
||||
"""Publish reset button."""
|
||||
|
|
|
|||
|
|
@ -40,8 +40,6 @@ class SceneInventoryWindow(QtWidgets.QDialog):
|
|||
project_name = os.getenv("AVALON_PROJECT") or "<Project not set>"
|
||||
self.setWindowTitle("Scene Inventory 1.0 - {}".format(project_name))
|
||||
self.setObjectName("SceneInventory")
|
||||
# Maya only property
|
||||
self.setProperty("saveWindowPref", True)
|
||||
|
||||
self.resize(1100, 480)
|
||||
|
||||
|
|
|
|||
5 openpype/tools/workfile_template_build/__init__.py (new file)
|
|
@ -0,0 +1,5 @@
|
|||
from .window import WorkfileBuildPlaceholderDialog
|
||||
|
||||
__all__ = (
|
||||
"WorkfileBuildPlaceholderDialog",
|
||||
)
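# Hedged usage sketch (added commentary, mirrors the host-side helpers added
# elsewhere in this commit); 'NukeTemplateBuilder' is one concrete builder,
# any host template builder is used the same way:
#
#     from openpype.pipeline import registered_host
#     from openpype.tools.workfile_template_build import (
#         WorkfileBuildPlaceholderDialog,
#     )
#
#     host = registered_host()
#     builder = NukeTemplateBuilder(host)
#     window = WorkfileBuildPlaceholderDialog(host, builder)
#     window.exec_()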
|
||||
242 openpype/tools/workfile_template_build/window.py (new file)
|
|
@ -0,0 +1,242 @@
|
|||
from Qt import QtWidgets
|
||||
|
||||
from openpype import style
|
||||
from openpype.lib import Logger
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.widgets.attribute_defs import AttributeDefinitionsWidget
|
||||
|
||||
|
||||
class WorkfileBuildPlaceholderDialog(QtWidgets.QDialog):
|
||||
def __init__(self, host, builder, parent=None):
|
||||
super(WorkfileBuildPlaceholderDialog, self).__init__(parent)
|
||||
self.setWindowTitle("Workfile Placeholder Manager")
|
||||
|
||||
self._log = None
|
||||
|
||||
self._first_show = True
|
||||
self._first_refreshed = False
|
||||
|
||||
self._builder = builder
|
||||
self._host = host
|
||||
# Mode can be 0 (create) or 1 (update)
|
||||
# TODO write it a little bit better
|
||||
self._mode = 0
|
||||
self._update_item = None
|
||||
self._last_selected_plugin = None
|
||||
|
||||
host_name = getattr(self._host, "name", None)
|
||||
if not host_name:
|
||||
host_name = legacy_io.Session.get("AVALON_APP") or "NA"
|
||||
self._host_name = host_name
|
||||
|
||||
plugins_combo = QtWidgets.QComboBox(self)
|
||||
|
||||
content_widget = QtWidgets.QWidget(self)
|
||||
content_layout = QtWidgets.QVBoxLayout(content_widget)
|
||||
content_layout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
btns_widget = QtWidgets.QWidget(self)
|
||||
create_btn = QtWidgets.QPushButton("Create", btns_widget)
|
||||
save_btn = QtWidgets.QPushButton("Save", btns_widget)
|
||||
close_btn = QtWidgets.QPushButton("Close", btns_widget)
|
||||
|
||||
create_btn.setVisible(False)
|
||||
save_btn.setVisible(False)
|
||||
|
||||
btns_layout = QtWidgets.QHBoxLayout(btns_widget)
|
||||
btns_layout.addStretch(1)
|
||||
btns_layout.addWidget(create_btn, 0)
|
||||
btns_layout.addWidget(save_btn, 0)
|
||||
btns_layout.addWidget(close_btn, 0)
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
main_layout.addWidget(plugins_combo, 0)
|
||||
main_layout.addWidget(content_widget, 1)
|
||||
main_layout.addWidget(btns_widget, 0)
|
||||
|
||||
create_btn.clicked.connect(self._on_create_click)
|
||||
save_btn.clicked.connect(self._on_save_click)
|
||||
close_btn.clicked.connect(self._on_close_click)
|
||||
plugins_combo.currentIndexChanged.connect(self._on_plugin_change)
|
||||
|
||||
self._attr_defs_widget = None
|
||||
self._plugins_combo = plugins_combo
|
||||
|
||||
self._content_widget = content_widget
|
||||
self._content_layout = content_layout
|
||||
|
||||
self._create_btn = create_btn
|
||||
self._save_btn = save_btn
|
||||
self._close_btn = close_btn
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
def _clear_content_widget(self):
|
||||
while self._content_layout.count() > 0:
|
||||
item = self._content_layout.takeAt(0)
|
||||
widget = item.widget()
|
||||
if widget:
|
||||
widget.setVisible(False)
|
||||
widget.deleteLater()
|
||||
|
||||
def _add_message_to_content(self, message):
|
||||
msg_label = QtWidgets.QLabel(message, self._content_widget)
|
||||
self._content_layout.addWidget(msg_label, 0)
|
||||
self._content_layout.addStretch(1)
|
||||
|
||||
    def refresh(self):
        self._first_refreshed = True

        self._clear_content_widget()

        if not self._builder:
            self._add_message_to_content((
                "Host \"{}\" does not have implemented logic"
                " for template workfile build."
            ).format(self._host_name))
            self._update_ui_visibility()
            return

        placeholder_plugins = self._builder.placeholder_plugins

        if self._mode == 1:
            plugin = self._builder.placeholder_plugins.get(
                self._last_selected_plugin
            )
            self._create_option_widgets(
                plugin, self._update_item.to_dict()
            )
            self._update_ui_visibility()
            return

        if not placeholder_plugins:
            self._add_message_to_content((
                "Host \"{}\" does not have implemented plugins"
                " for template workfile build."
            ).format(self._host_name))
            self._update_ui_visibility()
            return

        last_selected_plugin = self._last_selected_plugin
        self._last_selected_plugin = None
        self._plugins_combo.clear()
        for identifier, plugin in placeholder_plugins.items():
            label = plugin.label or identifier
            self._plugins_combo.addItem(label, identifier)

        index = self._plugins_combo.findData(last_selected_plugin)
        if index < 0:
            index = 0
        self._plugins_combo.setCurrentIndex(index)
        self._on_plugin_change()

        self._update_ui_visibility()

    def set_create_mode(self):
        if self._mode == 0:
            return

        self._mode = 0
        self._update_item = None
        self.refresh()

    def set_update_mode(self, update_item):
        if self._mode == 1:
            return

        self._mode = 1
        self._update_item = update_item
        if update_item:
            self._last_selected_plugin = update_item.plugin.identifier
            self.refresh()
            return

        self._clear_content_widget()
        self._add_message_to_content((
            "Nothing to update."
            " (You maybe don't have selected placeholder.)"
        ))
        self._update_ui_visibility()

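    # Hypothetical call pattern (illustrative names, not from this file): the
    # host tool is expected to pick the mode before showing the dialog, e.g.
    #     dialog = WorkfileBuildPlaceholderDialog(host, builder)
    #     if selected_placeholder_item is not None:
    #         dialog.set_update_mode(selected_placeholder_item)
    #     else:
    #         dialog.set_create_mode()
    #     dialog.show()
    # where 'selected_placeholder_item' is whatever placeholder item the host
    # currently has selected.
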
    def _create_option_widgets(self, plugin, options=None):
        self._clear_content_widget()
        attr_defs = plugin.get_placeholder_options(options)
        widget = AttributeDefinitionsWidget(attr_defs, self._content_widget)
        self._content_layout.addWidget(widget, 0)
        self._content_layout.addStretch(1)
        self._attr_defs_widget = widget
        self._last_selected_plugin = plugin.identifier

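    # Sketch of the expected plugin contract (an assumption based on how
    # 'get_placeholder_options' is used above, not code from this commit):
    # the plugin returns a list of attribute definitions which
    # AttributeDefinitionsWidget turns into input widgets, roughly
    #     def get_placeholder_options(self, options=None):
    #         options = options or {}
    #         return [
    #             BoolDef(
    #                 "keep_placeholder",
    #                 default=options.get("keep_placeholder", True),
    #                 label="Keep placeholder",
    #             ),
    #         ]
    # where 'BoolDef' stands in for one of the attribute definition classes
    # in 'openpype.lib' (names used here only as an illustration).
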
    def _update_ui_visibility(self):
        create_mode = self._mode == 0
        self._plugins_combo.setVisible(create_mode)

        if not self._builder:
            self._save_btn.setVisible(False)
            self._create_btn.setVisible(False)
            return

        save_enabled = not create_mode
        if save_enabled:
            save_enabled = self._update_item is not None
        self._save_btn.setVisible(save_enabled)
        self._create_btn.setVisible(create_mode)

    def _on_plugin_change(self):
        index = self._plugins_combo.currentIndex()
        plugin_identifier = self._plugins_combo.itemData(index)
        if plugin_identifier == self._last_selected_plugin:
            return

        plugin = self._builder.placeholder_plugins.get(plugin_identifier)
        self._create_option_widgets(plugin)

    def _on_save_click(self):
        options = self._attr_defs_widget.current_value()
        plugin = self._builder.placeholder_plugins.get(
            self._last_selected_plugin
        )
        # TODO much better error handling
        try:
            plugin.update_placeholder(self._update_item, options)
            self.accept()
        except Exception:
            self.log.warning("Something went wrong", exc_info=True)
            dialog = QtWidgets.QMessageBox(self)
            dialog.setWindowTitle("Something went wrong")
            dialog.setText("Something went wrong")
            dialog.exec_()

    def _on_create_click(self):
        options = self._attr_defs_widget.current_value()
        plugin = self._builder.placeholder_plugins.get(
            self._last_selected_plugin
        )
        # TODO much better error handling
        try:
            plugin.create_placeholder(options)
            self.accept()
        except Exception:
            self.log.warning("Something went wrong", exc_info=True)
            dialog = QtWidgets.QMessageBox(self)
            dialog.setWindowTitle("Something went wrong")
            dialog.setText("Something went wrong")
            dialog.exec_()

    def _on_close_click(self):
        self.reject()

    def showEvent(self, event):
        super(WorkfileBuildPlaceholderDialog, self).showEvent(event)
        if not self._first_refreshed:
            self.refresh()

        if self._first_show:
            self._first_show = False
            self.setStyleSheet(style.load_stylesheet())
            self.resize(390, 450)


@ -108,10 +108,12 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget):

         row = 0
         for attr_def in attr_defs:
-            if attr_def.key in self._current_keys:
-                raise KeyError("Duplicated key \"{}\"".format(attr_def.key))
+            if not isinstance(attr_def, UIDef):
+                if attr_def.key in self._current_keys:
+                    raise KeyError(
+                        "Duplicated key \"{}\"".format(attr_def.key))

-            self._current_keys.add(attr_def.key)
+                self._current_keys.add(attr_def.key)
             widget = create_widget_for_attr_def(attr_def, self)

             expand_cols = 2
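
In short, the hunk above makes 'AttributeDefinitionsWidget' skip the duplicate-key bookkeeping for UI-only definitions, which the placeholder option dialogs can now feed into it. A rough illustration of the kind of input this guards ('UILabelDef', 'UISeparatorDef' and 'BoolDef' are assumed to be the UI and value definition classes from 'openpype.lib'; treat the snippet as a sketch, not as code from this commit):

# Only the value definition carries a key that must be unique; the UI-only
# entries are now ignored by the duplicate-key check.
attr_defs = [
    UILabelDef("Placeholder options"),
    UISeparatorDef(),
    BoolDef("use_selection", default=True, label="Use selection"),
]
widget = AttributeDefinitionsWidget(attr_defs, parent_widget)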