Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit a38d1ecdc4: Merge branch 'develop' into enhancement/add-ruff-and-code-spell

116 changed files with 2612 additions and 2339 deletions
@@ -27,7 +27,7 @@ AYON addons should contain separated logic of specific kind of implementation, s
 - default interfaces are defined in `interfaces.py`

 ## IPluginPaths

-- addon wants to add directory path/s to avalon or publish plugins
+- addon wants to add directory path/s to publish, load, create or inventory plugins
 - addon must implement `get_plugin_paths` which must return dictionary with possible keys `"publish"`, `"load"`, `"create"` or `"actions"`
 - each key may contain list or string with a path to directory with plugins

@@ -89,4 +89,4 @@ AYON addons should contain separated logic of specific kind of implementation, s

 ### TrayAddonsManager
 - inherits from `AddonsManager`
-- has specific implementation for Pype Tray tool and handle `ITrayAddon` methods
+- has specific implementation for AYON Tray and handle `ITrayAddon` methods
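To make the `IPluginPaths` contract above concrete, here is a minimal sketch of an addon exposing plugin directories. The addon name and folder layout are hypothetical, and it assumes the `AYONAddon` base class and `IPluginPaths` interface are importable from `ayon_core.addon`:

```python
import os

from ayon_core.addon import AYONAddon, IPluginPaths

ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class ExamplePluginPathsAddon(AYONAddon, IPluginPaths):
    """Hypothetical addon returning plugin directories by key."""

    name = "example_plugin_paths"

    def get_plugin_paths(self):
        # Each key may hold a single path or a list of paths.
        return {
            "publish": [os.path.join(ADDON_ROOT, "plugins", "publish")],
            "load": os.path.join(ADDON_ROOT, "plugins", "load"),
            "create": [os.path.join(ADDON_ROOT, "plugins", "create")],
        }
```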
@@ -741,7 +741,7 @@ class AddonsManager:
         addon_classes = []
         for module in openpype_modules:
-            # Go through globals in `pype.modules`
+            # Go through globals in `ayon_core.modules`
             for name in dir(module):
                 modules_item = getattr(module, name, None)
                 # Filter globals that are not classes which inherit from
@@ -18,7 +18,7 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
     def process(self, instance):
         anatomy = instance.context.data["anatomy"]
         anatomy_data = copy.deepcopy(instance.data["anatomyData"])
-        padding = anatomy.templates.get("frame_padding", 4)
+        padding = anatomy.templates_obj.frame_padding
         product_type = "render"
         anatomy_data.update({
             "frame": f"%0{padding}d",
@@ -28,15 +28,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
         })
         anatomy_data["product"]["type"] = product_type

-        anatomy_filled = anatomy.format(anatomy_data)
-
         # get anatomy rendering keys
         r_anatomy_key = self.anatomy_template_key_render_files
         m_anatomy_key = self.anatomy_template_key_metadata

         # get folder and path for rendering images from celaction
-        render_dir = anatomy_filled[r_anatomy_key]["folder"]
-        render_path = anatomy_filled[r_anatomy_key]["path"]
+        r_template_item = anatomy.get_template_item("publish", r_anatomy_key)
+        render_dir = r_template_item["directory"].format_strict(anatomy_data)
+        render_path = r_template_item["path"].format_strict(anatomy_data)
         self.log.debug("__ render_path: `{}`".format(render_path))

         # create dir if it doesnt exists
@@ -51,11 +50,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
         instance.data["path"] = render_path

         # get anatomy for published renders folder path
-        if anatomy_filled.get(m_anatomy_key):
-            instance.data["publishRenderMetadataFolder"] = anatomy_filled[
-                m_anatomy_key]["folder"]
-            self.log.info("Metadata render path: `{}`".format(
-                instance.data["publishRenderMetadataFolder"]
-            ))
+        m_template_item = anatomy.get_template_item(
+            "publish", m_anatomy_key, default=None
+        )
+        if m_template_item is not None:
+            metadata_path = m_template_item["directory"].format_strict(
+                anatomy_data
+            )
+            instance.data["publishRenderMetadataFolder"] = metadata_path
+            self.log.info("Metadata render path: `{}`".format(metadata_path))

         self.log.info(f"Render output path set to: `{render_path}`")
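The hunk above replaces dictionary-style lookups into the fully formatted anatomy with the newer per-template accessors. A rough sketch of the pattern, assuming the calls shown in the diff (`get_template_item`, `format_strict`) and placeholder names:

```python
from ayon_core.pipeline import Anatomy


def resolve_publish_paths(project_name, anatomy_data):
    """Sketch of the new template access pattern (names are placeholders).

    ``anatomy_data`` must already contain every key the configured
    publish template uses (project, folder, product, version, ext, ...).
    """
    anatomy = Anatomy(project_name)

    # Old style (removed above):
    #   anatomy_filled = anatomy.format(anatomy_data)
    #   render_dir = anatomy_filled["<template_key>"]["folder"]

    # New style: fetch a single template item and format only that one.
    template_item = anatomy.get_template_item("publish", "default")
    render_dir = template_item["directory"].format_strict(anatomy_data)
    render_path = template_item["path"].format_strict(anatomy_data)
    return render_dir, render_path
```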
@@ -1,5 +1,5 @@
 """
-OpenPype Autodesk Flame api
+AYON Autodesk Flame api
 """
 from .constants import (
     COLOR_MAP,
@@ -1,14 +1,14 @@

 """
-OpenPype Flame api constances
+AYON Flame api constances
 """
-# OpenPype marker workflow variables
+# AYON marker workflow variables
 MARKER_NAME = "OpenPypeData"
 MARKER_DURATION = 0
 MARKER_COLOR = "cyan"
 MARKER_PUBLISH_DEFAULT = False

-# OpenPype color definitions
+# AYON color definitions
 COLOR_MAP = {
     "red": (1.0, 0.0, 0.0),
     "orange": (1.0, 0.5, 0.0),
@@ -38,12 +38,12 @@ def install():
     pyblish.register_plugin_path(PUBLISH_PATH)
     register_loader_plugin_path(LOAD_PATH)
     register_creator_plugin_path(CREATE_PATH)
-    log.info("OpenPype Flame plug-ins registered ...")
+    log.info("AYON Flame plug-ins registered ...")

     # register callback for switching publishable
     pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

-    log.info("OpenPype Flame host installed ...")
+    log.info("AYON Flame host installed ...")


 def uninstall():
@@ -57,7 +57,7 @@ def uninstall():
     # register callback for switching publishable
     pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)

-    log.info("OpenPype Flame host uninstalled ...")
+    log.info("AYON Flame host uninstalled ...")


 def containerise(flame_clip_segment,
@@ -38,7 +38,7 @@ class CreatorWidget(QtWidgets.QDialog):
             | QtCore.Qt.WindowCloseButtonHint
             | QtCore.Qt.WindowStaysOnTopHint
         )
-        self.setWindowTitle(name or "Pype Creator Input")
+        self.setWindowTitle(name or "AYON Creator Input")
         self.resize(500, 700)

         # Where inputs and labels are set
@@ -61,7 +61,7 @@ class WireTapCom(object):

     def get_launch_args(
             self, project_name, project_data, user_name, *args, **kwargs):
-        """Forming launch arguments for OpenPype launcher.
+        """Forming launch arguments for AYON launcher.

         Args:
             project_name (str): name of project
@@ -11,7 +11,7 @@ log = Logger.get_logger(__name__)
 def _sync_utility_scripts(env=None):
     """ Synchronizing basic utlility scripts for flame.

-    To be able to run start OpenPype within Flame we have to copy
+    To be able to run start AYON within Flame we have to copy
     all utility_scripts and additional FLAME_SCRIPT_DIR into
     `/opt/Autodesk/shared/python`. This will be always synchronizing those
     folders.
@@ -124,7 +124,7 @@ def setup(env=None):
     # synchronize resolve utility scripts
     _sync_utility_scripts(env)

-    log.info("Flame OpenPype wrapper has been installed")
+    log.info("Flame AYON wrapper has been installed")


 def get_flame_version():
@@ -72,7 +72,7 @@ class FlamePrelaunch(PreLaunchHook):
         project_data = {
             "Name": project_entity["name"],
             "Nickname": project_entity["code"],
-            "Description": "Created by OpenPype",
+            "Description": "Created by AYON",
             "SetupDir": project_entity["name"],
             "FrameWidth": int(width),
             "FrameHeight": int(height),
@@ -79,7 +79,7 @@ class FlameBabyPublisherPanel(object):

         # creating ui
         self.window.setMinimumSize(1500, 600)
-        self.window.setWindowTitle('OpenPype: Baby-publisher')
+        self.window.setWindowTitle('AYON: Baby-publisher')
         self.window.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
         self.window.setAttribute(QtCore.Qt.WA_DeleteOnClose)
         self.window.setFocusPolicy(QtCore.Qt.StrongFocus)
@@ -31,7 +31,7 @@ def scope_sequence(selection):
 def get_media_panel_custom_ui_actions():
     return [
         {
-            "name": "OpenPype: Baby-publisher",
+            "name": "AYON: Baby-publisher",
             "actions": [
                 {
                     "name": "Create Shots",
@@ -12,7 +12,7 @@ from ayon_core.pipeline import (


 def openpype_install():
-    """Registering OpenPype in context
+    """Registering AYON in context
     """
     install_host(opfapi)
     print("Registered host: {}".format(registered_host()))
@@ -28,7 +28,7 @@ def exeption_handler(exctype, value, _traceback):
         tb (str): traceback to show
     """
     import traceback
-    msg = "OpenPype: Python exception {} in {}".format(value, exctype)
+    msg = "AYON: Python exception {} in {}".format(value, exctype)
     mbox = QtWidgets.QMessageBox()
     mbox.setText(msg)
     mbox.setDetailedText(
@@ -15,7 +15,7 @@ from .lib import (
     comp_lock_and_undo_chunk
 )

-from .menu import launch_openpype_menu
+from .menu import launch_ayon_menu


 __all__ = [
@@ -35,5 +35,5 @@ __all__ = [
     "comp_lock_and_undo_chunk",

     # menu
-    "launch_openpype_menu",
+    "launch_ayon_menu",
 ]
@@ -28,9 +28,9 @@ self = sys.modules[__name__]
 self.menu = None


-class OpenPypeMenu(QtWidgets.QWidget):
+class AYONMenu(QtWidgets.QWidget):
     def __init__(self, *args, **kwargs):
-        super(OpenPypeMenu, self).__init__(*args, **kwargs)
+        super(AYONMenu, self).__init__(*args, **kwargs)

         self.setObjectName(f"{MENU_LABEL}Menu")

@@ -125,7 +125,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
         self._pulse = FusionPulse(parent=self)
         self._pulse.start()

-        # Detect Fusion events as OpenPype events
+        # Detect Fusion events as AYON events
         self._event_handler = FusionEventHandler(parent=self)
         self._event_handler.start()

@@ -174,16 +174,16 @@ class OpenPypeMenu(QtWidgets.QWidget):
         set_current_context_framerange()


-def launch_openpype_menu():
+def launch_ayon_menu():
     app = get_qt_app()

-    pype_menu = OpenPypeMenu()
+    ayon_menu = AYONMenu()

     stylesheet = load_stylesheet()
-    pype_menu.setStyleSheet(stylesheet)
+    ayon_menu.setStyleSheet(stylesheet)

-    pype_menu.show()
-    self.menu = pype_menu
+    ayon_menu.show()
+    self.menu = ayon_menu

     result = app.exec_()
     print("Shutting down..")
@@ -70,7 +70,7 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
     name = "fusion"

     def install(self):
-        """Install fusion-specific functionality of OpenPype.
+        """Install fusion-specific functionality of AYON.

         This is where you install menus and register families, data
         and loaders into fusion.
@@ -177,7 +177,7 @@ def on_after_open(event):
     if any_outdated_containers():
         log.warning("Scene has outdated content.")

-        # Find OpenPype menu to attach to
+        # Find AYON menu to attach to
        from . import menu

        def _on_show_scene_inventory():
@@ -326,9 +326,9 @@ class FusionEventThread(QtCore.QThread):


 class FusionEventHandler(QtCore.QObject):
-    """Emits OpenPype events based on Fusion events captured in a QThread.
+    """Emits AYON events based on Fusion events captured in a QThread.

-    This will emit the following OpenPype events based on Fusion actions:
+    This will emit the following AYON events based on Fusion actions:
         save: Comp_Save, Comp_SaveAs
         open: Comp_Opened
         new: Comp_New
@@ -374,7 +374,7 @@ class FusionEventHandler(QtCore.QObject):
         self._event_thread.stop()

     def _on_event(self, event):
-        """Handle Fusion events to emit OpenPype events"""
+        """Handle Fusion events to emit AYON events"""
         if not event:
             return

@@ -133,7 +133,7 @@ class GenericCreateSaver(Creator):
         formatting_data = deepcopy(data)

         # get frame padding from anatomy templates
-        frame_padding = self.project_anatomy.templates["frame_padding"]
+        frame_padding = self.project_anatomy.templates_obj.frame_padding

         # get output format
         ext = data["creator_attributes"]["image_format"]
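The same Anatomy refactor appears here: the integer frame padding now comes from the templates object instead of a raw dictionary key. A small sketch of how the value might be used to build frame tokens (the project name is a placeholder):

```python
from ayon_core.pipeline import Anatomy


def get_frame_tokens(project_name):
    """Sketch: frame padding now comes from the templates object."""
    anatomy = Anatomy(project_name)

    # Replaces the old anatomy.templates["frame_padding"] lookup.
    frame_padding = anatomy.templates_obj.frame_padding

    # e.g. padding 4 -> "%04d" and "####"
    return "%0{}d".format(frame_padding), "#" * frame_padding
```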
@@ -1,6 +1,6 @@
-### OpenPype deploy MenuScripts
+### AYON deploy MenuScripts

 Note that this `MenuScripts` is not an official Fusion folder.
-OpenPype only uses this folder in `{fusion}/deploy/` to trigger the OpenPype menu actions.
+AYON only uses this folder in `{fusion}/deploy/` to trigger the AYON menu actions.

 They are used in the actions defined in `.fu` files in `{fusion}/deploy/Config`.
@@ -35,7 +35,7 @@ def main(env):
     log = Logger.get_logger(__name__)
     log.info(f"Registered host: {registered_host()}")

-    menu.launch_openpype_menu()
+    menu.launch_ayon_menu()

     # Initiate a QTimer to check if Fusion is still alive every X interval
     # If Fusion is not found - kill itself
@ -19,7 +19,7 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
|
|||
Prepares local Fusion profile directory, copies existing Fusion profile.
|
||||
This also sets FUSION MasterPrefs variable, which is used
|
||||
to apply Master.prefs file to override some Fusion profile settings to:
|
||||
- enable the OpenPype menu
|
||||
- enable the AYON menu
|
||||
- force Python 3 over Python 2
|
||||
- force English interface
|
||||
Master.prefs is defined in openpype/hosts/fusion/deploy/fusion_shared.prefs
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ from ayon_core.hosts.fusion import (
|
|||
|
||||
class FusionPrelaunch(PreLaunchHook):
|
||||
"""
|
||||
Prepares OpenPype Fusion environment.
|
||||
Prepares AYON Fusion environment.
|
||||
Requires correct Python home variable to be defined in the environment
|
||||
settings for Fusion to point at a valid Python 3 build for Fusion.
|
||||
Python3 versions that are supported by Fusion:
|
||||
|
|
|
|||
|
|
@ -204,7 +204,7 @@ class CreateComposite(harmony.Creator):
|
|||
|
||||
name = "compositeDefault"
|
||||
label = "Composite"
|
||||
product_type = "mindbender.template"
|
||||
product_type = "template"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateComposite, self).__init__(*args, **kwargs)
|
||||
|
|
@ -221,7 +221,7 @@ class CreateRender(harmony.Creator):
|
|||
|
||||
name = "writeDefault"
|
||||
label = "Write"
|
||||
product_type = "mindbender.imagesequence"
|
||||
product_type = "render"
|
||||
node_type = "WRITE"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
|
@ -304,7 +304,7 @@ class ExtractImage(pyblish.api.InstancePlugin):
|
|||
label = "Extract Image Sequence"
|
||||
order = pyblish.api.ExtractorOrder
|
||||
hosts = ["harmony"]
|
||||
families = ["mindbender.imagesequence"]
|
||||
families = ["render"]
|
||||
|
||||
def process(self, instance):
|
||||
project_path = harmony.send(
|
||||
|
|
@ -582,8 +582,16 @@ class ImageSequenceLoader(load.LoaderPlugin):
|
|||
"""Load images
|
||||
Stores the imported asset in a container named after the asset.
|
||||
"""
|
||||
product_types = {"mindbender.imagesequence"}
|
||||
product_types = {
|
||||
"shot",
|
||||
"render",
|
||||
"image",
|
||||
"plate",
|
||||
"reference",
|
||||
"review",
|
||||
}
|
||||
representations = ["*"]
|
||||
extensions = {"jpeg", "png", "jpg"}
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
files = []
|
||||
|
|
|
|||
|
|
@ -632,7 +632,9 @@ def sync_avalon_data_to_workfile():
|
|||
project_name = get_current_project_name()
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
work_template = anatomy.templates["work"]["path"]
|
||||
work_template = anatomy.get_template_item(
|
||||
"work", "default", "path"
|
||||
)
|
||||
work_root = anatomy.root_value_for_template(work_template)
|
||||
active_project_root = (
|
||||
os.path.join(work_root, project_name)
|
||||
|
|
@ -825,7 +827,7 @@ class PublishAction(QtWidgets.QAction):
|
|||
# root_node = hiero.core.nuke.RootNode()
|
||||
#
|
||||
# anatomy = Anatomy(get_current_project_name())
|
||||
# work_template = anatomy.templates["work"]["path"]
|
||||
# work_template = anatomy.get_template_item("work", "default", "path")
|
||||
# root_path = anatomy.root_value_for_template(work_template)
|
||||
#
|
||||
# nuke_script.addNode(root_node)
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ class CreatorWidget(QtWidgets.QDialog):
|
|||
| QtCore.Qt.WindowCloseButtonHint
|
||||
| QtCore.Qt.WindowStaysOnTopHint
|
||||
)
|
||||
self.setWindowTitle(name or "Pype Creator Input")
|
||||
self.setWindowTitle(name or "AYON Creator Input")
|
||||
self.resize(500, 700)
|
||||
|
||||
# Where inputs and labels are set
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ class CreateShotClip(phiero.Creator):
|
|||
|
||||
gui_tracks = [track.name()
|
||||
for track in phiero.get_current_sequence().videoTracks()]
|
||||
gui_name = "Pype publish attributes creator"
|
||||
gui_name = "AYON publish attributes creator"
|
||||
gui_info = "Define sequential rename and fill hierarchy data."
|
||||
gui_inputs = {
|
||||
"renameHierarchy": {
|
||||
|
|
|
|||
|
|
@ -19,10 +19,6 @@ from ayon_core.lib import BoolDef
|
|||
from .lib import imprint, read, lsattr, add_self_publish_button
|
||||
|
||||
|
||||
class OpenPypeCreatorError(CreatorError):
|
||||
pass
|
||||
|
||||
|
||||
class Creator(LegacyCreator):
|
||||
"""Creator plugin to create instances in Houdini
|
||||
|
||||
|
|
@ -92,8 +88,8 @@ class Creator(LegacyCreator):
|
|||
|
||||
except hou.Error as er:
|
||||
six.reraise(
|
||||
OpenPypeCreatorError,
|
||||
OpenPypeCreatorError("Creator error: {}".format(er)),
|
||||
CreatorError,
|
||||
CreatorError("Creator error: {}".format(er)),
|
||||
sys.exc_info()[2])
|
||||
|
||||
|
||||
|
|
@ -209,8 +205,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
|
|||
|
||||
except hou.Error as er:
|
||||
six.reraise(
|
||||
OpenPypeCreatorError,
|
||||
OpenPypeCreatorError("Creator error: {}".format(er)),
|
||||
CreatorError,
|
||||
CreatorError("Creator error: {}".format(er)),
|
||||
sys.exc_info()[2])
|
||||
|
||||
def lock_parameters(self, node, parameters):
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
"""Creator plugin for creating publishable Houdini Digital Assets."""
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline import CreatorError
|
||||
from ayon_core.hosts.houdini.api import plugin
|
||||
import hou
|
||||
|
||||
|
|
@ -52,7 +53,7 @@ class CreateHDA(plugin.HoudiniCreator):
|
|||
# if node type has not its definition, it is not user
|
||||
# created hda. We test if hda can be created from the node.
|
||||
if not to_hda.canCreateDigitalAsset():
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
"cannot create hda from node {}".format(to_hda))
|
||||
|
||||
hda_node = to_hda.createDigitalAsset(
|
||||
|
|
@ -61,7 +62,7 @@ class CreateHDA(plugin.HoudiniCreator):
|
|||
)
|
||||
hda_node.layoutChildren()
|
||||
elif self._check_existing(folder_path, node_name):
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
("product {} is already published with different HDA"
|
||||
"definition.").format(node_name))
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
"""Creator plugin to create Redshift ROP."""
|
||||
import hou # noqa
|
||||
|
||||
from ayon_core.pipeline import CreatorError
|
||||
from ayon_core.hosts.houdini.api import plugin
|
||||
from ayon_core.lib import EnumDef, BoolDef
|
||||
|
||||
|
|
@ -42,7 +43,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
|
|||
"Redshift_IPR", node_name=f"{basename}_IPR"
|
||||
)
|
||||
except hou.OperationFailed as e:
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
(
|
||||
"Cannot create Redshift node. Is Redshift "
|
||||
"installed and enabled?"
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
import hou
|
||||
|
||||
from ayon_core.hosts.houdini.api import plugin
|
||||
from ayon_core.pipeline import CreatedInstance
|
||||
from ayon_core.pipeline import CreatedInstance, CreatorError
|
||||
from ayon_core.lib import EnumDef, BoolDef
|
||||
|
||||
|
||||
|
|
@ -42,7 +42,7 @@ class CreateVrayROP(plugin.HoudiniCreator):
|
|||
"vray", node_name=basename + "_IPR"
|
||||
)
|
||||
except hou.OperationFailed:
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
"Cannot create Vray render node. "
|
||||
"Make sure Vray installed and enabled!"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import re
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
load,
|
||||
|
|
@ -44,7 +45,14 @@ def get_image_avalon_container():
|
|||
class ImageLoader(load.LoaderPlugin):
|
||||
"""Load images into COP2"""
|
||||
|
||||
product_types = {"imagesequence"}
|
||||
product_types = {
|
||||
"imagesequence",
|
||||
"review",
|
||||
"render",
|
||||
"plate",
|
||||
"image",
|
||||
"online",
|
||||
}
|
||||
label = "Load Image (COP2)"
|
||||
representations = ["*"]
|
||||
order = -10
|
||||
|
|
@ -55,10 +63,8 @@ class ImageLoader(load.LoaderPlugin):
|
|||
def load(self, context, name=None, namespace=None, data=None):
|
||||
|
||||
# Format file name, Houdini only wants forward slashes
|
||||
file_path = self.filepath_from_context(context)
|
||||
file_path = os.path.normpath(file_path)
|
||||
file_path = file_path.replace("\\", "/")
|
||||
file_path = self._get_file_sequence(file_path)
|
||||
path = self.filepath_from_context(context)
|
||||
path = self.format_path(path, representation=context["representation"])
|
||||
|
||||
# Get the root node
|
||||
parent = get_image_avalon_container()
|
||||
|
|
@ -70,7 +76,10 @@ class ImageLoader(load.LoaderPlugin):
|
|||
node = parent.createNode("file", node_name=node_name)
|
||||
node.moveToGoodPosition()
|
||||
|
||||
node.setParms({"filename1": file_path})
|
||||
parms = {"filename1": path}
|
||||
parms.update(self.get_colorspace_parms(context["representation"]))
|
||||
|
||||
node.setParms(parms)
|
||||
|
||||
# Imprint it manually
|
||||
data = {
|
||||
|
|
@ -93,16 +102,17 @@ class ImageLoader(load.LoaderPlugin):
|
|||
|
||||
# Update the file path
|
||||
file_path = get_representation_path(repre_entity)
|
||||
file_path = file_path.replace("\\", "/")
|
||||
file_path = self._get_file_sequence(file_path)
|
||||
file_path = self.format_path(file_path, repre_entity)
|
||||
|
||||
parms = {
|
||||
"filename1": file_path,
|
||||
"representation": repre_entity["id"],
|
||||
}
|
||||
|
||||
parms.update(self.get_colorspace_parms(repre_entity))
|
||||
|
||||
# Update attributes
|
||||
node.setParms(
|
||||
{
|
||||
"filename1": file_path,
|
||||
"representation": repre_entity["id"],
|
||||
}
|
||||
)
|
||||
node.setParms(parms)
|
||||
|
||||
def remove(self, container):
|
||||
|
||||
|
|
@ -119,14 +129,58 @@ class ImageLoader(load.LoaderPlugin):
|
|||
if not parent.children():
|
||||
parent.destroy()
|
||||
|
||||
def _get_file_sequence(self, file_path):
|
||||
root = os.path.dirname(file_path)
|
||||
files = sorted(os.listdir(root))
|
||||
@staticmethod
|
||||
def format_path(path, representation):
|
||||
"""Format file path correctly for single image or sequence."""
|
||||
if not os.path.exists(path):
|
||||
raise RuntimeError("Path does not exist: %s" % path)
|
||||
|
||||
first_fname = files[0]
|
||||
prefix, padding, suffix = first_fname.rsplit(".", 2)
|
||||
fname = ".".join([prefix, "$F{}".format(len(padding)), suffix])
|
||||
return os.path.join(root, fname).replace("\\", "/")
|
||||
ext = os.path.splitext(path)[-1]
|
||||
|
||||
def switch(self, container, context):
|
||||
self.update(container, context)
|
||||
is_sequence = bool(representation["context"].get("frame"))
|
||||
# The path is either a single file or sequence in a folder.
|
||||
if not is_sequence:
|
||||
filename = path
|
||||
else:
|
||||
filename = re.sub(r"(.*)\.(\d+){}$".format(re.escape(ext)),
|
||||
"\\1.$F4{}".format(ext),
|
||||
path)
|
||||
|
||||
filename = os.path.join(path, filename)
|
||||
|
||||
filename = os.path.normpath(filename)
|
||||
filename = filename.replace("\\", "/")
|
||||
|
||||
return filename
|
||||
|
||||
def get_colorspace_parms(self, representation: dict) -> dict:
|
||||
"""Return the color space parameters.
|
||||
|
||||
Returns the values for the colorspace parameters on the node if there
|
||||
is colorspace data on the representation.
|
||||
|
||||
Arguments:
|
||||
representation (dict): The representation entity.
|
||||
|
||||
Returns:
|
||||
dict: Parm to value mapping if colorspace data is defined.
|
||||
|
||||
"""
|
||||
# Using OCIO colorspace on COP2 File node is only supported in Hou 20+
|
||||
major, _, _ = hou.applicationVersion()
|
||||
if major < 20:
|
||||
return {}
|
||||
|
||||
data = representation.get("data", {}).get("colorspaceData", {})
|
||||
if not data:
|
||||
return {}
|
||||
|
||||
colorspace = data["colorspace"]
|
||||
if colorspace:
|
||||
return {
|
||||
"colorspace": 3, # Use OpenColorIO
|
||||
"ocio_space": colorspace
|
||||
}
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<mainMenu>
|
||||
<menuBar>
|
||||
<subMenu id="openpype_menu">
|
||||
<subMenu id="ayon_menu">
|
||||
<labelExpression><![CDATA[
|
||||
import os
|
||||
return os.environ.get("AYON_MENU_LABEL") or "AYON"
|
||||
|
|
|
|||
|
|
@ -8,8 +8,8 @@ from ayon_core.tools.utils import host_tools
|
|||
from ayon_core.hosts.max.api import lib
|
||||
|
||||
|
||||
class OpenPypeMenu(object):
|
||||
"""Object representing OpenPype/AYON menu.
|
||||
class AYONMenu(object):
|
||||
"""Object representing AYON menu.
|
||||
|
||||
This is using "hack" to inject itself before "Help" menu of 3dsmax.
|
||||
For some reason `postLoadingMenus` event doesn't fire, and main menu
|
||||
|
|
@ -39,7 +39,7 @@ class OpenPypeMenu(object):
|
|||
|
||||
self._counter = 0
|
||||
self._timer.stop()
|
||||
self.build_openpype_menu()
|
||||
self._build_ayon_menu()
|
||||
|
||||
@staticmethod
|
||||
def get_main_widget():
|
||||
|
|
@ -50,8 +50,8 @@ class OpenPypeMenu(object):
|
|||
"""Get main Menubar by 3dsmax main window."""
|
||||
return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0]
|
||||
|
||||
def get_or_create_openpype_menu(
|
||||
self, name: str = "&Openpype",
|
||||
def _get_or_create_ayon_menu(
|
||||
self, name: str = "&AYON",
|
||||
before: str = "&Help") -> QtWidgets.QAction:
|
||||
"""Create AYON menu.
|
||||
|
||||
|
|
@ -73,7 +73,7 @@ class OpenPypeMenu(object):
|
|||
help_action = None
|
||||
for item in menu_items:
|
||||
if name in item.title():
|
||||
# we already have OpenPype menu
|
||||
# we already have AYON menu
|
||||
return item
|
||||
|
||||
if before in item.title():
|
||||
|
|
@ -85,50 +85,50 @@ class OpenPypeMenu(object):
|
|||
self.menu = op_menu
|
||||
return op_menu
|
||||
|
||||
def build_openpype_menu(self) -> QtWidgets.QAction:
|
||||
def _build_ayon_menu(self) -> QtWidgets.QAction:
|
||||
"""Build items in AYON menu."""
|
||||
openpype_menu = self.get_or_create_openpype_menu()
|
||||
load_action = QtWidgets.QAction("Load...", openpype_menu)
|
||||
ayon_menu = self._get_or_create_ayon_menu()
|
||||
load_action = QtWidgets.QAction("Load...", ayon_menu)
|
||||
load_action.triggered.connect(self.load_callback)
|
||||
openpype_menu.addAction(load_action)
|
||||
ayon_menu.addAction(load_action)
|
||||
|
||||
publish_action = QtWidgets.QAction("Publish...", openpype_menu)
|
||||
publish_action = QtWidgets.QAction("Publish...", ayon_menu)
|
||||
publish_action.triggered.connect(self.publish_callback)
|
||||
openpype_menu.addAction(publish_action)
|
||||
ayon_menu.addAction(publish_action)
|
||||
|
||||
manage_action = QtWidgets.QAction("Manage...", openpype_menu)
|
||||
manage_action = QtWidgets.QAction("Manage...", ayon_menu)
|
||||
manage_action.triggered.connect(self.manage_callback)
|
||||
openpype_menu.addAction(manage_action)
|
||||
ayon_menu.addAction(manage_action)
|
||||
|
||||
library_action = QtWidgets.QAction("Library...", openpype_menu)
|
||||
library_action = QtWidgets.QAction("Library...", ayon_menu)
|
||||
library_action.triggered.connect(self.library_callback)
|
||||
openpype_menu.addAction(library_action)
|
||||
ayon_menu.addAction(library_action)
|
||||
|
||||
openpype_menu.addSeparator()
|
||||
ayon_menu.addSeparator()
|
||||
|
||||
workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu)
|
||||
workfiles_action = QtWidgets.QAction("Work Files...", ayon_menu)
|
||||
workfiles_action.triggered.connect(self.workfiles_callback)
|
||||
openpype_menu.addAction(workfiles_action)
|
||||
ayon_menu.addAction(workfiles_action)
|
||||
|
||||
openpype_menu.addSeparator()
|
||||
ayon_menu.addSeparator()
|
||||
|
||||
res_action = QtWidgets.QAction("Set Resolution", openpype_menu)
|
||||
res_action = QtWidgets.QAction("Set Resolution", ayon_menu)
|
||||
res_action.triggered.connect(self.resolution_callback)
|
||||
openpype_menu.addAction(res_action)
|
||||
ayon_menu.addAction(res_action)
|
||||
|
||||
frame_action = QtWidgets.QAction("Set Frame Range", openpype_menu)
|
||||
frame_action = QtWidgets.QAction("Set Frame Range", ayon_menu)
|
||||
frame_action.triggered.connect(self.frame_range_callback)
|
||||
openpype_menu.addAction(frame_action)
|
||||
ayon_menu.addAction(frame_action)
|
||||
|
||||
colorspace_action = QtWidgets.QAction("Set Colorspace", openpype_menu)
|
||||
colorspace_action = QtWidgets.QAction("Set Colorspace", ayon_menu)
|
||||
colorspace_action.triggered.connect(self.colorspace_callback)
|
||||
openpype_menu.addAction(colorspace_action)
|
||||
ayon_menu.addAction(colorspace_action)
|
||||
|
||||
unit_scale_action = QtWidgets.QAction("Set Unit Scale", openpype_menu)
|
||||
unit_scale_action = QtWidgets.QAction("Set Unit Scale", ayon_menu)
|
||||
unit_scale_action.triggered.connect(self.unit_scale_callback)
|
||||
openpype_menu.addAction(unit_scale_action)
|
||||
ayon_menu.addAction(unit_scale_action)
|
||||
|
||||
return openpype_menu
|
||||
return ayon_menu
|
||||
|
||||
def load_callback(self):
|
||||
"""Callback to show Loader tool."""
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Pipeline tools for OpenPype Houdini integration."""
|
||||
"""Pipeline tools for AYON 3ds max integration."""
|
||||
import os
|
||||
import logging
|
||||
from operator import attrgetter
|
||||
|
|
@ -14,7 +14,7 @@ from ayon_core.pipeline import (
|
|||
AVALON_CONTAINER_ID,
|
||||
AYON_CONTAINER_ID,
|
||||
)
|
||||
from ayon_core.hosts.max.api.menu import OpenPypeMenu
|
||||
from ayon_core.hosts.max.api.menu import AYONMenu
|
||||
from ayon_core.hosts.max.api import lib
|
||||
from ayon_core.hosts.max.api.plugin import MS_CUSTOM_ATTRIB
|
||||
from ayon_core.hosts.max import MAX_HOST_DIR
|
||||
|
|
@ -48,7 +48,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
|||
register_creator_plugin_path(CREATE_PATH)
|
||||
|
||||
# self._register_callbacks()
|
||||
self.menu = OpenPypeMenu()
|
||||
self.menu = AYONMenu()
|
||||
|
||||
self._has_been_setup = True
|
||||
|
||||
|
|
@ -94,7 +94,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
|||
|
||||
def _deferred_menu_creation(self):
|
||||
self.log.info("Building menu ...")
|
||||
self.menu = OpenPypeMenu()
|
||||
self.menu = AYONMenu()
|
||||
|
||||
@staticmethod
|
||||
def create_context_node():
|
||||
|
|
@ -148,7 +148,7 @@ attributes "OpenPypeContext"
|
|||
|
||||
|
||||
def ls() -> list:
|
||||
"""Get all OpenPype instances."""
|
||||
"""Get all AYON containers."""
|
||||
objs = rt.objects
|
||||
containers = [
|
||||
obj for obj in objs
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""3dsmax specific Avalon/Pyblish plugin definitions."""
|
||||
"""3dsmax specific AYON/Pyblish plugin definitions."""
|
||||
from abc import ABCMeta
|
||||
|
||||
import six
|
||||
|
|
@ -156,10 +156,6 @@ MS_CUSTOM_ATTRIB = """attributes "openPypeData"
|
|||
)"""
|
||||
|
||||
|
||||
class OpenPypeCreatorError(CreatorError):
|
||||
pass
|
||||
|
||||
|
||||
class MaxCreatorBase(object):
|
||||
|
||||
@staticmethod
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
|||
|
||||
|
||||
class ForceStartupScript(PreLaunchHook):
|
||||
"""Inject OpenPype environment to 3ds max.
|
||||
"""Inject AYON environment to 3ds max.
|
||||
|
||||
Note that this works in combination whit 3dsmax startup script that
|
||||
is translating it back to PYTHONPATH for cases when 3dsmax drops PYTHONPATH
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
|
|||
|
||||
|
||||
class InjectPythonPath(PreLaunchHook):
|
||||
"""Inject OpenPype environment to 3dsmax.
|
||||
"""Inject AYON environment to 3dsmax.
|
||||
|
||||
Note that this works in combination whit 3dsmax startup script that
|
||||
is translating it back to PYTHONPATH for cases when 3dsmax drops PYTHONPATH
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
-- OpenPype Init Script
|
||||
-- AYON Init Script
|
||||
(
|
||||
local sysPath = dotNetClass "System.IO.Path"
|
||||
local sysDir = dotNetClass "System.IO.Directory"
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""OpenPype script commands to be used directly in Maya."""
|
||||
"""AYON script commands to be used directly in Maya."""
|
||||
from maya import cmds
|
||||
|
||||
from ayon_api import get_project, get_folder_by_path
|
||||
|
|
|
|||
|
|
@ -109,7 +109,7 @@ def override_toolbox_ui():
|
|||
|
||||
controls.append(
|
||||
cmds.iconTextButton(
|
||||
"pype_toolbox_lookmanager",
|
||||
"ayon_toolbox_lookmanager",
|
||||
annotation="Look Manager",
|
||||
label="Look Manager",
|
||||
image=os.path.join(icons, "lookmanager.png"),
|
||||
|
|
@ -122,7 +122,7 @@ def override_toolbox_ui():
|
|||
|
||||
controls.append(
|
||||
cmds.iconTextButton(
|
||||
"pype_toolbox_workfiles",
|
||||
"ayon_toolbox_workfiles",
|
||||
annotation="Work Files",
|
||||
label="Work Files",
|
||||
image=os.path.join(icons, "workfiles.png"),
|
||||
|
|
@ -137,7 +137,7 @@ def override_toolbox_ui():
|
|||
|
||||
controls.append(
|
||||
cmds.iconTextButton(
|
||||
"pype_toolbox_loader",
|
||||
"ayon_toolbox_loader",
|
||||
annotation="Loader",
|
||||
label="Loader",
|
||||
image=os.path.join(icons, "loader.png"),
|
||||
|
|
@ -152,7 +152,7 @@ def override_toolbox_ui():
|
|||
|
||||
controls.append(
|
||||
cmds.iconTextButton(
|
||||
"pype_toolbox_manager",
|
||||
"ayon_toolbox_manager",
|
||||
annotation="Inventory",
|
||||
label="Inventory",
|
||||
image=os.path.join(icons, "inventory.png"),
|
||||
|
|
|
|||
|
|
@ -2931,13 +2931,13 @@ def bake_to_world_space(nodes,
|
|||
|
||||
|
||||
def load_capture_preset(data):
|
||||
"""Convert OpenPype Extract Playblast settings to `capture` arguments
|
||||
"""Convert AYON Extract Playblast settings to `capture` arguments
|
||||
|
||||
Input data is the settings from:
|
||||
`project_settings/maya/publish/ExtractPlayblast/capture_preset`
|
||||
|
||||
Args:
|
||||
data (dict): Capture preset settings from OpenPype settings
|
||||
data (dict): Capture preset settings from AYON settings
|
||||
|
||||
Returns:
|
||||
dict: `capture.capture` compatible keyword arguments
|
||||
|
|
@ -3288,7 +3288,7 @@ def set_colorspace():
|
|||
else:
|
||||
# TODO: deprecated code from 3.15.5 - remove
|
||||
# Maya 2022+ introduces new OCIO v2 color management settings that
|
||||
# can override the old color management preferences. OpenPype has
|
||||
# can override the old color management preferences. AYON has
|
||||
# separate settings for both so we fall back when necessary.
|
||||
use_ocio_v2 = imageio["colorManagementPreference_v2"]["enabled"]
|
||||
if use_ocio_v2 and not ocio_v2_support:
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
https://github.com/Colorbleed/colorbleed-config/blob/acre/colorbleed/maya/lib_rendersetup.py
|
||||
Credits: Roy Nieterau (BigRoy) / Colorbleed
|
||||
Modified for use in OpenPype
|
||||
Modified for use in AYON
|
||||
|
||||
"""
|
||||
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ def get_context_label():
|
|||
|
||||
def install(project_settings):
|
||||
if cmds.about(batch=True):
|
||||
log.info("Skipping openpype.menu initialization in batch mode..")
|
||||
log.info("Skipping AYON menu initialization in batch mode..")
|
||||
return
|
||||
|
||||
def add_menu():
|
||||
|
|
@ -261,7 +261,7 @@ def popup():
|
|||
|
||||
|
||||
def update_menu_task_label():
|
||||
"""Update the task label in Avalon menu to current session"""
|
||||
"""Update the task label in AYON menu to current session"""
|
||||
|
||||
if IS_HEADLESS:
|
||||
return
|
||||
|
|
|
|||
|
|
@ -361,13 +361,13 @@ def parse_container(container):
|
|||
|
||||
|
||||
def _ls():
|
||||
"""Yields Avalon container node names.
|
||||
"""Yields AYON container node names.
|
||||
|
||||
Used by `ls()` to retrieve the nodes and then query the full container's
|
||||
data.
|
||||
|
||||
Yields:
|
||||
str: Avalon container node name (objectSet)
|
||||
str: AYON container node name (objectSet)
|
||||
|
||||
"""
|
||||
|
||||
|
|
@ -384,7 +384,7 @@ def _ls():
|
|||
}
|
||||
|
||||
# Iterate over all 'set' nodes in the scene to detect whether
|
||||
# they have the avalon container ".id" attribute.
|
||||
# they have the ayon container ".id" attribute.
|
||||
fn_dep = om.MFnDependencyNode()
|
||||
iterator = om.MItDependencyNodes(om.MFn.kSet)
|
||||
for mobject in _maya_iterate(iterator):
|
||||
|
|
@ -673,7 +673,7 @@ def workfile_save_before_xgen(event):
|
|||
switching context.
|
||||
|
||||
Args:
|
||||
event (Event) - openpype/lib/events.py
|
||||
event (Event) - ayon_core/lib/events.py
|
||||
"""
|
||||
if not cmds.pluginInfo("xgenToolkit", query=True, loaded=True):
|
||||
return
|
||||
|
|
|
|||
|
|
@ -899,7 +899,7 @@ class ReferenceLoader(Loader):
|
|||
cmds.disconnectAttr(input, node_attr)
|
||||
cmds.setAttr(node_attr, data["value"])
|
||||
|
||||
# Fix PLN-40 for older containers created with Avalon that had the
|
||||
# Fix PLN-40 for older containers created with AYON that had the
|
||||
# `.verticesOnlySet` set to True.
|
||||
if cmds.getAttr("{}.verticesOnlySet".format(node)):
|
||||
self.log.info("Setting %s.verticesOnlySet to False", node)
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ Export Maya nodes from Render Setup layer as if flattened in that layer instead
|
|||
of exporting the defaultRenderLayer as Maya forces by default
|
||||
|
||||
Credits: Roy Nieterau (BigRoy) / Colorbleed
|
||||
Modified for use in OpenPype
|
||||
Modified for use in AYON
|
||||
|
||||
"""
|
||||
|
||||
|
|
|
|||
|
|
@ -150,7 +150,7 @@ def load_package(filepath, name, namespace=None):
|
|||
containers.append(container)
|
||||
|
||||
# TODO: Do we want to cripple? Or do we want to add a 'parent' parameter?
|
||||
# Cripple the original avalon containers so they don't show up in the
|
||||
# Cripple the original AYON containers so they don't show up in the
|
||||
# manager
|
||||
# for container in containers:
|
||||
# cmds.setAttr("%s.id" % container,
|
||||
|
|
@ -175,7 +175,7 @@ def _add(instance, representation_id, loaders, namespace, root="|"):
|
|||
namespace (str):
|
||||
|
||||
Returns:
|
||||
str: The created Avalon container.
|
||||
str: The created AYON container.
|
||||
|
||||
"""
|
||||
|
||||
|
|
@ -244,7 +244,7 @@ def _instances_by_namespace(data):
|
|||
|
||||
|
||||
def get_contained_containers(container):
|
||||
"""Get the Avalon containers in this container
|
||||
"""Get the AYON containers in this container
|
||||
|
||||
Args:
|
||||
container (dict): The container dict.
|
||||
|
|
@ -256,7 +256,7 @@ def get_contained_containers(container):
|
|||
|
||||
from .pipeline import parse_container
|
||||
|
||||
# Get avalon containers in this package setdress container
|
||||
# Get AYON containers in this package setdress container
|
||||
containers = []
|
||||
members = cmds.sets(container['objectName'], query=True)
|
||||
for node in cmds.ls(members, type="objectSet"):
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ class MayaLegacyConvertor(ProductConvertorPlugin,
|
|||
|
||||
Its limitation is that you can have multiple creators creating product
|
||||
of the same type and there is no way to handle it. This code should
|
||||
nevertheless cover all creators that came with OpenPype.
|
||||
nevertheless cover all creators that came with AYON.
|
||||
|
||||
"""
|
||||
identifier = "io.openpype.creators.maya.legacy"
|
||||
|
|
|
|||
|
|
@ -108,7 +108,7 @@ class LoadVDBtoArnold(load.LoaderPlugin):
|
|||
|
||||
from maya import cmds
|
||||
|
||||
# Get all members of the avalon container, ensure they are unlocked
|
||||
# Get all members of the AYON container, ensure they are unlocked
|
||||
# and delete everything
|
||||
members = cmds.sets(container['objectName'], query=True)
|
||||
cmds.lockNode(members, lock=False)
|
||||
|
|
|
|||
|
|
@ -115,7 +115,7 @@ class LoadVDBtoRedShift(load.LoaderPlugin):
|
|||
def remove(self, container):
|
||||
from maya import cmds
|
||||
|
||||
# Get all members of the avalon container, ensure they are unlocked
|
||||
# Get all members of the AYON container, ensure they are unlocked
|
||||
# and delete everything
|
||||
members = cmds.sets(container['objectName'], query=True)
|
||||
cmds.lockNode(members, lock=False)
|
||||
|
|
|
|||
|
|
@ -277,7 +277,7 @@ class LoadVDBtoVRay(load.LoaderPlugin):
|
|||
|
||||
def remove(self, container):
|
||||
|
||||
# Get all members of the avalon container, ensure they are unlocked
|
||||
# Get all members of the AYON container, ensure they are unlocked
|
||||
# and delete everything
|
||||
members = cmds.sets(container['objectName'], query=True)
|
||||
cmds.lockNode(members, lock=False)
|
||||
|
|
|
|||
|
|
@ -79,12 +79,12 @@ def iter_history(nodes,
|
|||
def collect_input_containers(containers, nodes):
|
||||
"""Collect containers that contain any of the node in `nodes`.
|
||||
|
||||
This will return any loaded Avalon container that contains at least one of
|
||||
the nodes. As such, the Avalon container is an input for it. Or in short,
|
||||
This will return any loaded AYON container that contains at least one of
|
||||
the nodes. As such, the AYON container is an input for it. Or in short,
|
||||
there are member nodes of that container.
|
||||
|
||||
Returns:
|
||||
list: Input avalon containers
|
||||
list: Input loaded containers
|
||||
|
||||
"""
|
||||
# Assume the containers have collected their cached '_members' data
|
||||
|
|
|
|||
|
|
@ -106,10 +106,10 @@ class TextureProcessor:
|
|||
self.log = log
|
||||
|
||||
def apply_settings(self, project_settings):
|
||||
"""Apply OpenPype system/project settings to the TextureProcessor
|
||||
"""Apply AYON system/project settings to the TextureProcessor
|
||||
|
||||
Args:
|
||||
project_settings (dict): OpenPype project settings
|
||||
project_settings (dict): AYON project settings
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
|
@ -278,7 +278,7 @@ class MakeTX(TextureProcessor):
|
|||
"""Process the texture.
|
||||
|
||||
This function requires the `maketx` executable to be available in an
|
||||
OpenImageIO toolset detectable by OpenPype.
|
||||
OpenImageIO toolset detectable by AYON.
|
||||
|
||||
Args:
|
||||
source (str): Path to source file.
|
||||
|
|
|
|||
|
|
@ -128,9 +128,11 @@ class ExtractWorkfileXgen(publish.Extractor):
|
|||
alembic_files.append(alembic_file)
|
||||
|
||||
template_data = copy.deepcopy(instance.data["anatomyData"])
|
||||
published_maya_path = StringTemplate(
|
||||
instance.context.data["anatomy"].templates["publish"]["file"]
|
||||
).format(template_data)
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
publish_template = anatomy.get_template_item(
|
||||
"publish", "default", "file"
|
||||
)
|
||||
published_maya_path = publish_template.format(template_data)
|
||||
published_basename, _ = os.path.splitext(published_maya_path)
|
||||
|
||||
for source in alembic_files:
|
||||
|
|
|
|||
|
|
@ -39,8 +39,9 @@ class ExtractXgen(publish.Extractor):
|
|||
# Get published xgen file name.
|
||||
template_data = copy.deepcopy(instance.data["anatomyData"])
|
||||
template_data.update({"ext": "xgen"})
|
||||
templates = instance.context.data["anatomy"].templates["publish"]
|
||||
xgen_filename = StringTemplate(templates["file"]).format(template_data)
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
file_template = anatomy.get_template_item("publish", "default", "file")
|
||||
xgen_filename = file_template.format(template_data)
|
||||
|
||||
xgen_path = os.path.join(
|
||||
self.staging_dir(instance), xgen_filename
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ class ValidateLoadedPlugin(pyblish.api.ContextPlugin,
|
|||
|
||||
invalid = []
|
||||
loaded_plugin = cmds.pluginInfo(query=True, listPlugins=True)
|
||||
# get variable from OpenPype settings
|
||||
# get variable from AYON settings
|
||||
whitelist_native_plugins = cls.whitelist_native_plugins
|
||||
authorized_plugins = cls.authorized_plugins or []
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin,
|
|||
|
||||
Shading engines should be named "{surface_shader}SG"
|
||||
"""
|
||||
``
|
||||
|
||||
order = ValidateContentsOrder
|
||||
families = ["look"]
|
||||
hosts = ["maya"]
|
||||
|
|
|
|||
|
|
@ -92,9 +92,9 @@ class ValidateRigContents(pyblish.api.InstancePlugin,
|
|||
"""Validate missing objectsets in rig sets
|
||||
|
||||
Args:
|
||||
instance (str): instance
|
||||
required_objsets (list): list of objectset names
|
||||
rig_sets (list): list of rig sets
|
||||
instance (pyblish.api.Instance): instance
|
||||
required_objsets (list[str]): list of objectset names
|
||||
rig_sets (list[str]): list of rig sets
|
||||
|
||||
Raises:
|
||||
PublishValidationError: When the error is raised, it will show
|
||||
|
|
@ -114,15 +114,15 @@ class ValidateRigContents(pyblish.api.InstancePlugin,
|
|||
Check if all rig set members are within the hierarchy of the rig root
|
||||
|
||||
Args:
|
||||
instance (str): instance
|
||||
content (list): list of content from rig sets
|
||||
instance (pyblish.api.Instance): instance
|
||||
content (list[str]): list of content from rig sets
|
||||
|
||||
Raises:
|
||||
PublishValidationError: It means no dag nodes in
|
||||
the rig instance
|
||||
|
||||
Returns:
|
||||
list: invalid hierarchy
|
||||
List[str]: invalid hierarchy
|
||||
"""
|
||||
# Ensure there are at least some transforms or dag nodes
|
||||
# in the rig instance
|
||||
|
|
@ -145,15 +145,13 @@ class ValidateRigContents(pyblish.api.InstancePlugin,
|
|||
|
||||
@classmethod
|
||||
def validate_geometry(cls, set_members):
|
||||
"""
|
||||
Checks if the node types of the set members valid
|
||||
"""Checks if the node types of the set members valid
|
||||
|
||||
Args:
|
||||
set_members: list of nodes of the controls_set
|
||||
hierarchy: list of nodes which reside under the root node
|
||||
set_members (list[str]): nodes of the out_set
|
||||
|
||||
Returns:
|
||||
errors (list)
|
||||
list[str]: Nodes of invalid types.
|
||||
"""
|
||||
|
||||
# Validate all shape types
|
||||
|
|
@ -167,18 +165,17 @@ class ValidateRigContents(pyblish.api.InstancePlugin,
|
|||
if cmds.nodeType(shape) not in cls.accepted_output:
|
||||
invalid.append(shape)
|
||||
|
||||
return invalid
|
||||
|
||||
@classmethod
|
||||
def validate_controls(cls, set_members):
|
||||
"""
|
||||
Checks if the control set members are allowed node types.
|
||||
Checks if the node types of the set members valid
|
||||
"""Checks if the node types of the set members are valid for controls.
|
||||
|
||||
Args:
|
||||
set_members: list of nodes of the controls_set
|
||||
hierarchy: list of nodes which reside under the root node
|
||||
set_members (list[str]): list of nodes of the controls_set
|
||||
|
||||
Returns:
|
||||
errors (list)
|
||||
list: Controls of disallowed node types.
|
||||
"""
|
||||
|
||||
# Validate control types
|
||||
|
|
@ -194,7 +191,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin,
|
|||
"""Get the target objectsets and rig sets nodes
|
||||
|
||||
Args:
|
||||
instance (str): instance
|
||||
instance (pyblish.api.Instance): instance
|
||||
|
||||
Returns:
|
||||
tuple: 2-tuple of list of objectsets,
|
||||
|
|
@ -253,11 +250,10 @@ class ValidateSkeletonRigContents(ValidateRigContents):
|
|||
"""Get the target objectsets and rig sets nodes
|
||||
|
||||
Args:
|
||||
instance (str): instance
|
||||
instance (pyblish.api.Instance): instance
|
||||
|
||||
Returns:
|
||||
tuple: 2-tuple of list of objectsets,
|
||||
list of rig sets nodes
|
||||
tuple: 2-tuple of list of objectsets, list of rig sets nodes
|
||||
"""
|
||||
objectsets = ["skeletonMesh_SET"]
|
||||
skeleton_mesh_nodes = instance.data.get("skeleton_mesh", [])
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin,
|
|||
self.log.warning((
|
||||
"Referenced AOVs are enabled in Vray "
|
||||
"Render Settings and are detected in scene, but "
|
||||
"Pype render instance option for referenced AOVs is "
|
||||
"AYON render instance option for referenced AOVs is "
|
||||
"disabled. Those AOVs will be rendered but not published "
|
||||
"by Pype."
|
||||
))
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ from maya import cmds
|
|||
host = MayaHost()
|
||||
install_host(host)
|
||||
|
||||
print("Starting OpenPype usersetup...")
|
||||
print("Starting AYON usersetup...")
|
||||
|
||||
project_name = get_current_project_name()
|
||||
settings = get_project_settings(project_name)
|
||||
|
|
@ -47,4 +47,4 @@ if bool(int(os.environ.get(key, "0"))):
|
|||
)
|
||||
|
||||
|
||||
print("Finished OpenPype usersetup.")
|
||||
print("Finished AYON usersetup.")
|
||||
|
|
|
|||
|
|
@@ -982,26 +982,18 @@ def format_anatomy(data):

     project_name = get_current_project_name()
     anatomy = Anatomy(project_name)
-    log.debug("__ anatomy.templates: {}".format(anatomy.templates))
-
-    padding = None
-    if "frame_padding" in anatomy.templates.keys():
-        padding = int(anatomy.templates["frame_padding"])
-    elif "render" in anatomy.templates.keys():
-        padding = int(
-            anatomy.templates["render"].get(
-                "frame_padding"
-            )
-        )
+    frame_padding = anatomy.templates_obj.frame_padding

-    version = data.get("version", None)
-    if not version:
+    version = data.get("version")
+    if version is None:
         file = script_name()
         data["version"] = get_version_from_path(file)

     folder_path = data["folderPath"]
     task_name = data["task"]
     host_name = get_current_host_name()

     context_data = get_template_data_with_names(
         project_name, folder_path, task_name, host_name
     )
@@ -1013,7 +1005,7 @@ def format_anatomy(data):
             "name": data["productName"],
             "type": data["productType"],
         },
-        "frame": "#" * padding,
+        "frame": "#" * frame_padding,
     })
     return anatomy.format(data)

@@ -1171,7 +1163,9 @@ def create_write_node(
     anatomy_filled = format_anatomy(data)

     # build file path to workfiles
-    fdir = str(anatomy_filled["work"]["folder"]).replace("\\", "/")
+    fdir = str(
+        anatomy_filled["work"]["default"]["directory"]
+    ).replace("\\", "/")
     data["work"] = fdir
     fpath = StringTemplate(data["fpath_template"]).format_strict(data)

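For orientation, the result of `Anatomy.format()` is now indexed by template category, template name and key (here `"work"`, `"default"`, `"directory"`) instead of the old two-level `["work"]["folder"]` lookup. A small hedged sketch, with the project name and fill data treated as placeholders prepared elsewhere:

```python
from ayon_core.pipeline import Anatomy


def get_work_directory(project_name, fill_data):
    """Sketch: resolve the work directory with the new result layout."""
    anatomy = Anatomy(project_name)
    anatomy_filled = anatomy.format(fill_data)

    # Old lookup (pre-refactor): anatomy_filled["work"]["folder"]
    # New lookup is category -> template name -> key:
    return str(
        anatomy_filled["work"]["default"]["directory"]
    ).replace("\\", "/")
```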
|
@ -128,7 +128,7 @@ class NukeHost(
|
|||
register_creator_plugin_path(CREATE_PATH)
|
||||
register_inventory_action_path(INVENTORY_PATH)
|
||||
|
||||
# Register Avalon event for workfiles loading.
|
||||
# Register AYON event for workfiles loading.
|
||||
register_event_callback("workio.open_file", check_inventory_versions)
|
||||
register_event_callback("taskChanged", change_context_label)
|
||||
|
||||
|
|
@ -230,9 +230,9 @@ def get_context_label():
|
|||
|
||||
|
||||
def _install_menu():
|
||||
"""Install Avalon menu into Nuke's main menu bar."""
|
||||
"""Install AYON menu into Nuke's main menu bar."""
|
||||
|
||||
# uninstall original avalon menu
|
||||
# uninstall original AYON menu
|
||||
main_window = get_main_window()
|
||||
menubar = nuke.menu("Nuke")
|
||||
menu = menubar.addMenu(MENU_LABEL)
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
""" OpenPype custom script for setting up write nodes for non-publish """
|
||||
""" AYON custom script for setting up write nodes for non-publish """
|
||||
import os
|
||||
import nuke
|
||||
import nukescripts
|
||||
from ayon_core.pipeline import Anatomy
|
||||
from ayon_core.pipeline import Anatomy, get_current_project_name
|
||||
from ayon_core.hosts.nuke.api.lib import (
|
||||
set_node_knobs_from_settings,
|
||||
get_nuke_imageio_settings
|
||||
|
|
@ -102,13 +102,9 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel):
|
|||
for knob in ext_knob_list:
|
||||
ext = knob["value"]
|
||||
|
||||
anatomy = Anatomy()
|
||||
anatomy = Anatomy(get_current_project_name())
|
||||
|
||||
frame_padding = int(
|
||||
anatomy.templates["render"].get(
|
||||
"frame_padding"
|
||||
)
|
||||
)
|
||||
frame_padding = anatomy.templates_obj.frame_padding
|
||||
for write_node in write_selected_nodes:
|
||||
# data for mapping the path
|
||||
# TODO add more fill data
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
""" OpenPype custom script for resetting read nodes start frame values """
|
||||
""" AYON custom script for resetting read nodes start frame values """
|
||||
|
||||
import nuke
|
||||
import nukescripts
|
||||
|
|
|
|||
|
|
@ -392,15 +392,13 @@ class PhotoshopRoute(WebSocketRoute):
|
|||
)
|
||||
data["root"] = anatomy.roots
|
||||
|
||||
file_template = anatomy.templates[template_key]["file"]
|
||||
work_template = anatomy.get_template_item("work", template_key)
|
||||
|
||||
# Define saving file extension
|
||||
extensions = host.get_workfile_extensions()
|
||||
|
||||
folder_template = anatomy.templates[template_key]["folder"]
|
||||
work_root = StringTemplate.format_strict_template(
|
||||
folder_template, data
|
||||
)
|
||||
work_root = work_template["directory"].format_strict(data)
|
||||
file_template = work_template["file"].template
|
||||
last_workfile_path = get_last_workfile(
|
||||
work_root, file_template, data, extensions, True
|
||||
)
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@ This is how it looks on my testing project timeline
|
|||

|
||||
Notice I had renamed tracks to `main` (holding metadata markers) and `review` used for generating review data with ffmpeg confersion to jpg sequence.
|
||||
|
||||
1. you need to start OpenPype menu from Resolve/EditTab/Menu/Workspace/Scripts/Comp/**__OpenPype_Menu__**
|
||||
1. you need to start AYON menu from Resolve/EditTab/Menu/Workspace/Scripts/Comp/**__OpenPype_Menu__**
|
||||
2. then select any clips in `main` track and change their color to `Chocolate`
|
||||
3. in OpenPype Menu select `Create`
|
||||
4. in Creator select `Create Publishable Clip [New]` (temporary name)
|
||||
|
|
|
|||
|
|
@ -44,7 +44,7 @@ from .lib import (
|
|||
get_reformated_path
|
||||
)
|
||||
|
||||
from .menu import launch_pype_menu
|
||||
from .menu import launch_ayon_menu
|
||||
|
||||
from .plugin import (
|
||||
ClipLoader,
|
||||
|
|
@ -113,7 +113,7 @@ __all__ = [
|
|||
"get_reformated_path",
|
||||
|
||||
# menu
|
||||
"launch_pype_menu",
|
||||
"launch_ayon_menu",
|
||||
|
||||
# plugin
|
||||
"ClipLoader",
|
||||
|
|
|
|||
|
|
@ -38,9 +38,9 @@ class Spacer(QtWidgets.QWidget):
|
|||
self.setLayout(layout)
|
||||
|
||||
|
||||
class OpenPypeMenu(QtWidgets.QWidget):
|
||||
class AYONMenu(QtWidgets.QWidget):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(OpenPypeMenu, self).__init__(*args, **kwargs)
|
||||
super(AYONMenu, self).__init__(*args, **kwargs)
|
||||
|
||||
self.setObjectName(f"{MENU_LABEL}Menu")
|
||||
|
||||
|
|
@ -170,14 +170,14 @@ class OpenPypeMenu(QtWidgets.QWidget):
|
|||
host_tools.show_experimental_tools_dialog()
|
||||
|
||||
|
||||
def launch_pype_menu():
|
||||
def launch_ayon_menu():
|
||||
app = QtWidgets.QApplication(sys.argv)
|
||||
|
||||
pype_menu = OpenPypeMenu()
|
||||
ayon_menu = AYONMenu()
|
||||
|
||||
stylesheet = load_stylesheet()
|
||||
pype_menu.setStyleSheet(stylesheet)
|
||||
ayon_menu.setStyleSheet(stylesheet)
|
||||
|
||||
pype_menu.show()
|
||||
ayon_menu.show()
|
||||
|
||||
sys.exit(app.exec_())
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ QLineEdit {
|
|||
qproperty-alignment: AlignCenter;
|
||||
}
|
||||
|
||||
#OpenPypeMenu {
|
||||
#AYONMenu {
|
||||
qproperty-alignment: AlignLeft;
|
||||
min-width: 10em;
|
||||
border: 1px solid #fef9ef;
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ def ensure_installed_host():
|
|||
def launch_menu():
|
||||
print("Launching Resolve AYON menu..")
|
||||
ensure_installed_host()
|
||||
ayon_core.hosts.resolve.api.launch_pype_menu()
|
||||
ayon_core.hosts.resolve.api.launch_ayon_menu()
|
||||
|
||||
|
||||
def open_workfile(path):
|
||||
|
|
|
|||
|
|
@ -8,13 +8,13 @@ log = Logger.get_logger(__name__)
|
|||
|
||||
|
||||
def main(env):
|
||||
from ayon_core.hosts.resolve.api import ResolveHost, launch_pype_menu
|
||||
from ayon_core.hosts.resolve.api import ResolveHost, launch_ayon_menu
|
||||
|
||||
# activate resolve from openpype
|
||||
host = ResolveHost()
|
||||
install_host(host)
|
||||
|
||||
launch_pype_menu()
|
||||
launch_ayon_menu()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ class ImportSound(plugin.Loader):
|
|||
def load(self, context, name, namespace, options):
|
||||
# Create temp file for output
|
||||
output_file = tempfile.NamedTemporaryFile(
|
||||
mode="w", prefix="pype_tvp_", suffix=".txt", delete=False
|
||||
mode="w", prefix="ayon_tvp_", suffix=".txt", delete=False
|
||||
)
|
||||
output_file.close()
|
||||
output_filepath = output_file.name.replace("\\", "/")
|
||||
|
|
|
|||
|
|
@ -80,7 +80,7 @@ class LoadWorkfile(plugin.Loader):
|
|||
)
|
||||
data["root"] = anatomy.roots
|
||||
|
||||
file_template = anatomy.templates[template_key]["file"]
|
||||
work_template = anatomy.get_template_item("work", template_key)
|
||||
|
||||
# Define saving file extension
|
||||
extensions = host.get_workfile_extensions()
|
||||
|
|
@ -91,14 +91,11 @@ class LoadWorkfile(plugin.Loader):
|
|||
# Fall back to the first extension supported for this host.
|
||||
extension = extensions[0]
|
||||
|
||||
data["ext"] = extension
|
||||
data["ext"] = extension.lstrip(".")
|
||||
|
||||
folder_template = anatomy.templates[template_key]["folder"]
|
||||
work_root = StringTemplate.format_strict_template(
|
||||
folder_template, data
|
||||
)
|
||||
work_root = work_template["directory"].format_strict(data)
|
||||
version = get_last_workfile_with_version(
|
||||
work_root, file_template, data, extensions
|
||||
work_root, work_template["file"].template, data, extensions
|
||||
)[1]
|
||||
|
||||
if version is None:
|
||||
|
|
|
|||
|
|
@ -98,7 +98,7 @@ class UnrealHost(HostBase, ILoadHost, IPublishHost):
|
|||
|
||||
|
||||
def install():
|
||||
"""Install Unreal configuration for OpenPype."""
|
||||
"""Install Unreal configuration for AYON."""
|
||||
print("-=" * 40)
|
||||
logo = '''.
|
||||
.
|
||||
|
|
|
|||
|
|
@ -66,7 +66,9 @@ class UnrealPrelaunchHook(PreLaunchHook):
|
|||
self.host_name,
|
||||
)
|
||||
# Fill templates
|
||||
template_obj = anatomy.templates_obj[workfile_template_key]["file"]
|
||||
template_obj = anatomy.get_template_item(
|
||||
"work", workfile_template_key, "file"
|
||||
)
|
||||
|
||||
# Return filename
|
||||
return template_obj.format_strict(workdir_data)
|
||||
|
|
|
|||
|
|
@ -81,11 +81,8 @@ from .log import (
|
|||
)
|
||||
|
||||
from .path_templates import (
|
||||
merge_dict,
|
||||
TemplateMissingKey,
|
||||
TemplateUnsolved,
|
||||
StringTemplate,
|
||||
TemplatesDict,
|
||||
FormatObject,
|
||||
)
|
||||
|
||||
|
|
@ -259,11 +256,8 @@ __all__ = [
|
|||
"get_version_from_path",
|
||||
"get_last_version_from_path",
|
||||
|
||||
"merge_dict",
|
||||
"TemplateMissingKey",
|
||||
"TemplateUnsolved",
|
||||
"StringTemplate",
|
||||
"TemplatesDict",
|
||||
"FormatObject",
|
||||
|
||||
"terminal",
|
||||
|
|
|
|||
|
|
@ -204,7 +204,7 @@ class ApplicationGroup:
|
|||
Application group wraps different versions(variants) of application.
|
||||
e.g. "maya" is group and "maya_2020" is variant.
|
||||
|
||||
Group hold `host_name` which is implementation name used in pype. Also
|
||||
Group hold `host_name` which is implementation name used in AYON. Also
|
||||
holds `enabled` if whole app group is enabled or `icon` for application
|
||||
icon path in resources.
|
||||
|
||||
|
|
@ -1862,7 +1862,9 @@ def _prepare_last_workfile(data, workdir, addons_manager):
|
|||
project_settings=project_settings
|
||||
)
|
||||
# Find last workfile
|
||||
file_template = str(anatomy.templates[template_key]["file"])
|
||||
file_template = anatomy.get_template_item(
|
||||
"work", template_key, "file"
|
||||
).template
|
||||
|
||||
workdir_data.update({
|
||||
"version": 1,
|
||||
|
|
@ -1897,12 +1899,12 @@ def should_start_last_workfile(
|
|||
`"0", "1", "true", "false", "yes", "no"`.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
host_name (str): Name of host which is launched. In avalon's
|
||||
application context it's value stored in app definition under
|
||||
key `"application_dir"`. Is not case sensitive.
|
||||
task_name (str): Name of task which is used for launching the host.
|
||||
Task name is not case sensitive.
|
||||
project_name (str): Project name.
|
||||
host_name (str): Host name.
|
||||
task_name (str): Task name.
|
||||
task_type (str): Task type.
|
||||
default_output (Optional[bool]): Default output if no profile is
|
||||
found.
|
||||
|
||||
Returns:
|
||||
bool: True if host should start workfile.
|
||||
|
|
@ -1947,12 +1949,12 @@ def should_workfile_tool_start(
|
|||
`"0", "1", "true", "false", "yes", "no"`.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
host_name (str): Name of host which is launched. In avalon's
|
||||
application context it's value stored in app definition under
|
||||
key `"application_dir"`. Is not case sensitive.
|
||||
task_name (str): Name of task which is used for launching the host.
|
||||
Task name is not case sensitive.
|
||||
project_name (str): Project name.
|
||||
host_name (str): Host name.
|
||||
task_name (str): Task name.
|
||||
task_type (str): Task type.
|
||||
default_output (Optional[bool]): Default output if no profile is
|
||||
found.
|
||||
|
||||
Returns:
|
||||
bool: True if host should start workfile.
|
||||
|
|
|
|||
|
|
@ -102,8 +102,8 @@ def get_all_current_info():
|
|||
def extract_ayon_info_to_file(dirpath, filename=None):
|
||||
"""Extract all current info to a file.
|
||||
|
||||
It is possible to define only directory path. Filename is concatenated with
|
||||
pype version, workstation site id and timestamp.
|
||||
It is possible to define only directory path. Filename is concatenated
|
||||
with AYON version, workstation site id and timestamp.
|
||||
|
||||
Args:
|
||||
dirpath (str): Path to directory where file will be stored.
|
||||
|
|
|
|||
|
|
@ -1,8 +1,6 @@
|
|||
import os
|
||||
import re
|
||||
import copy
|
||||
import numbers
|
||||
import collections
|
||||
|
||||
import six
|
||||
|
||||
|
|
@ -12,44 +10,6 @@ SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
|
|||
OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
|
||||
|
||||
|
||||
def merge_dict(main_dict, enhance_dict):
|
||||
"""Merges dictionaries by keys.
|
||||
|
||||
Function call itself if value on key is again dictionary.
|
||||
|
||||
Args:
|
||||
main_dict (dict): First dict to merge second one into.
|
||||
enhance_dict (dict): Second dict to be merged.
|
||||
|
||||
Returns:
|
||||
dict: Merged result.
|
||||
|
||||
.. note:: does not overrides whole value on first found key
|
||||
but only values differences from enhance_dict
|
||||
|
||||
"""
|
||||
for key, value in enhance_dict.items():
|
||||
if key not in main_dict:
|
||||
main_dict[key] = value
|
||||
elif isinstance(value, dict) and isinstance(main_dict[key], dict):
|
||||
main_dict[key] = merge_dict(main_dict[key], value)
|
||||
else:
|
||||
main_dict[key] = value
|
||||
return main_dict
|
||||
|
||||
|
||||
class TemplateMissingKey(Exception):
|
||||
"""Exception for cases when key does not exist in template."""
|
||||
|
||||
msg = "Template key does not exist: `{}`."
|
||||
|
||||
def __init__(self, parents):
|
||||
parent_join = "".join(["[\"{0}\"]".format(key) for key in parents])
|
||||
super(TemplateMissingKey, self).__init__(
|
||||
self.msg.format(parent_join)
|
||||
)
|
||||
|
||||
|
||||
class TemplateUnsolved(Exception):
|
||||
"""Exception for unsolved template when strict is set to True."""
|
||||
|
||||
|
|
@ -240,137 +200,6 @@ class StringTemplate(object):
|
|||
new_parts.extend(tmp_parts[idx])
|
||||
return new_parts
|
||||
|
||||
|
||||
class TemplatesDict(object):
|
||||
def __init__(self, templates=None):
|
||||
self._raw_templates = None
|
||||
self._templates = None
|
||||
self._objected_templates = None
|
||||
self.set_templates(templates)
|
||||
|
||||
def set_templates(self, templates):
|
||||
if templates is None:
|
||||
self._raw_templates = None
|
||||
self._templates = None
|
||||
self._objected_templates = None
|
||||
elif isinstance(templates, dict):
|
||||
self._raw_templates = copy.deepcopy(templates)
|
||||
self._templates = templates
|
||||
self._objected_templates = self.create_objected_templates(
|
||||
templates)
|
||||
else:
|
||||
raise TypeError("<{}> argument must be a dict, not {}.".format(
|
||||
self.__class__.__name__, str(type(templates))
|
||||
))
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.objected_templates[key]
|
||||
|
||||
def get(self, key, *args, **kwargs):
|
||||
return self.objected_templates.get(key, *args, **kwargs)
|
||||
|
||||
@property
|
||||
def raw_templates(self):
|
||||
return self._raw_templates
|
||||
|
||||
@property
|
||||
def templates(self):
|
||||
return self._templates
|
||||
|
||||
@property
|
||||
def objected_templates(self):
|
||||
return self._objected_templates
|
||||
|
||||
def _create_template_object(self, template):
|
||||
"""Create template object from a template string.
|
||||
|
||||
Separated into method to give option change class of templates.
|
||||
|
||||
Args:
|
||||
template (str): Template string.
|
||||
|
||||
Returns:
|
||||
StringTemplate: Object of template.
|
||||
"""
|
||||
|
||||
return StringTemplate(template)
|
||||
|
||||
def create_objected_templates(self, templates):
|
||||
if not isinstance(templates, dict):
|
||||
raise TypeError("Expected dict object, got {}".format(
|
||||
str(type(templates))
|
||||
))
|
||||
|
||||
objected_templates = copy.deepcopy(templates)
|
||||
inner_queue = collections.deque()
|
||||
inner_queue.append(objected_templates)
|
||||
while inner_queue:
|
||||
item = inner_queue.popleft()
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
for key in tuple(item.keys()):
|
||||
value = item[key]
|
||||
if isinstance(value, six.string_types):
|
||||
item[key] = self._create_template_object(value)
|
||||
elif isinstance(value, dict):
|
||||
inner_queue.append(value)
|
||||
return objected_templates
|
||||
|
||||
def _format_value(self, value, data):
|
||||
if isinstance(value, StringTemplate):
|
||||
return value.format(data)
|
||||
|
||||
if isinstance(value, dict):
|
||||
return self._solve_dict(value, data)
|
||||
return value
|
||||
|
||||
def _solve_dict(self, templates, data):
|
||||
""" Solves templates with entered data.
|
||||
|
||||
Args:
|
||||
templates (dict): All templates which will be formatted.
|
||||
data (dict): Containing keys to be filled into template.
|
||||
|
||||
Returns:
|
||||
dict: With `TemplateResult` in values containing filled or
|
||||
partially filled templates.
|
||||
"""
|
||||
output = collections.defaultdict(dict)
|
||||
for key, value in templates.items():
|
||||
output[key] = self._format_value(value, data)
|
||||
|
||||
return output
|
||||
|
||||
def format(self, in_data, only_keys=True, strict=True):
|
||||
""" Solves templates based on entered data.
|
||||
|
||||
Args:
|
||||
data (dict): Containing keys to be filled into template.
|
||||
only_keys (bool, optional): Decides if environ will be used to
|
||||
fill templates or only keys in data.
|
||||
|
||||
Returns:
|
||||
TemplatesResultDict: Output `TemplateResult` have `strict`
|
||||
attribute set to True so accessing unfilled keys in templates
|
||||
will raise exceptions with explaned error.
|
||||
"""
|
||||
# Create a copy of inserted data
|
||||
data = copy.deepcopy(in_data)
|
||||
|
||||
# Add environment variable to data
|
||||
if only_keys is False:
|
||||
for key, val in os.environ.items():
|
||||
env_key = "$" + key
|
||||
if env_key not in data:
|
||||
data[env_key] = val
|
||||
|
||||
solved = self._solve_dict(self.objected_templates, data)
|
||||
|
||||
output = TemplatesResultDict(solved)
|
||||
output.strict = strict
|
||||
return output
|
||||
|
||||
|
||||
class TemplateResult(str):
|
||||
"""Result of template format with most of information in.
|
||||
|
||||
|
|
@ -379,8 +208,8 @@ class TemplateResult(str):
|
|||
only used keys.
|
||||
solved (bool): For check if all required keys were filled.
|
||||
template (str): Original template.
|
||||
missing_keys (list): Missing keys that were not in the data. Include
|
||||
missing optional keys.
|
||||
missing_keys (Iterable[str]): Missing keys that were not in the data.
|
||||
Include missing optional keys.
|
||||
invalid_types (dict): When key was found in data, but value had not
|
||||
allowed DataType. Allowed data types are `numbers`,
|
||||
`str`(`basestring`) and `dict`. Dictionary may cause invalid type
|
||||
|
|
@ -445,99 +274,6 @@ class TemplateResult(str):
|
|||
)
|
||||
|
||||
|
||||
class TemplatesResultDict(dict):
|
||||
"""Holds and wrap TemplateResults for easy bug report."""
|
||||
|
||||
def __init__(self, in_data, key=None, parent=None, strict=None):
|
||||
super(TemplatesResultDict, self).__init__()
|
||||
for _key, _value in in_data.items():
|
||||
if isinstance(_value, dict):
|
||||
_value = self.__class__(_value, _key, self)
|
||||
self[_key] = _value
|
||||
|
||||
self.key = key
|
||||
self.parent = parent
|
||||
self.strict = strict
|
||||
if self.parent is None and strict is None:
|
||||
self.strict = True
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key not in self.keys():
|
||||
hier = self.hierarchy()
|
||||
hier.append(key)
|
||||
raise TemplateMissingKey(hier)
|
||||
|
||||
value = super(TemplatesResultDict, self).__getitem__(key)
|
||||
if isinstance(value, self.__class__):
|
||||
return value
|
||||
|
||||
# Raise exception when expected solved templates and it is not.
|
||||
if self.raise_on_unsolved and hasattr(value, "validate"):
|
||||
value.validate()
|
||||
return value
|
||||
|
||||
@property
|
||||
def raise_on_unsolved(self):
|
||||
"""To affect this change `strict` attribute."""
|
||||
if self.strict is not None:
|
||||
return self.strict
|
||||
return self.parent.raise_on_unsolved
|
||||
|
||||
def hierarchy(self):
|
||||
"""Return dictionary keys one by one to root parent."""
|
||||
if self.parent is None:
|
||||
return []
|
||||
|
||||
hier_keys = []
|
||||
par_hier = self.parent.hierarchy()
|
||||
if par_hier:
|
||||
hier_keys.extend(par_hier)
|
||||
hier_keys.append(self.key)
|
||||
|
||||
return hier_keys
|
||||
|
||||
@property
|
||||
def missing_keys(self):
|
||||
"""Return missing keys of all children templates."""
|
||||
missing_keys = set()
|
||||
for value in self.values():
|
||||
missing_keys |= value.missing_keys
|
||||
return missing_keys
|
||||
|
||||
@property
|
||||
def invalid_types(self):
|
||||
"""Return invalid types of all children templates."""
|
||||
invalid_types = {}
|
||||
for value in self.values():
|
||||
invalid_types = merge_dict(invalid_types, value.invalid_types)
|
||||
return invalid_types
|
||||
|
||||
@property
|
||||
def used_values(self):
|
||||
"""Return used values for all children templates."""
|
||||
used_values = {}
|
||||
for value in self.values():
|
||||
used_values = merge_dict(used_values, value.used_values)
|
||||
return used_values
|
||||
|
||||
def get_solved(self):
|
||||
"""Get only solved key from templates."""
|
||||
result = {}
|
||||
for key, value in self.items():
|
||||
if isinstance(value, self.__class__):
|
||||
value = value.get_solved()
|
||||
if not value:
|
||||
continue
|
||||
result[key] = value
|
||||
|
||||
elif (
|
||||
not hasattr(value, "solved") or
|
||||
value.solved
|
||||
):
|
||||
result[key] = value
|
||||
return self.__class__(result, key=self.key, parent=self.parent)
|
||||
|
||||
|
||||
class TemplatePartResult:
|
||||
"""Result to store result of template parts."""
|
||||
def __init__(self, optional=False):
|
||||
|
|
|
|||
|
|
@ -118,8 +118,8 @@ def classes_from_module(superclass, module):
|
|||
|
||||
Arguments:
|
||||
superclass (superclass): Superclass of subclasses to look for
|
||||
module (types.ModuleType): Imported module from which to
|
||||
parse valid Avalon plug-ins.
|
||||
module (types.ModuleType): Imported module where to look for
|
||||
'superclass' subclasses.
|
||||
|
||||
Returns:
|
||||
List of plug-ins, or empty list if none is found.
|
||||
|
|
|
|||
|
|
@ -69,7 +69,7 @@ class Terminal:
|
|||
Terminal.use_colors = False
|
||||
print(
|
||||
"Module `blessed` failed on import or terminal creation."
|
||||
" Pype terminal won't use colors."
|
||||
" AYON terminal won't use colors."
|
||||
)
|
||||
Terminal._initialized = True
|
||||
return
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ class SyncClockifyServer(ServerAction):
|
|||
label = "Sync To Clockify (server)"
|
||||
description = "Synchronise data to Clockify workspace"
|
||||
|
||||
role_list = ["Pypeclub", "Administrator", "project Manager"]
|
||||
role_list = ["Administrator", "project Manager"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(SyncClockifyServer, self).__init__(*args, **kwargs)
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ class SyncClockifyLocal(BaseAction):
|
|||
#: Action description.
|
||||
description = 'Synchronise data to Clockify workspace'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator", "project Manager"]
|
||||
role_list = ["Administrator", "project Manager"]
|
||||
#: icon
|
||||
icon = statics_icon("app_icons", "clockify-white.png")
|
||||
|
||||
|
|
|
|||
|
|
@ -74,6 +74,10 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
render_path = os.path.normpath(render_path)
|
||||
script_name = os.path.basename(script_path)
|
||||
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
publish_template = anatomy.get_template_item(
|
||||
"publish", "default", "path"
|
||||
)
|
||||
for item in instance.context:
|
||||
if "workfile" in item.data["productType"]:
|
||||
msg = "Workfile (scene) must be published along"
|
||||
|
|
@ -84,9 +88,9 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
template_data["representation"] = rep
|
||||
template_data["ext"] = rep
|
||||
template_data["comment"] = None
|
||||
anatomy_filled = instance.context.data["anatomy"].format(
|
||||
template_data)
|
||||
template_filled = anatomy_filled["publish"]["path"]
|
||||
template_filled = publish_template.format_strict(
|
||||
template_data
|
||||
)
|
||||
script_path = os.path.normpath(template_filled)
|
||||
|
||||
self.log.info(
|
||||
|
|
|
|||
|
|
@ -123,6 +123,10 @@ class FusionSubmitDeadline(
|
|||
|
||||
script_path = context.data["currentFile"]
|
||||
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
publish_template = anatomy.get_template_item(
|
||||
"publish", "default", "path"
|
||||
)
|
||||
for item in context:
|
||||
if "workfile" in item.data["families"]:
|
||||
msg = "Workfile (scene) must be published along"
|
||||
|
|
@ -133,8 +137,9 @@ class FusionSubmitDeadline(
|
|||
template_data["representation"] = rep
|
||||
template_data["ext"] = rep
|
||||
template_data["comment"] = None
|
||||
anatomy_filled = context.data["anatomy"].format(template_data)
|
||||
template_filled = anatomy_filled["publish"]["path"]
|
||||
template_filled = publish_template.format_strict(
|
||||
template_data
|
||||
)
|
||||
script_path = os.path.normpath(template_filled)
|
||||
|
||||
self.log.info(
|
||||
|
|
|
|||
|
|
@ -196,6 +196,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
def _get_published_workfile_path(self, context):
|
||||
"""This method is temporary while the class is not inherited from
|
||||
AbstractSubmitDeadline"""
|
||||
anatomy = context.data["anatomy"]
|
||||
# WARNING Hardcoded template name 'default' > may not be used
|
||||
publish_template = anatomy.get_template_item(
|
||||
"publish", "default", "path"
|
||||
)
|
||||
for instance in context:
|
||||
if (
|
||||
instance.data["productType"] != "workfile"
|
||||
|
|
@ -216,11 +221,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
template_data["ext"] = ext
|
||||
template_data["comment"] = None
|
||||
|
||||
anatomy = context.data["anatomy"]
|
||||
# WARNING Hardcoded template name 'publish' > may not be used
|
||||
template_obj = anatomy.templates_obj["publish"]["path"]
|
||||
|
||||
template_filled = template_obj.format(template_data)
|
||||
template_filled = publish_template.format(template_data)
|
||||
script_path = os.path.normpath(template_filled)
|
||||
self.log.info(
|
||||
"Using published scene for render {}".format(
|
||||
|
|
|
|||
|
|
@ -450,23 +450,10 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
"type": product_type,
|
||||
}
|
||||
|
||||
render_templates = anatomy.templates_obj[template_name]
|
||||
if "folder" in render_templates:
|
||||
publish_folder = render_templates["folder"].format_strict(
|
||||
template_data
|
||||
)
|
||||
else:
|
||||
# solve deprecated situation when `folder` key is not underneath
|
||||
# `publish` anatomy
|
||||
self.log.warning((
|
||||
"Deprecation warning: Anatomy does not have set `folder`"
|
||||
" key underneath `publish` (in global of for project `{}`)."
|
||||
).format(project_name))
|
||||
|
||||
file_path = render_templates["path"].format_strict(template_data)
|
||||
publish_folder = os.path.dirname(file_path)
|
||||
|
||||
return publish_folder
|
||||
render_dir_template = anatomy.get_template_item(
|
||||
"publish", template_name, "directory"
|
||||
)
|
||||
return render_dir_template.format_strict(template_data)
|
||||
|
||||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
|
|
|
|||
|
|
@ -573,23 +573,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
"type": product_type,
|
||||
}
|
||||
|
||||
render_templates = anatomy.templates_obj[template_name]
|
||||
if "folder" in render_templates:
|
||||
publish_folder = render_templates["folder"].format_strict(
|
||||
template_data
|
||||
)
|
||||
else:
|
||||
# solve deprecated situation when `folder` key is not underneath
|
||||
# `publish` anatomy
|
||||
self.log.warning((
|
||||
"Deprecation warning: Anatomy does not have set `folder`"
|
||||
" key underneath `publish` (in global of for project `{}`)."
|
||||
).format(project_name))
|
||||
|
||||
file_path = render_templates["path"].format_strict(template_data)
|
||||
publish_folder = os.path.dirname(file_path)
|
||||
|
||||
return publish_folder
|
||||
render_dir_template = anatomy.get_template_item(
|
||||
"publish", template_name, "directory"
|
||||
)
|
||||
return render_dir_template.format_strict(template_data)
|
||||
|
||||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
|
|
|
|||
File diff suppressed because it is too large
17
client/ayon_core/pipeline/anatomy/__init__.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
from .exceptions import (
|
||||
ProjectNotSet,
|
||||
RootCombinationError,
|
||||
TemplateMissingKey,
|
||||
AnatomyTemplateUnsolved,
|
||||
)
|
||||
from .anatomy import Anatomy
|
||||
|
||||
|
||||
__all__ = (
|
||||
"ProjectNotSet",
|
||||
"RootCombinationError",
|
||||
"TemplateMissingKey",
|
||||
"AnatomyTemplateUnsolved",
|
||||
|
||||
"Anatomy",
|
||||
)
|
||||
556
client/ayon_core/pipeline/anatomy/anatomy.py
Normal file
|
|
@ -0,0 +1,556 @@
|
|||
import os
|
||||
import re
|
||||
import copy
|
||||
import platform
|
||||
import collections
|
||||
import time
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.lib import Logger, get_local_site_id, StringTemplate
|
||||
from ayon_core.addon import AddonsManager
|
||||
|
||||
from .exceptions import RootCombinationError, ProjectNotSet
|
||||
from .roots import AnatomyRoots
|
||||
from .templates import AnatomyTemplates
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
|
||||
class BaseAnatomy(object):
|
||||
"""Anatomy module helps to keep project settings.
|
||||
|
||||
Wraps key project specifications, AnatomyTemplates and AnatomyRoots.
|
||||
"""
|
||||
root_key_regex = re.compile(r"{(root?[^}]+)}")
|
||||
root_name_regex = re.compile(r"root\[([^]]+)\]")
|
||||
|
||||
def __init__(self, project_entity, root_overrides=None):
|
||||
self._project_name = project_entity["name"]
|
||||
self._project_code = project_entity["code"]
|
||||
|
||||
self._data = self._prepare_anatomy_data(
|
||||
project_entity, root_overrides
|
||||
)
|
||||
self._templates_obj = AnatomyTemplates(self)
|
||||
self._roots_obj = AnatomyRoots(self)
|
||||
|
||||
# Anatomy used as dictionary
|
||||
# - implemented only getters returning copy
|
||||
def __getitem__(self, key):
|
||||
return copy.deepcopy(self._data[key])
|
||||
|
||||
def get(self, key, default=None):
|
||||
if key not in self._data:
|
||||
return default
|
||||
return copy.deepcopy(self._data[key])
|
||||
|
||||
def keys(self):
|
||||
return copy.deepcopy(self._data).keys()
|
||||
|
||||
def values(self):
|
||||
return copy.deepcopy(self._data).values()
|
||||
|
||||
def items(self):
|
||||
return copy.deepcopy(self._data).items()
|
||||
|
||||
@property
|
||||
def project_name(self):
|
||||
"""Project name for which is anatomy prepared.
|
||||
|
||||
Returns:
|
||||
str: Project name.
|
||||
|
||||
"""
|
||||
return self._project_name
|
||||
|
||||
@property
|
||||
def project_code(self):
|
||||
"""Project name for which is anatomy prepared.
|
||||
|
||||
Returns:
|
||||
str: Project code.
|
||||
|
||||
"""
|
||||
return self._project_code
|
||||
|
||||
def _prepare_anatomy_data(self, project_entity, root_overrides):
|
||||
"""Prepare anatomy data for further processing.
|
||||
|
||||
Method added to replace `{task}` with `{task[name]}` in templates.
|
||||
"""
|
||||
|
||||
anatomy_data = self._project_entity_to_anatomy_data(project_entity)
|
||||
|
||||
self._apply_local_settings_on_anatomy_data(
|
||||
anatomy_data,
|
||||
root_overrides
|
||||
)
|
||||
|
||||
return anatomy_data
|
||||
|
||||
@property
|
||||
def templates(self):
|
||||
"""Wrap property `templates` of Anatomy's AnatomyTemplates instance."""
|
||||
return self._templates_obj.templates
|
||||
|
||||
@property
|
||||
def templates_obj(self):
|
||||
"""Return `AnatomyTemplates` object of current Anatomy instance."""
|
||||
return self._templates_obj
|
||||
|
||||
def get_template_item(self, *args, **kwargs):
|
||||
"""Get template item from category.
|
||||
|
||||
Args:
|
||||
category_name (str): Category name.
|
||||
template_name (str): Template name.
|
||||
subkey (Optional[str]): Subkey name.
|
||||
default (Any): Default value.
|
||||
|
||||
Returns:
|
||||
Any: Template item, subkey value as AnatomyStringTemplate or None.
|
||||
|
||||
"""
|
||||
return self._templates_obj.get_template_item(*args, **kwargs)
|
||||
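A short, hedged usage example of the wrapper above. The `"publish"`/`"default"` pair mirrors the calls used in the Deadline plugins elsewhere in this commit; the `"render"` template name and the fill data are assumptions.

```python
from ayon_core.pipeline.anatomy import Anatomy

anatomy = Anatomy("my_project")  # assumed project name
template_data = {}               # must contain every key used by the template

# Returns the subkey as a template object; strict formatting raises
# when a required key is missing from the data.
path_template = anatomy.get_template_item("publish", "default", "path")
publish_path = path_template.format_strict(template_data)

# A safe lookup that does not raise when the template does not exist.
maybe_item = anatomy.get_template_item("publish", "render", default=None)
```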
|
||||
def format(self, *args, **kwargs):
|
||||
"""Wrap `format` method of Anatomy's `templates_obj`."""
|
||||
return self._templates_obj.format(*args, **kwargs)
|
||||
|
||||
def format_all(self, *args, **kwargs):
|
||||
"""Wrap `format_all` method of Anatomy's `templates_obj`.
|
||||
|
||||
Deprecated:
|
||||
Use ``format`` method with ``strict=False`` instead.
|
||||
|
||||
"""
|
||||
return self._templates_obj.format_all(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def roots(self):
|
||||
"""Wrap `roots` property of Anatomy's `roots_obj`."""
|
||||
return self._roots_obj.roots
|
||||
|
||||
@property
|
||||
def roots_obj(self):
|
||||
"""Roots wrapper object.
|
||||
|
||||
Returns:
|
||||
AnatomyRoots: Roots wrapper.
|
||||
|
||||
"""
|
||||
return self._roots_obj
|
||||
|
||||
def root_environments(self):
|
||||
"""Return AYON_PROJECT_ROOT_* environments for current project."""
|
||||
return self._roots_obj.root_environments()
|
||||
|
||||
def root_environmets_fill_data(self, template=None):
|
||||
"""Environment variable values in dictionary for rootless path.
|
||||
|
||||
Args:
|
||||
template (str): Template for environment variable key fill.
|
||||
By default is set to `"${}"`.
|
||||
"""
|
||||
return self.roots_obj.root_environmets_fill_data(template)
|
||||
|
||||
def find_root_template_from_path(self, *args, **kwargs):
|
||||
"""Wrapper for AnatomyRoots `find_root_template_from_path`."""
|
||||
return self.roots_obj.find_root_template_from_path(*args, **kwargs)
|
||||
|
||||
def path_remapper(self, *args, **kwargs):
|
||||
"""Wrapper for AnatomyRoots `path_remapper`."""
|
||||
return self.roots_obj.path_remapper(*args, **kwargs)
|
||||
|
||||
def all_root_paths(self):
|
||||
"""Wrapper for AnatomyRoots `all_root_paths`."""
|
||||
return self.roots_obj.all_root_paths()
|
||||
|
||||
def set_root_environments(self):
|
||||
"""Set AYON_PROJECT_ROOT_* environments for current project."""
|
||||
self._roots_obj.set_root_environments()
|
||||
|
||||
def root_names(self):
|
||||
"""Return root names for current project."""
|
||||
return self.root_names_from_templates(self.templates)
|
||||
|
||||
def _root_keys_from_templates(self, data):
|
||||
"""Extract root key from templates in data.
|
||||
|
||||
Args:
|
||||
data (dict): Data that may contain templates as string.
|
||||
|
||||
Return:
|
||||
set: Set of all root names from templates as strings.
|
||||
|
||||
Output example: `{"root[work]", "root[publish]"}`
|
||||
"""
|
||||
|
||||
output = set()
|
||||
keys_queue = collections.deque()
|
||||
keys_queue.append(data)
|
||||
while keys_queue:
|
||||
queue_data = keys_queue.popleft()
|
||||
if isinstance(queue_data, StringTemplate):
|
||||
queue_data = queue_data.template
|
||||
|
||||
if isinstance(queue_data, dict):
|
||||
for value in queue_data.values():
|
||||
keys_queue.append(value)
|
||||
|
||||
elif isinstance(queue_data, str):
|
||||
for group in re.findall(self.root_key_regex, queue_data):
|
||||
output.add(group)
|
||||
|
||||
return output
|
||||
|
||||
def root_value_for_template(self, template):
|
||||
"""Returns value of root key from template."""
|
||||
if isinstance(template, StringTemplate):
|
||||
template = template.template
|
||||
root_templates = []
|
||||
for group in re.findall(self.root_key_regex, template):
|
||||
root_templates.append("{" + group + "}")
|
||||
|
||||
if not root_templates:
|
||||
return None
|
||||
|
||||
return root_templates[0].format(**{"root": self.roots})
|
||||
|
||||
def root_names_from_templates(self, templates):
|
||||
"""Extract root names form anatomy templates.
|
||||
|
||||
Returns None if values in templates contain only "{root}".
|
||||
Empty list is returned if there is no "root" in templates.
|
||||
Else returns all root names from templates in list.
|
||||
|
||||
RootCombinationError is raised when templates contain both root types,
|
||||
basic "{root}" and with root name specification "{root[work]}".
|
||||
|
||||
Args:
|
||||
templates (dict): Anatomy templates where roots are not filled.
|
||||
|
||||
Return:
|
||||
list/None: List of all root names from templates as strings when
|
||||
multiroot setup is used, otherwise None is returned.
|
||||
"""
|
||||
roots = list(self._root_keys_from_templates(templates))
|
||||
# Return empty list if no roots found in templates
|
||||
if not roots:
|
||||
return roots
|
||||
|
||||
# Raise exception when root keys have roots with and without root name.
|
||||
# Invalid output example: ["root", "root[project]", "root[render]"]
|
||||
if len(roots) > 1 and "root" in roots:
|
||||
raise RootCombinationError(roots)
|
||||
|
||||
# Return None if "root" without root name in templates
|
||||
if len(roots) == 1 and roots[0] == "root":
|
||||
return None
|
||||
|
||||
names = set()
|
||||
for root in roots:
|
||||
for group in re.findall(self.root_name_regex, root):
|
||||
names.add(group)
|
||||
return list(names)
|
||||
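An illustrative call of the method above; the template strings are assumptions chosen only to show the documented outcomes.

```python
# Multi-root templates -> list of root names.
anatomy.root_names_from_templates(
    {"work": "{root[work]}/{project[name]}"}
)   # -> ["work"]

# Only the plain "{root}" key -> None.
anatomy.root_names_from_templates(
    {"work": "{root}/{project[name]}"}
)   # -> None

# Mixing "{root}" and "{root[name]}" raises RootCombinationError.
```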
|
||||
def fill_root(self, template_path):
|
||||
"""Fill template path where is only "root" key unfilled.
|
||||
|
||||
Args:
|
||||
template_path (str): Path with "root" key in.
|
||||
Example path: "{root}/projects/MyProject/Shot01/Lighting/..."
|
||||
|
||||
Return:
|
||||
str: formatted path
|
||||
"""
|
||||
# NOTE does not care if there are different keys than "root"
|
||||
return template_path.format(**{"root": self.roots})
|
||||
|
||||
@classmethod
|
||||
def fill_root_with_path(cls, rootless_path, root_path):
|
||||
"""Fill path without filled "root" key with passed path.
|
||||
|
||||
This is a helper to fill the root with a different directory path than the
anatomy has defined, no matter if the setup is single-root or multi-root.
|
||||
|
||||
Output path is same as input path if `rootless_path` does not contain
|
||||
unfilled root key.
|
||||
|
||||
Args:
|
||||
rootless_path (str): Path without filled "root" key. Example:
|
||||
"{root[work]}/MyProject/..."
|
||||
root_path (str): What should replace root key in `rootless_path`.
|
||||
|
||||
Returns:
|
||||
str: Path with filled root.
|
||||
"""
|
||||
output = str(rootless_path)
|
||||
for group in re.findall(cls.root_key_regex, rootless_path):
|
||||
replacement = "{" + group + "}"
|
||||
output = output.replace(replacement, root_path)
|
||||
|
||||
return output
|
||||
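A hedged example of the classmethod above; the root name, project structure and mount point are made up for illustration.

```python
from ayon_core.pipeline.anatomy import Anatomy

rootless = "{root[work]}/my_project/shots/sh010/workfile_v001.ma"

path = Anatomy.fill_root_with_path(rootless, "/mnt/projects/work")
# -> "/mnt/projects/work/my_project/shots/sh010/workfile_v001.ma"
```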
|
||||
def replace_root_with_env_key(self, filepath, template=None):
|
||||
"""Replace root of path with environment key.
|
||||
|
||||
# Example:
|
||||
## Project with roots:
|
||||
```
|
||||
{
|
||||
"nas": {
|
||||
"windows": P:/projects",
|
||||
...
|
||||
}
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
## Entered filepath
|
||||
"P:/projects/project/folder/task/animation_v001.ma"
|
||||
|
||||
## Entered template
|
||||
"<{}>"
|
||||
|
||||
## Output
|
||||
"<AYON_PROJECT_ROOT_NAS>/project/folder/task/animation_v001.ma"
|
||||
|
||||
Args:
|
||||
filepath (str): Full file path where root should be replaced.
|
||||
template (str): Optional template for environment key. Must
|
||||
have one index format key.
|
||||
Default value if not entered: "${}"
|
||||
|
||||
Returns:
|
||||
str: Path where root is replaced with environment root key.
|
||||
|
||||
Raise:
|
||||
ValueError: When project's roots were not found in entered path.
|
||||
"""
|
||||
success, rootless_path = self.find_root_template_from_path(filepath)
|
||||
if not success:
|
||||
raise ValueError(
|
||||
"{}: Project's roots were not found in path: {}".format(
|
||||
self.project_name, filepath
|
||||
)
|
||||
)
|
||||
|
||||
data = self.root_environmets_fill_data(template)
|
||||
return rootless_path.format(**data)
|
||||
|
||||
def _project_entity_to_anatomy_data(self, project_entity):
|
||||
"""Convert project document to anatomy data.
|
||||
|
||||
Probably should fill missing keys and values.
|
||||
"""
|
||||
|
||||
output = copy.deepcopy(project_entity["config"])
|
||||
# TODO remove AYON conversion
|
||||
task_types = copy.deepcopy(project_entity["taskTypes"])
|
||||
new_task_types = {}
|
||||
for task_type in task_types:
|
||||
name = task_type["name"]
|
||||
new_task_types[name] = task_type
|
||||
output["tasks"] = new_task_types
|
||||
output["attributes"] = copy.deepcopy(project_entity["attrib"])
|
||||
|
||||
return output
|
||||
|
||||
def _apply_local_settings_on_anatomy_data(
|
||||
self, anatomy_data, root_overrides
|
||||
):
|
||||
"""Apply local settings on anatomy data.
|
||||
|
||||
ATM local settings can modify project roots. Project name is required
as local settings store data by project's name.
|
||||
|
||||
Local settings override root values in this order:
|
||||
1.) Check if local settings contain overrides for the default project and
apply its values on roots if there are any.
2.) If the passed `project_name` is not None, check project specific
overrides in local settings for the project and apply its values on
roots if there are any.

NOTE: Root values of the default project from local settings are always
applied if they are set.
|
||||
|
||||
Args:
|
||||
anatomy_data (dict): Data for anatomy.
|
||||
root_overrides (dict): Data of local settings.
|
||||
"""
|
||||
|
||||
# Skip processing if roots for current active site are not available in
|
||||
# local settings
|
||||
if not root_overrides:
|
||||
return
|
||||
|
||||
current_platform = platform.system().lower()
|
||||
|
||||
root_data = anatomy_data["roots"]
|
||||
for root_name, path in root_overrides.items():
|
||||
if root_name not in root_data:
|
||||
continue
|
||||
anatomy_data["roots"][root_name][current_platform] = (
|
||||
path
|
||||
)
|
||||
|
||||
|
||||
class CacheItem:
|
||||
"""Helper to cache data.
|
||||
|
||||
Helper does not handle refresh of data and does not mark data as outdated.
Whoever uses the object should check the outdated state on their own.
|
||||
"""
|
||||
|
||||
default_lifetime = 10
|
||||
|
||||
def __init__(self, lifetime=None):
|
||||
self._data = None
|
||||
self._cached = None
|
||||
self._lifetime = lifetime or self.default_lifetime
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
"""Cached data/object.
|
||||
|
||||
Returns:
|
||||
Any: Whatever was cached.
|
||||
"""
|
||||
|
||||
return self._data
|
||||
|
||||
@property
|
||||
def is_outdated(self):
|
||||
"""Item has outdated cache.
|
||||
|
||||
Lifetime of cache item expired or was not yet set.
|
||||
|
||||
Returns:
|
||||
bool: Item is outdated.
|
||||
"""
|
||||
|
||||
if self._cached is None:
|
||||
return True
|
||||
return (time.time() - self._cached) > self._lifetime
|
||||
|
||||
def update_data(self, data):
|
||||
"""Update cache of data.
|
||||
|
||||
Args:
|
||||
data (Any): Data to cache.
|
||||
"""
|
||||
|
||||
self._data = data
|
||||
self._cached = time.time()
|
||||
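A small sketch of how the cache helper above is meant to be used; the lifetime value and the fetch function are assumptions.

```python
# CacheItem is module-internal; imported here only for the sketch.
from ayon_core.pipeline.anatomy.anatomy import CacheItem

cache = CacheItem(lifetime=30)  # seconds; default lifetime is 10


def _fetch_project():
    # Placeholder for an expensive call, e.g. ayon_api.get_project(...).
    return {"name": "my_project"}


if cache.is_outdated:
    cache.update_data(_fetch_project())

project = cache.data
```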
|
||||
|
||||
class Anatomy(BaseAnatomy):
|
||||
_sync_server_addon_cache = CacheItem()
|
||||
_project_cache = collections.defaultdict(CacheItem)
|
||||
_default_site_id_cache = collections.defaultdict(CacheItem)
|
||||
_root_overrides_cache = collections.defaultdict(
|
||||
lambda: collections.defaultdict(CacheItem)
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self, project_name=None, site_name=None, project_entity=None
|
||||
):
|
||||
if not project_name:
|
||||
project_name = os.environ.get("AYON_PROJECT_NAME")
|
||||
|
||||
if not project_name:
|
||||
raise ProjectNotSet((
|
||||
"Implementation bug: Project name is not set. Anatomy requires"
|
||||
" to load data for specific project."
|
||||
))
|
||||
|
||||
if not project_entity:
|
||||
project_entity = self.get_project_entity_from_cache(project_name)
|
||||
root_overrides = self._get_site_root_overrides(
|
||||
project_name, site_name
|
||||
)
|
||||
|
||||
super(Anatomy, self).__init__(project_entity, root_overrides)
|
||||
|
||||
@classmethod
|
||||
def get_project_entity_from_cache(cls, project_name):
|
||||
project_cache = cls._project_cache[project_name]
|
||||
if project_cache.is_outdated:
|
||||
project_cache.update_data(ayon_api.get_project(project_name))
|
||||
return copy.deepcopy(project_cache.data)
|
||||
|
||||
@classmethod
|
||||
def get_sync_server_addon(cls):
|
||||
if cls._sync_server_addon_cache.is_outdated:
|
||||
manager = AddonsManager()
|
||||
cls._sync_server_addon_cache.update_data(
|
||||
manager.get_enabled_addon("sync_server")
|
||||
)
|
||||
return cls._sync_server_addon_cache.data
|
||||
|
||||
@classmethod
|
||||
def _get_studio_roots_overrides(cls, project_name):
|
||||
"""This would return 'studio' site override by local settings.
|
||||
|
||||
Notes:
|
||||
This logic handles local overrides of studio site which may be
|
||||
available even when sync server is not enabled.
|
||||
Handling of 'studio' and 'local' site was separated as preparation
|
||||
for AYON development where that will be received from
|
||||
separated sources.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
|
||||
Returns:
|
||||
Union[Dict[str, str], None]): Local root overrides.
|
||||
"""
|
||||
if not project_name:
|
||||
return
|
||||
return ayon_api.get_project_roots_for_site(
|
||||
project_name, get_local_site_id()
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _get_site_root_overrides(cls, project_name, site_name):
|
||||
"""Get root overrides for site.
|
||||
|
||||
Args:
|
||||
project_name (str): Project name for which root overrides should be
|
||||
received.
|
||||
site_name (Union[str, None]): Name of site for which root overrides
|
||||
should be returned.
|
||||
"""
|
||||
|
||||
# First check if sync server is available and enabled
|
||||
sync_server = cls.get_sync_server_addon()
|
||||
if sync_server is None or not sync_server.enabled:
|
||||
# QUESTION is ok to force 'studio' when site sync is not enabled?
|
||||
site_name = "studio"
|
||||
|
||||
elif not site_name:
|
||||
# Use sync server to receive active site name
|
||||
project_cache = cls._default_site_id_cache[project_name]
|
||||
if project_cache.is_outdated:
|
||||
project_cache.update_data(
|
||||
sync_server.get_active_site_type(project_name)
|
||||
)
|
||||
site_name = project_cache.data
|
||||
|
||||
site_cache = cls._root_overrides_cache[project_name][site_name]
|
||||
if site_cache.is_outdated:
|
||||
if site_name == "studio":
|
||||
# Handle studio root overrides without sync server
|
||||
# - studio root overrides can be done even without sync server
|
||||
roots_overrides = cls._get_studio_roots_overrides(
|
||||
project_name
|
||||
)
|
||||
else:
|
||||
# Ask sync server to get roots overrides
|
||||
roots_overrides = sync_server.get_site_root_overrides(
|
||||
project_name, site_name
|
||||
)
|
||||
site_cache.update_data(roots_overrides)
|
||||
return site_cache.data
|
||||
39
client/ayon_core/pipeline/anatomy/exceptions.py
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
from ayon_core.lib.path_templates import TemplateUnsolved
|
||||
|
||||
|
||||
class ProjectNotSet(Exception):
|
||||
"""Exception raised when is created Anatomy without project name."""
|
||||
|
||||
|
||||
class RootCombinationError(Exception):
|
||||
"""This exception is raised when templates has combined root types."""
|
||||
|
||||
def __init__(self, roots):
|
||||
joined_roots = ", ".join(
|
||||
["\"{}\"".format(_root) for _root in roots]
|
||||
)
|
||||
# TODO better error message
|
||||
msg = (
|
||||
"Combination of root with and"
|
||||
" without root name in AnatomyTemplates. {}"
|
||||
).format(joined_roots)
|
||||
|
||||
super(RootCombinationError, self).__init__(msg)
|
||||
|
||||
|
||||
class TemplateMissingKey(Exception):
|
||||
"""Exception for cases when key does not exist in template."""
|
||||
|
||||
msg = "Template key '{}' was not found."
|
||||
|
||||
def __init__(self, parents):
|
||||
parent_join = "".join(["[\"{0}\"]".format(key) for key in parents])
|
||||
super(TemplateMissingKey, self).__init__(
|
||||
self.msg.format(parent_join)
|
||||
)
|
||||
|
||||
|
||||
class AnatomyTemplateUnsolved(TemplateUnsolved):
|
||||
"""Exception for unsolved template when strict is set to True."""
|
||||
|
||||
msg = "Anatomy template \"{0}\" is unsolved.{1}{2}"
|
||||
524
client/ayon_core/pipeline/anatomy/roots.py
Normal file
|
|
@ -0,0 +1,524 @@
|
|||
import os
|
||||
import platform
|
||||
import numbers
|
||||
|
||||
from ayon_core.lib import Logger
|
||||
from ayon_core.lib.path_templates import FormatObject
|
||||
|
||||
|
||||
class RootItem(FormatObject):
|
||||
"""Represents one item or roots.
|
||||
|
||||
Holds raw data of root item specification. Raw data contains a value
for each platform, but the current platform value is used when the object
is used for formatting of a template.
|
||||
|
||||
Args:
|
||||
parent (AnatomyRoots): Parent object.
|
||||
root_raw_data (dict): Dictionary containing root values by platform
|
||||
names. ["windows", "linux" and "darwin"]
|
||||
name (str): Root name which the item represents. Used with
multi-root setup, otherwise a None value is expected.
|
||||
"""
|
||||
def __init__(self, parent, root_raw_data, name):
|
||||
super(RootItem, self).__init__()
|
||||
self._log = None
|
||||
lowered_platform_keys = {}
|
||||
for key, value in root_raw_data.items():
|
||||
lowered_platform_keys[key.lower()] = value
|
||||
self.raw_data = lowered_platform_keys
|
||||
self.cleaned_data = self._clean_roots(lowered_platform_keys)
|
||||
self.name = name
|
||||
self.parent = parent
|
||||
|
||||
self.available_platforms = set(lowered_platform_keys.keys())
|
||||
self.value = lowered_platform_keys.get(platform.system().lower())
|
||||
self.clean_value = self._clean_root(self.value)
|
||||
|
||||
def __format__(self, *args, **kwargs):
|
||||
return self.value.__format__(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.value)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def __getitem__(self, key):
|
||||
if isinstance(key, numbers.Number):
|
||||
return self.value[key]
|
||||
|
||||
additional_info = ""
|
||||
if self.parent and self.parent.project_name:
|
||||
additional_info += " for project \"{}\"".format(
|
||||
self.parent.project_name
|
||||
)
|
||||
|
||||
raise KeyError(
|
||||
"Root key \"{}\" is missing{}.".format(
|
||||
key, additional_info
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
@property
|
||||
def full_key(self):
|
||||
"""Full key value for dictionary formatting in template.
|
||||
|
||||
Returns:
|
||||
str: Return full replacement key for formatting. This helps when
|
||||
multiple roots are set. In that case e.g. `"root[work]"` is
|
||||
returned.
|
||||
|
||||
"""
|
||||
return "root[{}]".format(self.name)
|
||||
|
||||
@staticmethod
|
||||
def _clean_path(path):
|
||||
"""Just replace backslashes with forward slashes.
|
||||
|
||||
Args:
|
||||
path (str): Path which should be cleaned.
|
||||
|
||||
Returns:
|
||||
str: Cleaned path with forward slashes.
|
||||
|
||||
"""
|
||||
return str(path).replace("\\", "/")
|
||||
|
||||
def _clean_root(self, root):
|
||||
"""Clean root value.
|
||||
|
||||
Args:
|
||||
root (str): Root value which should be cleaned.
|
||||
|
||||
Returns:
|
||||
str: Cleaned root value.
|
||||
|
||||
"""
|
||||
return self._clean_path(root).rstrip("/")
|
||||
|
||||
def _clean_roots(self, raw_data):
|
||||
"""Clean all values of raw root item values."""
|
||||
cleaned = {}
|
||||
for key, value in raw_data.items():
|
||||
cleaned[key] = self._clean_root(value)
|
||||
return cleaned
|
||||
|
||||
def path_remapper(self, path, dst_platform=None, src_platform=None):
|
||||
"""Remap path for specific platform.
|
||||
|
||||
Args:
|
||||
path (str): Source path which need to be remapped.
|
||||
dst_platform (str, optional): Specify destination platform
|
||||
for which remapping should happen.
|
||||
src_platform (str, optional): Specify source platform. It is
recommended to keep this unset unless you really need to remap
from a specific platform.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: When path does not contain known root then
|
||||
None is returned else returns remapped path with
|
||||
"{root[<name>]}".
|
||||
|
||||
"""
|
||||
cleaned_path = self._clean_path(path)
|
||||
if dst_platform:
|
||||
dst_root_clean = self.cleaned_data.get(dst_platform)
|
||||
if not dst_root_clean:
|
||||
self.log.warning(
|
||||
"Root \"{}\" miss platform \"{}\" definition.".format(
|
||||
self.full_key, dst_platform
|
||||
)
|
||||
)
|
||||
return None
|
||||
|
||||
if cleaned_path.startswith(dst_root_clean):
|
||||
return cleaned_path
|
||||
|
||||
if src_platform:
|
||||
src_root_clean = self.cleaned_data.get(src_platform)
|
||||
if src_root_clean is None:
|
||||
self.log.warning(
|
||||
"Root \"{}\" miss platform \"{}\" definition.".format(
|
||||
self.full_key, src_platform
|
||||
)
|
||||
)
|
||||
return None
|
||||
|
||||
if not cleaned_path.startswith(src_root_clean):
|
||||
return None
|
||||
|
||||
subpath = cleaned_path[len(src_root_clean):]
|
||||
if dst_platform:
|
||||
# `dst_root_clean` is used from upper condition
|
||||
return dst_root_clean + subpath
|
||||
return self.clean_value + subpath
|
||||
|
||||
result, template = self.find_root_template_from_path(path)
|
||||
if not result:
|
||||
return None
|
||||
|
||||
if dst_platform:
|
||||
fill_data = {self.name: dst_root_clean}
|
||||
else:
|
||||
fill_data = {self.name: self.value}
|
||||
|
||||
return template.format(**{"root": fill_data})
|
||||
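To make the remapping behaviour above concrete, a hedged example built from the same raw data used in the `find_root_template_from_path` docstring below; the root name and paths are assumptions.

```python
from ayon_core.pipeline.anatomy.roots import RootItem

root_item = RootItem(
    None,  # parent AnatomyRoots is not needed for this sketch
    {"windows": "C:/windows/path/root", "linux": "/mount/root"},
    "work",
)

src = "C:/windows/path/root/projects/my_project/file.ext"
linux_path = root_item.path_remapper(src, dst_platform="linux")
# -> "/mount/root/projects/my_project/file.ext"

unknown = root_item.path_remapper("D:/elsewhere/file.ext", dst_platform="linux")
# -> None, the path does not start with any known root
```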
|
||||
def find_root_template_from_path(self, path):
|
||||
"""Replaces known root value with formattable key in path.
|
||||
|
||||
All platform values are checked for this replacement.
|
||||
|
||||
Args:
|
||||
path (str): Path where root value should be found.
|
||||
|
||||
Returns:
|
||||
tuple: Tuple contain 2 values: `success` (bool) and `path` (str).
|
||||
When success it True then path should contain replaced root
|
||||
value with formattable key.
|
||||
|
||||
Example:
|
||||
When input path is::
|
||||
"C:/windows/path/root/projects/my_project/file.ext"
|
||||
|
||||
And raw data of item looks like::
|
||||
{
|
||||
"windows": "C:/windows/path/root",
|
||||
"linux": "/mount/root"
|
||||
}
|
||||
|
||||
Output will be::
|
||||
(True, "{root}/projects/my_project/file.ext")
|
||||
|
||||
If any of raw data value wouldn't match path's root output is::
|
||||
(False, "C:/windows/path/root/projects/my_project/file.ext")
|
||||
"""
|
||||
result = False
|
||||
output = str(path)
|
||||
|
||||
mod_path = self._clean_path(path)
|
||||
for root_os, root_path in self.cleaned_data.items():
|
||||
# Skip empty paths
|
||||
if not root_path:
|
||||
continue
|
||||
|
||||
_mod_path = mod_path # reset to original cleaned value
|
||||
if root_os == "windows":
|
||||
root_path = root_path.lower()
|
||||
_mod_path = _mod_path.lower()
|
||||
|
||||
if _mod_path.startswith(root_path):
|
||||
result = True
|
||||
replacement = "{" + self.full_key + "}"
|
||||
output = replacement + mod_path[len(root_path):]
|
||||
break
|
||||
|
||||
return (result, output)
|
||||
|
||||
|
||||
class AnatomyRoots:
|
||||
"""Object which should be used for formatting "root" key in templates.
|
||||
|
||||
Args:
|
||||
anatomy (Anatomy): Anatomy object created for a specific project.
|
||||
"""
|
||||
|
||||
env_prefix = "AYON_PROJECT_ROOT"
|
||||
|
||||
def __init__(self, anatomy):
|
||||
self._log = None
|
||||
self._anatomy = anatomy
|
||||
self._loaded_project = None
|
||||
self._roots = None
|
||||
|
||||
def __format__(self, *args, **kwargs):
|
||||
return self.roots.__format__(*args, **kwargs)
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.roots[key]
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
@property
|
||||
def anatomy(self):
|
||||
"""Parent Anatomy object.
|
||||
|
||||
Returns:
|
||||
Anatomy: Parent anatomy object.
|
||||
|
||||
"""
|
||||
return self._anatomy
|
||||
|
||||
def reset(self):
|
||||
"""Reset current roots value."""
|
||||
self._roots = None
|
||||
|
||||
def path_remapper(
|
||||
self, path, dst_platform=None, src_platform=None, roots=None
|
||||
):
|
||||
"""Remap path for specific platform.
|
||||
|
||||
Args:
|
||||
path (str): Source path which need to be remapped.
|
||||
dst_platform (Optional[str]): Specify destination platform
|
||||
for which remapping should happen.
|
||||
src_platform (Optional[str]): Specify source platform. It is
recommended to keep this unset unless you really need to remap
from a specific platform.
|
||||
roots (Optional[Union[dict, RootItem]]): It is possible to remap
the path with different roots than those of the instance on which
the method was called.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: When path does not contain known root then
|
||||
None is returned else returns remapped path with "{root}"
|
||||
or "{root[<name>]}".
|
||||
|
||||
"""
|
||||
if roots is None:
|
||||
roots = self.roots
|
||||
|
||||
if roots is None:
|
||||
raise ValueError("Roots are not set. Can't find path.")
|
||||
|
||||
if "{root" in path:
|
||||
path = path.format(**{"root": roots})
|
||||
# If `dst_platform` is not specified then return else continue.
|
||||
if not dst_platform:
|
||||
return path
|
||||
|
||||
if isinstance(roots, RootItem):
|
||||
return roots.path_remapper(path, dst_platform, src_platform)
|
||||
|
||||
for _root in roots.values():
|
||||
result = self.path_remapper(
|
||||
path, dst_platform, src_platform, _root
|
||||
)
|
||||
if result is not None:
|
||||
return result
|
||||
|
||||
def find_root_template_from_path(self, path, roots=None):
|
||||
"""Find root value in entered path and replace it with formatting key.
|
||||
|
||||
Args:
|
||||
path (str): Source path where root will be searched.
|
||||
roots (Optional[Union[AnatomyRoots, dict]]): It is possible to use
different roots than those of the instance on which the method
was called.
|
||||
|
||||
Returns:
|
||||
tuple: Output contains tuple with bool representing success as
|
||||
first value and path with or without replaced root with
|
||||
formatting key as second value.
|
||||
|
||||
Raises:
|
||||
ValueError: When roots are not entered and can't be loaded.
|
||||
"""
|
||||
if roots is None:
|
||||
self.log.debug(
|
||||
"Looking for matching root in path \"{}\".".format(path)
|
||||
)
|
||||
roots = self.roots
|
||||
|
||||
if roots is None:
|
||||
raise ValueError("Roots are not set. Can't find path.")
|
||||
|
||||
if isinstance(roots, RootItem):
|
||||
return roots.find_root_template_from_path(path)
|
||||
|
||||
for root_name, _root in roots.items():
|
||||
success, result = self.find_root_template_from_path(path, _root)
|
||||
if success:
|
||||
self.log.debug(
|
||||
"Found match in root \"{}\".".format(root_name)
|
||||
)
|
||||
return success, result
|
||||
|
||||
self.log.warning("No matching root was found in current setting.")
|
||||
return (False, path)
|
||||
|
||||
def set_root_environments(self):
|
||||
"""Set root environments for current project."""
|
||||
for key, value in self.root_environments().items():
|
||||
os.environ[key] = value
|
||||
|
||||
def root_environments(self):
|
||||
"""Use root keys to create unique keys for environment variables.
|
||||
|
||||
Concatenates prefix "AYON_PROJECT_ROOT_" with root keys to create
|
||||
unique keys.
|
||||
|
||||
Returns:
|
||||
dict: Result is `{(str): (str)}` dictionary where key represents
|
||||
unique key concatenated by keys and value is root value of
|
||||
current platform root.
|
||||
|
||||
Example:
|
||||
With raw root values::
|
||||
"work": {
|
||||
"windows": "P:/projects/work",
|
||||
"linux": "/mnt/share/projects/work",
|
||||
"darwin": "/darwin/path/work"
|
||||
},
|
||||
"publish": {
|
||||
"windows": "P:/projects/publish",
|
||||
"linux": "/mnt/share/projects/publish",
|
||||
"darwin": "/darwin/path/publish"
|
||||
}
|
||||
|
||||
Result on windows platform::
|
||||
{
|
||||
"AYON_PROJECT_ROOT_WORK": "P:/projects/work",
|
||||
"AYON_PROJECT_ROOT_PUBLISH": "P:/projects/publish"
|
||||
}
|
||||
|
||||
"""
|
||||
return self._root_environments()
|
||||
|
||||
def all_root_paths(self, roots=None):
|
||||
"""Return all paths for all roots of all platforms."""
|
||||
if roots is None:
|
||||
roots = self.roots
|
||||
|
||||
output = []
|
||||
if isinstance(roots, RootItem):
|
||||
for value in roots.raw_data.values():
|
||||
output.append(value)
|
||||
return output
|
||||
|
||||
for _roots in roots.values():
|
||||
output.extend(self.all_root_paths(_roots))
|
||||
return output
|
||||
|
||||
def _root_environments(self, keys=None, roots=None):
|
||||
if not keys:
|
||||
keys = []
|
||||
if roots is None:
|
||||
roots = self.roots
|
||||
|
||||
if isinstance(roots, RootItem):
|
||||
key_items = [self.env_prefix]
|
||||
for _key in keys:
|
||||
key_items.append(_key.upper())
|
||||
|
||||
key = "_".join(key_items)
|
||||
# Make sure key and value does not contain unicode
|
||||
# - can happen in Python 2 hosts
|
||||
return {str(key): str(roots.value)}
|
||||
|
||||
output = {}
|
||||
for _key, _value in roots.items():
|
||||
_keys = list(keys)
|
||||
_keys.append(_key)
|
||||
output.update(self._root_environments(_keys, _value))
|
||||
return output
|
||||
|
||||
    def root_environmets_fill_data(self, template=None):
        """Environment variable values in dictionary for rootless path.

        Args:
            template (str): Template for environment variable key fill.
                By default is set to `"${}"`.
        """
        if template is None:
            template = "${}"
        return self._root_environmets_fill_data(template)

    def _root_environmets_fill_data(self, template, keys=None, roots=None):
        if keys is None and roots is None:
            return {
                "root": self._root_environmets_fill_data(
                    template, [], self.roots
                )
            }

        if isinstance(roots, RootItem):
            key_items = [AnatomyRoots.env_prefix]
            for _key in keys:
                key_items.append(_key.upper())
            key = "_".join(key_items)
            return template.format(key)

        output = {}
        for key, value in roots.items():
            _keys = list(keys)
            _keys.append(key)
            output[key] = self._root_environmets_fill_data(
                template, _keys, value
            )
        return output

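The fill data produced by these helpers can be used to rebuild a path that points at the environment variables instead of literal root values, for example (illustrative values only):

```python
# Shape of the fill data for a single "work" root:
fill_data = {"root": {"work": "$AYON_PROJECT_ROOT_WORK"}}

rootless = "{root[work]}/demo/sh010/scene.ma"
print(rootless.format(**fill_data))
# -> "$AYON_PROJECT_ROOT_WORK/demo/sh010/scene.ma"
```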
    @property
    def project_name(self):
        """Current project name which will be used for loading root values.

        Returns:
            str: Project name.
        """
        return self._anatomy.project_name

    @property
    def roots(self):
        """Property for filling "root" key in templates.

        This property returns roots for the current project or default
        root values.

        Warning:
            Default root values may cause issues when a project uses
            different root settings. That may happen when the project uses
            multiroot templates but the default roots miss their keys.

        """
        if self.project_name != self._loaded_project:
            self._roots = None

        if self._roots is None:
            self._roots = self._discover()
            self._loaded_project = self.project_name
        return self._roots

    def _discover(self):
        """Load current project's roots or default roots.

        Default roots are loaded if the project overrides do not contain
        roots.

        Returns:
            `RootItem` or `dict` with multiple `RootItem`s when multiroot
                setting is used.
        """

        return self._parse_dict(self._anatomy["roots"], self)

    @staticmethod
    def _parse_dict(data, parent):
        """Parse roots raw data into RootItem or dictionary with RootItems.

        Converting raw roots data to `RootItem` helps to handle platform
        keys. This method is recursive to be able to handle multiroot setup
        and is static to be able to load default roots without creating a
        new object.

        Args:
            data (dict): Should contain raw roots data to be parsed.
            parent (AnatomyRoots): Parent object set as parent
                for ``RootItem``.

        Returns:
            dict[str, RootItem]: Root items by name.

        """
        output = {}
        for root_name, root_values in data.items():
            output[root_name] = RootItem(
                parent, root_values, root_name
            )
        return output
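In other words, the raw multiroot settings are turned into `RootItem` objects keyed by root name, roughly like this (made-up values; `roots_obj` stands for the parent `AnatomyRoots` instance):

```python
raw_roots = {
    "work": {
        "windows": "P:/projects/work",
        "linux": "/mnt/share/projects/work",
        "darwin": "/Volumes/projects/work",
    },
}
items = AnatomyRoots._parse_dict(raw_roots, roots_obj)
# items -> {"work": RootItem(roots_obj, raw_roots["work"], "work")}
```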
891 client/ayon_core/pipeline/anatomy/templates.py (new file)
@ -0,0 +1,891 @@
import os
import re
import copy
import collections
import numbers

from ayon_core.lib.path_templates import (
    TemplateResult,
    StringTemplate,
)

from .exceptions import (
    ProjectNotSet,
    TemplateMissingKey,
    AnatomyTemplateUnsolved,
)
from .roots import RootItem

_PLACEHOLDER = object()


class AnatomyTemplateResult(TemplateResult):
|
||||
rootless = None
|
||||
|
||||
def __new__(cls, result, rootless_path):
|
||||
new_obj = super(AnatomyTemplateResult, cls).__new__(
|
||||
cls,
|
||||
str(result),
|
||||
result.template,
|
||||
result.solved,
|
||||
result.used_values,
|
||||
result.missing_keys,
|
||||
result.invalid_types
|
||||
)
|
||||
new_obj.rootless = rootless_path
|
||||
return new_obj
|
||||
|
||||
def validate(self):
|
||||
if not self.solved:
|
||||
raise AnatomyTemplateUnsolved(
|
||||
self.template,
|
||||
self.missing_keys,
|
||||
self.invalid_types
|
||||
)
|
||||
|
||||
def copy(self):
|
||||
tmp = TemplateResult(
|
||||
str(self),
|
||||
self.template,
|
||||
self.solved,
|
||||
self.used_values,
|
||||
self.missing_keys,
|
||||
self.invalid_types
|
||||
)
|
||||
return self.__class__(tmp, self.rootless)
|
||||
|
||||
def normalized(self):
|
||||
"""Convert to normalized path."""
|
||||
|
||||
tmp = TemplateResult(
|
||||
os.path.normpath(self),
|
||||
self.template,
|
||||
self.solved,
|
||||
self.used_values,
|
||||
self.missing_keys,
|
||||
self.invalid_types
|
||||
)
|
||||
return self.__class__(tmp, self.rootless)
|
||||
|
||||
|
||||
class AnatomyStringTemplate(StringTemplate):
    """String template which has access to anatomy.

    Args:
        anatomy_templates (AnatomyTemplates): Anatomy templates object.
        template (str): Template string.
    """

    def __init__(self, anatomy_templates, template):
        self.anatomy_templates = anatomy_templates
        super(AnatomyStringTemplate, self).__init__(template)

    def format(self, data):
        """Format template and add 'root' key to data if not available.

        Args:
            data (dict[str, Any]): Formatting data for template.

        Returns:
            AnatomyTemplateResult: Formatting result.
        """

        anatomy_templates = self.anatomy_templates
        if not data.get("root"):
            data = copy.deepcopy(data)
            data["root"] = anatomy_templates.anatomy.roots
        result = StringTemplate.format(self, data)
        rootless_path = anatomy_templates.get_rootless_path_from_result(
            result
        )
        return AnatomyTemplateResult(result, rootless_path)


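A small sketch of the behavior (project and fill values are made up; `anatomy.templates_obj` is used as it appears elsewhere in this change set):

```python
template = AnatomyStringTemplate(
    anatomy.templates_obj,
    "{root[work]}/{project[name]}/{folder[name]}"
)
result = template.format({
    "project": {"name": "demo"},
    "folder": {"name": "sh010"},
})
print(str(result))      # absolute path with the "work" root filled in
print(result.rootless)  # "{root[work]}/demo/sh010"
```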
def _merge_dict(main_dict, enhance_dict):
    """Merges dictionaries by keys.

    Values that are themselves dictionaries are merged by keys as well.

    Args:
        main_dict (dict): First dict to merge second one into.
        enhance_dict (dict): Second dict to be merged.

    Returns:
        dict: Merged result.

    .. note:: Does not override the whole value on the first found key,
        only the differing values from `enhance_dict`.

    """

    merge_queue = collections.deque()
    merge_queue.append((main_dict, enhance_dict))
    while merge_queue:
        queue_item = merge_queue.popleft()
        l_dict, r_dict = queue_item

        for key, value in r_dict.items():
            if key not in l_dict:
                l_dict[key] = value
            elif isinstance(value, dict) and isinstance(l_dict[key], dict):
                merge_queue.append((l_dict[key], value))
            else:
                l_dict[key] = value
    return main_dict


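For example, nested keys from the second dictionary are merged into the first instead of replacing it wholesale (plain values, no anatomy objects involved):

```python
main = {"render": {"file": "a.exr"}, "fps": 25}
extra = {"render": {"frame_padding": 4}, "colorspace": "ACES"}
print(_merge_dict(main, extra))
# -> {"render": {"file": "a.exr", "frame_padding": 4},
#     "fps": 25, "colorspace": "ACES"}
```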
class TemplatesResultDict(dict):
    """Holds and wraps 'AnatomyTemplateResult' for easy bug reporting.

    Dictionary-like object which holds 'AnatomyTemplateResult' in the same
    data structure as the base dictionary of anatomy templates. It can raise
    a descriptive error when a missing or unsolved template is accessed in
    strict mode.

    """

def __init__(self, in_data, key=None, parent=None, strict=None):
|
||||
super(TemplatesResultDict, self).__init__()
|
||||
for _key, _value in in_data.items():
|
||||
if isinstance(_value, TemplatesResultDict):
|
||||
_value.parent = self
|
||||
elif isinstance(_value, dict):
|
||||
_value = self.__class__(_value, _key, self)
|
||||
self[_key] = _value
|
||||
|
||||
if strict is None and parent is None:
|
||||
strict = True
|
||||
|
||||
self.key = key
|
||||
self.parent = parent
|
||||
self._is_strict = strict
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key not in self.keys():
|
||||
hier = self.get_hierarchy()
|
||||
hier.append(key)
|
||||
raise TemplateMissingKey(hier)
|
||||
|
||||
value = super(TemplatesResultDict, self).__getitem__(key)
|
||||
if isinstance(value, self.__class__):
|
||||
return value
|
||||
|
||||
# Raise exception when expected solved templates and it is not.
|
||||
if self.is_strict and hasattr(value, "validate"):
|
||||
value.validate()
|
||||
return value
|
||||
|
||||
def get_is_strict(self):
|
||||
return self._is_strict
|
||||
|
||||
def set_is_strict(self, is_strict):
|
||||
if is_strict is None and self.parent is None:
|
||||
is_strict = True
|
||||
self._is_strict = is_strict
|
||||
for child in self.values():
|
||||
if isinstance(child, self.__class__):
|
||||
child.set_is_strict(is_strict)
|
||||
elif isinstance(child, AnatomyTemplateResult):
|
||||
child.strict = is_strict
|
||||
|
||||
strict = property(get_is_strict, set_is_strict)
|
||||
is_strict = property(get_is_strict, set_is_strict)
|
||||
|
||||
def get_hierarchy(self):
|
||||
"""Return dictionary keys one by one to root parent."""
|
||||
if self.key is None:
|
||||
return []
|
||||
|
||||
if self.parent is None:
|
||||
return [self.key]
|
||||
|
||||
par_hier = list(self.parent.get_hierarchy())
|
||||
par_hier.append(self.key)
|
||||
return par_hier
|
||||
|
||||
@property
|
||||
def missing_keys(self):
|
||||
"""Return missing keys of all children templates."""
|
||||
missing_keys = set()
|
||||
for value in self.values():
|
||||
missing_keys |= value.missing_keys
|
||||
return missing_keys
|
||||
|
||||
@property
|
||||
def invalid_types(self):
|
||||
"""Return invalid types of all children templates."""
|
||||
invalid_types = {}
|
||||
for value in self.values():
|
||||
invalid_types = _merge_dict(invalid_types, value.invalid_types)
|
||||
return invalid_types
|
||||
|
||||
@property
|
||||
def used_values(self):
|
||||
"""Return used values for all children templates."""
|
||||
used_values = {}
|
||||
for value in self.values():
|
||||
used_values = _merge_dict(used_values, value.used_values)
|
||||
return used_values
|
||||
|
||||
def get_solved(self):
|
||||
"""Get only solved key from templates."""
|
||||
result = {}
|
||||
for key, value in self.items():
|
||||
if isinstance(value, self.__class__):
|
||||
value = value.get_solved()
|
||||
if not value:
|
||||
continue
|
||||
result[key] = value
|
||||
|
||||
elif (
|
||||
not hasattr(value, "solved") or
|
||||
value.solved
|
||||
):
|
||||
result[key] = value
|
||||
return self.__class__(result, key=self.key, parent=self.parent)
|
||||
|
||||
|
||||
class TemplateItem:
|
||||
"""Template item under template category.
|
||||
|
||||
This item data usually contains 'file' and 'directory' by anatomy
|
||||
definition, enhanced by common data ('frame_padding',
|
||||
'version_padding'). It adds 'path' key which is combination of
|
||||
'file' and 'directory' values.
|
||||
|
||||
Args:
|
||||
anatomy_templates (AnatomyTemplates): Anatomy templates object.
|
||||
template_data (dict[str, Any]): Templates data.
|
||||
|
||||
"""
|
||||
def __init__(self, anatomy_templates, template_data):
|
||||
template_data = copy.deepcopy(template_data)
|
||||
|
||||
# Backwards compatibility for 'folder'
|
||||
# TODO remove when deprecation not needed anymore
|
||||
if (
|
||||
"folder" not in template_data
|
||||
and "directory" in template_data
|
||||
):
|
||||
template_data["folder"] = template_data["directory"]
|
||||
|
||||
# Add 'path' key
|
||||
if (
|
||||
"path" not in template_data
|
||||
and "file" in template_data
|
||||
and "directory" in template_data
|
||||
):
|
||||
template_data["path"] = "/".join(
|
||||
(template_data["directory"], template_data["file"])
|
||||
)
|
||||
|
||||
for key, value in template_data.items():
|
||||
if isinstance(value, str):
|
||||
value = AnatomyStringTemplate(anatomy_templates, value)
|
||||
template_data[key] = value
|
||||
|
||||
self._template_data = template_data
|
||||
self._anatomy_templates = anatomy_templates
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._template_data[key]
|
||||
|
||||
def get(self, key, default=None):
|
||||
return self._template_data.get(key, default)
|
||||
|
||||
def format(self, data, strict=True):
|
||||
output = {}
|
||||
for key, value in self._template_data.items():
|
||||
if isinstance(value, AnatomyStringTemplate):
|
||||
value = value.format(data)
|
||||
output[key] = value
|
||||
return TemplatesResultDict(output, strict=strict)
|
||||
|
||||
|
||||
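Put differently, when an item defines both 'directory' and 'file', it gains a combined 'path' entry and keeps 'folder' as a deprecated alias (a sketch with simplified template strings; `anatomy_templates` is assumed to exist):

```python
item = TemplateItem(anatomy_templates, {
    "directory": "{root[work]}/{project[name]}",
    "file": "{product[name]}.ma",
})
# item["path"] now holds the combined template:
#   "{root[work]}/{project[name]}/{product[name]}.ma"
# item["folder"] mirrors item["directory"] for backwards compatibility
```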
class TemplateCategory:
|
||||
"""Template category.
|
||||
|
||||
Template category groups template items for specific usage. Categories
|
||||
available at the moment are 'work', 'publish', 'hero', 'delivery',
|
||||
'staging' and 'others'.
|
||||
|
||||
Args:
|
||||
anatomy_templates (AnatomyTemplates): Anatomy templates object.
|
||||
category_name (str): Category name.
|
||||
category_data (dict[str, Any]): Category data.
|
||||
|
||||
"""
|
||||
def __init__(self, anatomy_templates, category_name, category_data):
|
||||
for key, value in category_data.items():
|
||||
if isinstance(value, dict):
|
||||
value = TemplateItem(anatomy_templates, value)
|
||||
elif isinstance(value, str):
|
||||
value = AnatomyStringTemplate(anatomy_templates, value)
|
||||
category_data[key] = value
|
||||
self._name = category_name
|
||||
self._name_prefix = "{}_".format(category_name)
|
||||
self._category_data = category_data
|
||||
|
||||
def __getitem__(self, key):
|
||||
new_key = self._convert_getter_key(key)
|
||||
return self._category_data[new_key]
|
||||
|
||||
def get(self, key, default=None):
|
||||
new_key = self._convert_getter_key(key)
|
||||
return self._category_data.get(new_key, default)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Category name.
|
||||
|
||||
Returns:
|
||||
str: Category name.
|
||||
|
||||
"""
|
||||
return self._name
|
||||
|
||||
def format(self, data, strict=True):
|
||||
output = {}
|
||||
for key, value in self._category_data.items():
|
||||
if isinstance(value, TemplateItem):
|
||||
value = value.format(data, strict)
|
||||
elif isinstance(value, AnatomyStringTemplate):
|
||||
value = value.format(data)
|
||||
|
||||
if isinstance(value, TemplatesResultDict):
|
||||
value.key = key
|
||||
output[key] = value
|
||||
return TemplatesResultDict(output, key=self.name, strict=strict)
|
||||
|
||||
    def _convert_getter_key(self, key):
        """Convert key for backwards compatibility.

        OpenPype compatible settings did contain template keys prefixed by
        category name e.g. 'publish_render' which should be just 'render'.

        This method keeps the backwards compatibility, but only if the key
        starts with the category name prefix and the stripped key is
        available in the category data.

        Args:
            key (str): Key to be converted.

        Returns:
            str: Converted string.

        """
        if key in self._category_data:
            return key

        # Use default when the key is the category name
        if key == self._name:
            return "default"

        # Remove prefix if the key is prefixed
        if key.startswith(self._name_prefix):
            new_key = key[len(self._name_prefix):]
            if new_key in self._category_data:
                return new_key
        return key


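So, for a category named 'publish' that contains 'default' and 'render' items, legacy-style keys keep working (illustrative sketch; `publish` stands for that category object):

```python
assert publish.get("render") is publish.get("publish_render")
assert publish.get("publish") is publish.get("default")
```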
class AnatomyTemplates:
|
||||
inner_key_pattern = re.compile(r"(\{@.*?[^{}0]*\})")
|
||||
inner_key_name_pattern = re.compile(r"\{@(.*?[^{}0]*)\}")
|
||||
|
||||
def __init__(self, anatomy):
|
||||
self._anatomy = anatomy
|
||||
|
||||
self._loaded_project = None
|
||||
self._raw_templates = None
|
||||
self._templates = None
|
||||
self._objected_templates = None
|
||||
|
||||
def __getitem__(self, key):
|
||||
self._validate_discovery()
|
||||
return self._objected_templates[key]
|
||||
|
||||
def get(self, key, default=None):
|
||||
self._validate_discovery()
|
||||
return self._objected_templates.get(key, default)
|
||||
|
||||
def keys(self):
|
||||
return self._objected_templates.keys()
|
||||
|
||||
def reset(self):
|
||||
self._raw_templates = None
|
||||
self._templates = None
|
||||
self._objected_templates = None
|
||||
|
||||
@property
|
||||
def anatomy(self):
|
||||
"""Anatomy instance.
|
||||
|
||||
Returns:
|
||||
Anatomy: Anatomy instance.
|
||||
|
||||
"""
|
||||
return self._anatomy
|
||||
|
||||
@property
|
||||
def project_name(self):
|
||||
"""Project name.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Project name if set, otherwise None.
|
||||
|
||||
"""
|
||||
return self._anatomy.project_name
|
||||
|
||||
@property
|
||||
def roots(self):
|
||||
"""Anatomy roots object.
|
||||
|
||||
Returns:
|
||||
RootItem: Anatomy roots data.
|
||||
|
||||
"""
|
||||
return self._anatomy.roots
|
||||
|
||||
@property
|
||||
def templates(self):
|
||||
"""Templates data.
|
||||
|
||||
Templates data with replaced common data.
|
||||
|
||||
Returns:
|
||||
dict[str, Any]: Templates data.
|
||||
|
||||
"""
|
||||
self._validate_discovery()
|
||||
return self._templates
|
||||
|
||||
@property
|
||||
def frame_padding(self):
|
||||
"""Default frame padding.
|
||||
|
||||
Returns:
|
||||
int: Frame padding used by default in templates.
|
||||
|
||||
"""
|
||||
self._validate_discovery()
|
||||
return self["frame_padding"]
|
||||
|
||||
@property
|
||||
def version_padding(self):
|
||||
"""Default version padding.
|
||||
|
||||
Returns:
|
||||
int: Version padding used by default in templates.
|
||||
|
||||
"""
|
||||
self._validate_discovery()
|
||||
return self["version_padding"]
|
||||
|
||||
@classmethod
|
||||
def get_rootless_path_from_result(cls, result):
|
||||
"""Calculate rootless path from formatting result.
|
||||
|
||||
Args:
|
||||
result (TemplateResult): Result of StringTemplate formatting.
|
||||
|
||||
Returns:
|
||||
str: Rootless path if result contains one of anatomy roots.
|
||||
"""
|
||||
|
||||
used_values = result.used_values
|
||||
missing_keys = result.missing_keys
|
||||
template = result.template
|
||||
invalid_types = result.invalid_types
|
||||
if (
|
||||
"root" not in used_values
|
||||
or "root" in missing_keys
|
||||
or "{root" not in template
|
||||
):
|
||||
return
|
||||
|
||||
for invalid_type in invalid_types:
|
||||
if "root" in invalid_type:
|
||||
return
|
||||
|
||||
root_keys = cls._dict_to_subkeys_list({"root": used_values["root"]})
|
||||
if not root_keys:
|
||||
return
|
||||
|
||||
output = str(result)
|
||||
for used_root_keys in root_keys:
|
||||
if not used_root_keys:
|
||||
continue
|
||||
|
||||
used_value = used_values
|
||||
root_key = None
|
||||
for key in used_root_keys:
|
||||
used_value = used_value[key]
|
||||
if root_key is None:
|
||||
root_key = key
|
||||
else:
|
||||
root_key += "[{}]".format(key)
|
||||
|
||||
root_key = "{" + root_key + "}"
|
||||
output = output.replace(str(used_value), root_key)
|
||||
|
||||
return output
|
||||
|
||||
def format(self, data, strict=True):
|
||||
"""Fill all templates based on entered data.
|
||||
|
||||
Args:
|
||||
data (dict[str, Any]): Fill data used for template formatting.
|
||||
strict (Optional[bool]): Raise exception is accessed value is
|
||||
not fully filled.
|
||||
|
||||
Returns:
|
||||
TemplatesResultDict: Output `TemplateResult` have `strict`
|
||||
attribute set to False so accessing unfilled keys in templates
|
||||
won't raise any exceptions.
|
||||
|
||||
"""
|
||||
self._validate_discovery()
|
||||
copy_data = copy.deepcopy(data)
|
||||
roots = self._anatomy.roots
|
||||
if roots:
|
||||
copy_data["root"] = roots
|
||||
|
||||
return self._solve_dict(copy_data, strict)
|
||||
|
||||
def format_all(self, in_data):
|
||||
"""Fill all templates based on entered data.
|
||||
|
||||
Deprecated:
|
||||
Use `format` method with `strict=False` instead.
|
||||
|
||||
Args:
|
||||
in_data (dict): Containing keys to be filled into template.
|
||||
|
||||
Returns:
|
||||
TemplatesResultDict: Output `TemplateResult` have `strict`
|
||||
attribute set to False so accessing unfilled keys in templates
|
||||
won't raise any exceptions.
|
||||
|
||||
"""
|
||||
return self.format(in_data, strict=False)
|
||||
|
||||
    def get_template_item(
        self, category_name, template_name, subkey=None, default=_PLACEHOLDER
    ):
        """Get template item from category.

        Args:
            category_name (str): Category name.
            template_name (str): Template name.
            subkey (Optional[str]): Subkey name.
            default (Any): Default value if template is not found.

        Returns:
            Any: Template item or subkey value.

        Raises:
            KeyError: When any passed key is not available. The error is
                not raised if 'default' is filled.

        """
        self._validate_discovery()
        category = self.get(category_name)
        if category is None:
            if default is not _PLACEHOLDER:
                return default
            raise KeyError("Category '{}' not found.".format(category_name))

        template_item = category.get(template_name)
        if template_item is None:
            if default is not _PLACEHOLDER:
                return default
            raise KeyError(
                "Template '{}' not found in category '{}'.".format(
                    template_name, category_name
                )
            )

        if subkey is None:
            return template_item

        item = template_item.get(subkey)
        if item is not None:
            return item

        if default is not _PLACEHOLDER:
            return default
        raise KeyError(
            "Subkey '{}' not found in '{}/{}'.".format(
                subkey, category_name, template_name
            )
        )

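This accessor is what the rest of this change set migrates to. A typical call could look like this (the project name is a placeholder and the import path is assumed):

```python
from ayon_core.pipeline import Anatomy  # import path assumed

anatomy = Anatomy("demo_project")  # placeholder project name
path_template = anatomy.get_template_item("publish", "default", "path")

result = path_template.format({"project": {"name": "demo_project"}})
print(result.solved)        # False until all template keys are provided
print(result.missing_keys)  # keys that still need to be filled
```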
def _solve_dict(self, data, strict):
|
||||
""" Solves templates with entered data.
|
||||
|
||||
Args:
|
||||
data (dict): Containing keys to be filled into template.
|
||||
|
||||
Returns:
|
||||
dict: With `TemplateResult` in values containing filled or
|
||||
partially filled templates.
|
||||
|
||||
"""
|
||||
output = {}
|
||||
for key, value in self._objected_templates.items():
|
||||
if isinstance(value, TemplateCategory):
|
||||
value = value.format(data, strict)
|
||||
elif isinstance(value, AnatomyStringTemplate):
|
||||
value = value.format(data)
|
||||
output[key] = value
|
||||
return TemplatesResultDict(output, strict=strict)
|
||||
|
||||
def _validate_discovery(self):
|
||||
"""Validate if templates are discovered and loaded for anatomy project.
|
||||
|
||||
When project changes the cached data are reset and discovered again.
|
||||
"""
|
||||
if self.project_name != self._loaded_project:
|
||||
self.reset()
|
||||
|
||||
if self._templates is None:
|
||||
self._discover()
|
||||
self._loaded_project = self.project_name
|
||||
|
||||
def _create_objected_templates(self, templates):
|
||||
"""Create objected templates from templates data.
|
||||
|
||||
Args:
|
||||
templates (dict[str, Any]): Templates data from project entity.
|
||||
|
||||
Returns:
|
||||
dict[str, Any]: Values are converted to template objects.
|
||||
|
||||
"""
|
||||
objected_templates = {}
|
||||
for category_name, category_value in copy.deepcopy(templates).items():
|
||||
if isinstance(category_value, dict):
|
||||
category_value = TemplateCategory(
|
||||
self, category_name, category_value
|
||||
)
|
||||
elif isinstance(category_value, str):
|
||||
category_value = AnatomyStringTemplate(self, category_value)
|
||||
objected_templates[category_name] = category_value
|
||||
return objected_templates
|
||||
|
||||
def _discover(self):
|
||||
"""Load and cache templates from project entity."""
|
||||
if self.project_name is None:
|
||||
raise ProjectNotSet("Anatomy project is not set.")
|
||||
|
||||
templates = self.anatomy["templates"]
|
||||
self._raw_templates = copy.deepcopy(templates)
|
||||
|
||||
templates = copy.deepcopy(templates)
|
||||
# Make sure all the keys are available
|
||||
for key in (
|
||||
"publish",
|
||||
"hero",
|
||||
"work",
|
||||
"delivery",
|
||||
"staging",
|
||||
"others",
|
||||
):
|
||||
templates.setdefault(key, {})
|
||||
|
||||
solved_templates = self._solve_template_inner_links(templates)
|
||||
self._templates = solved_templates
|
||||
self._objected_templates = self._create_objected_templates(
|
||||
solved_templates
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _replace_inner_keys(cls, matches, value, key_values, key):
|
||||
"""Replacement of inner keys in template values."""
|
||||
for match in matches:
|
||||
anatomy_sub_keys = (
|
||||
cls.inner_key_name_pattern.findall(match)
|
||||
)
|
||||
if key in anatomy_sub_keys:
|
||||
raise ValueError((
|
||||
"Unsolvable recursion in inner keys, "
|
||||
"key: \"{}\" is in his own value."
|
||||
" Can't determine source, please check Anatomy templates."
|
||||
).format(key))
|
||||
|
||||
for anatomy_sub_key in anatomy_sub_keys:
|
||||
replace_value = key_values.get(anatomy_sub_key)
|
||||
if replace_value is None:
|
||||
raise KeyError((
|
||||
"Anatomy templates can't be filled."
|
||||
" Anatomy key `{0}` has"
|
||||
" invalid inner key `{1}`."
|
||||
).format(key, anatomy_sub_key))
|
||||
|
||||
if not (
|
||||
isinstance(replace_value, numbers.Number)
|
||||
or isinstance(replace_value, str)
|
||||
):
|
||||
raise ValueError((
|
||||
"Anatomy templates can't be filled."
|
||||
" Anatomy key `{0}` has"
|
||||
" invalid inner key `{1}`"
|
||||
" with value `{2}`."
|
||||
).format(key, anatomy_sub_key, str(replace_value)))
|
||||
|
||||
value = value.replace(match, str(replace_value))
|
||||
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def _prepare_inner_keys(cls, key_values):
|
||||
"""Check values of inner keys.
|
||||
|
||||
Check if inner key exist in template group and has valid value.
|
||||
It is also required to avoid infinite loop with unsolvable recursion
|
||||
when first inner key's value refers to second inner key's value where
|
||||
first is used.
|
||||
"""
|
||||
keys_to_solve = set(key_values.keys())
|
||||
while True:
|
||||
found = False
|
||||
for key in tuple(keys_to_solve):
|
||||
value = key_values[key]
|
||||
|
||||
if isinstance(value, str):
|
||||
matches = cls.inner_key_pattern.findall(value)
|
||||
if not matches:
|
||||
keys_to_solve.remove(key)
|
||||
continue
|
||||
|
||||
found = True
|
||||
key_values[key] = cls._replace_inner_keys(
|
||||
matches, value, key_values, key
|
||||
)
|
||||
continue
|
||||
|
||||
elif not isinstance(value, dict):
|
||||
keys_to_solve.remove(key)
|
||||
continue
|
||||
|
||||
subdict_found = False
|
||||
for _key, _value in tuple(value.items()):
|
||||
matches = cls.inner_key_pattern.findall(_value)
|
||||
if not matches:
|
||||
continue
|
||||
|
||||
subdict_found = True
|
||||
found = True
|
||||
key_values[key][_key] = cls._replace_inner_keys(
|
||||
matches, _value, key_values,
|
||||
"{}.{}".format(key, _key)
|
||||
)
|
||||
|
||||
if not subdict_found:
|
||||
keys_to_solve.remove(key)
|
||||
|
||||
if not found:
|
||||
break
|
||||
|
||||
return key_values
|
||||
|
||||
    @classmethod
    def _solve_template_inner_links(cls, templates):
        """Solve templates inner keys identified by "{@*}".

        Process is split into 2 parts.
        First is collecting all global keys (keys in top hierarchy where
        value is not a dictionary). All global keys are set for all group
        keys (keys in top hierarchy where value is a dictionary). Value of
        a key is not overridden in a group if the group already contains a
        value for the key.

        In the second part, every key reference with the "at" symbol in a
        value is replaced with the value of the referenced key from the
        group.

        Args:
            templates (dict): Raw templates data.

        Example:
            templates::
                key_1: "value_1",
                key_2: "{@key_1}/{filling_key}"

                group_1:
                    key_3: "value_3/{@key_2}"

                group_2:
                    key_2: "value_2"
                    key_4: "value_4/{@key_2}"

            output::
                key_1: "value_1"
                key_2: "value_1/{filling_key}"

                group_1: {
                    key_1: "value_1"
                    key_2: "value_1/{filling_key}"
                    key_3: "value_3/value_1/{filling_key}"
                }

                group_2: {
                    key_1: "value_1"
                    key_2: "value_2"
                    key_4: "value_4/value_2"
                }

        Returns:
            dict[str, Any]: Solved templates data.

        """
        default_key_values = templates.pop("common", {})
        output = {}
        for category_name, category_value in templates.items():
            new_category_value = {}
            for key, value in category_value.items():
                key_values = copy.deepcopy(default_key_values)
                key_values.update(value)
                new_category_value[key] = cls._prepare_inner_keys(key_values)
            output[category_name] = new_category_value

        default_keys_by_subkeys = cls._prepare_inner_keys(default_key_values)
        for key, value in default_keys_by_subkeys.items():
            output[key] = value

        return output

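A compact illustration of the '{@...}' expansion (template values are made up):

```python
templates = {
    "common": {"frame_padding": 4},
    "publish": {
        "default": {
            "directory": "{root[work]}/{project[name]}",
            "file": "{product[name]}_{version}.{ext}",
            "path": "{@directory}/{@file}",
        },
    },
}
solved = AnatomyTemplates._solve_template_inner_links(templates)
# solved["publish"]["default"]["path"]
# -> "{root[work]}/{project[name]}/{product[name]}_{version}.{ext}"
# solved["publish"]["default"]["frame_padding"] -> 4 (copied from "common")
```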
@classmethod
|
||||
def _dict_to_subkeys_list(cls, subdict):
|
||||
"""Convert dictionary to list of subkeys.
|
||||
|
||||
Example::
|
||||
|
||||
_dict_to_subkeys_list({
|
||||
"root": {
|
||||
"work": "path/to/work",
|
||||
"publish": "path/to/publish"
|
||||
}
|
||||
})
|
||||
[
|
||||
["root", "work"],
|
||||
["root", "publish"]
|
||||
]
|
||||
|
||||
|
||||
Args:
|
||||
dict[str, Any]: Dictionary to be converted.
|
||||
|
||||
Returns:
|
||||
list[list[str]]: List of subkeys.
|
||||
|
||||
"""
|
||||
output = []
|
||||
subkey_queue = collections.deque()
|
||||
subkey_queue.append((subdict, []))
|
||||
while subkey_queue:
|
||||
queue_item = subkey_queue.popleft()
|
||||
data, pre_keys = queue_item
|
||||
for key, value in data.items():
|
||||
result = list(pre_keys)
|
||||
result.append(key)
|
||||
if isinstance(value, dict):
|
||||
subkey_queue.append((value, result))
|
||||
else:
|
||||
output.append(result)
|
||||
return output
|
||||
|
|
@ -545,7 +545,7 @@ def get_workdir_from_session(session=None, template_key=None):
    )

    anatomy = Anatomy(project_name)
    template_obj = anatomy.templates_obj[template_key]["folder"]
    template_obj = anatomy.get_template_item("work", template_key, "directory")
    path = template_obj.format_strict(template_data)
    if path:
        path = os.path.normpath(path)

@ -44,7 +44,7 @@ class LegacyCreator(object):
|
|||
|
||||
@classmethod
|
||||
def apply_settings(cls, project_settings):
|
||||
"""Apply OpenPype settings to a plugin class."""
|
||||
"""Apply AYON settings to a plugin class."""
|
||||
|
||||
host_name = os.environ.get("AYON_HOST_NAME")
|
||||
plugin_type = "create"
|
||||
|
|
|
|||
|
|
@ -77,8 +77,10 @@ def check_destination_path(
    """

    anatomy_data.update(datetime_data)
    anatomy_filled = anatomy.format_all(anatomy_data)
    dest_path = anatomy_filled["delivery"][template_name]
    path_template = anatomy.get_template_item(
        "delivery", template_name, "path"
    )
    dest_path = path_template.format(anatomy_data)
    report_items = collections.defaultdict(list)

    if not dest_path.solved:
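The non-strict `format` used here returns a result object instead of raising, so the caller can report exactly what is missing; roughly (names taken from the surrounding code, fill data illustrative):

```python
path_template = anatomy.get_template_item("delivery", template_name, "path")
dest_path = path_template.format(anatomy_data)
if not dest_path.solved:
    # e.g. {"frame"} when the delivery template expects a frame number
    print("Missing keys:", dest_path.missing_keys)
```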
@ -150,7 +152,9 @@ def deliver_single_file(
|
|||
if format_dict:
|
||||
anatomy_data = copy.deepcopy(anatomy_data)
|
||||
anatomy_data["root"] = format_dict["root"]
|
||||
template_obj = anatomy.templates_obj["delivery"][template_name]
|
||||
template_obj = anatomy.get_template_item(
|
||||
"delivery", template_name, "path"
|
||||
)
|
||||
delivery_path = template_obj.format_strict(anatomy_data)
|
||||
|
||||
# Backwards compatibility when extension contained `.`
|
||||
|
|
@ -220,8 +224,9 @@ def deliver_sequence(
|
|||
report_items["Source file was not found"].append(msg)
|
||||
return report_items, 0
|
||||
|
||||
delivery_templates = anatomy.templates.get("delivery") or {}
|
||||
delivery_template = delivery_templates.get(template_name)
|
||||
delivery_template = anatomy.get_template_item(
|
||||
"delivery", template_name, "path", default=None
|
||||
)
|
||||
if delivery_template is None:
|
||||
msg = (
|
||||
"Delivery template \"{}\" in anatomy of project \"{}\""
|
||||
|
|
@ -232,7 +237,7 @@ def deliver_sequence(
|
|||
|
||||
# Check if 'frame' key is available in template which is required
|
||||
# for sequence delivery
|
||||
if "{frame" not in delivery_template:
|
||||
if "{frame" not in delivery_template.template:
|
||||
msg = (
|
||||
"Delivery template \"{}\" in anatomy of project \"{}\""
|
||||
"does not contain '{{frame}}' key to fill. Delivery of sequence"
|
||||
|
|
@ -277,8 +282,7 @@ def deliver_sequence(
|
|||
anatomy_data["frame"] = frame_indicator
|
||||
if format_dict:
|
||||
anatomy_data["root"] = format_dict["root"]
|
||||
template_obj = anatomy.templates_obj["delivery"][template_name]
|
||||
delivery_path = template_obj.format_strict(anatomy_data)
|
||||
delivery_path = delivery_template.format_strict(anatomy_data)
|
||||
|
||||
delivery_path = os.path.normpath(delivery_path.replace("\\", "/"))
|
||||
delivery_folder = os.path.dirname(delivery_path)
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ def from_published_scene(instance, replace_in_path=True):
|
|||
template_data["comment"] = None
|
||||
|
||||
anatomy = instance.context.data['anatomy']
|
||||
template_obj = anatomy.templates_obj["publish"]["path"]
|
||||
template_obj = anatomy.get_template_item("publish", "default", "path")
|
||||
template_filled = template_obj.format_strict(template_data)
|
||||
file_path = os.path.normpath(template_filled)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
# Publish
OpenPype is using `pyblish` for publishing process which is a little bit extented and modified mainly for UI purposes. OpenPype's (new) publish UI does not allow to enable/disable instances or plugins that can be done during creation part. Also does support actions only for validators after validation exception.
AYON is using `pyblish` for the publishing process, which is a little bit extended and modified mainly for UI purposes. The (new) publish UI does not allow enabling/disabling instances or plugins; that can be done during the creation part. It also supports actions only for validators, after a validation exception.

## Exceptions
OpenPype define few specific exceptions that should be used in publish plugins.
AYON defines a few specific exceptions that should be used in publish plugins.

### Validation exception
Validation plugins should raise `PublishValidationError` to show an artist what's wrong and give them actions to fix it. The exception indicates that the error which happened in the plugin can be fixed by the artist (with or without an action on the plugin). Any other errors stop publishing immediately. A `PublishValidationError` raised after the validation order has the same effect as any other exception.

@ -35,4 +35,4 @@ class MyExtendedPlugin(
### Extensions
Currently the only extension is the ability to define attributes for instances during creation. The `get_attribute_defs` method returns attribute definitions for the families defined in the plugin's `families` attribute if it is an instance plugin, or for the whole context if it is a context plugin. To convert existing values (or to remove legacy values), `convert_attribute_values` can be implemented. Values of publish attributes from a created instance are never removed automatically, so implementing this method is the best way to remove legacy data or convert it to a new data structure.

Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`.
Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`.

@ -6,6 +6,6 @@ ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1
ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2
ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3

DEFAULT_PUBLISH_TEMPLATE = "publish"
DEFAULT_HERO_PUBLISH_TEMPLATE = "hero"
TRANSIENT_DIR_TEMPLATE = "transient"
DEFAULT_PUBLISH_TEMPLATE = "default"
DEFAULT_HERO_PUBLISH_TEMPLATE = "default"
TRANSIENT_DIR_TEMPLATE = "default"

@ -742,29 +742,18 @@ def get_custom_staging_dir_info(
|
|||
anatomy = Anatomy(project_name)
|
||||
|
||||
template_name = profile["template_name"] or TRANSIENT_DIR_TEMPLATE
|
||||
_validate_transient_template(project_name, template_name, anatomy)
|
||||
|
||||
custom_staging_dir = anatomy.templates[template_name]["folder"]
|
||||
custom_staging_dir = anatomy.get_template_item(
|
||||
"staging", template_name, "directory", default=None
|
||||
)
|
||||
if custom_staging_dir is None:
|
||||
raise ValueError((
|
||||
"Anatomy of project \"{}\" does not have set"
|
||||
" \"{}\" template key!"
|
||||
).format(project_name, template_name))
|
||||
is_persistent = profile["custom_staging_dir_persistent"]
|
||||
|
||||
return custom_staging_dir, is_persistent
|
||||
|
||||
|
||||
def _validate_transient_template(project_name, template_name, anatomy):
|
||||
"""Check that transient template is correctly configured.
|
||||
|
||||
Raises:
|
||||
ValueError - if misconfigured template
|
||||
"""
|
||||
if template_name not in anatomy.templates:
|
||||
raise ValueError(("Anatomy of project \"{}\" does not have set"
|
||||
" \"{}\" template key!"
|
||||
).format(project_name, template_name))
|
||||
|
||||
if "folder" not in anatomy.templates[template_name]:
|
||||
raise ValueError(("There is not set \"folder\" template in \"{}\" anatomy" # noqa
|
||||
" for project \"{}\"."
|
||||
).format(template_name, project_name))
|
||||
return custom_staging_dir.template, is_persistent
|
||||
|
||||
|
||||
def get_published_workfile_instance(context):
|
||||
|
|
@ -815,9 +804,9 @@ def replace_with_published_scene_path(instance, replace_in_path=True):
|
|||
template_data["ext"] = rep.get("ext")
|
||||
template_data["comment"] = None
|
||||
|
||||
anatomy = instance.context.data['anatomy']
|
||||
anatomy_filled = anatomy.format(template_data)
|
||||
template_filled = anatomy_filled["publish"]["path"]
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
template = anatomy.get_template_item("publish", "default", "path")
|
||||
template_filled = template.format_strict(template_data)
|
||||
file_path = os.path.normpath(template_filled)
|
||||
|
||||
log.info("Using published scene for render {}".format(file_path))
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.