mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 12:54:40 +01:00

Merge branch 'feature/664-3-0-lib-refactor' into develop

commit 2bdc124644
197 changed files with 406314 additions and 2679 deletions

3 .gitattributes vendored Normal file

@@ -0,0 +1,3 @@
* text=auto
*.js eol=lf
*.c eol=lf

@@ -1,20 +0,0 @@
pype.aport package
==================

.. automodule:: pype.aport
    :members:
    :undoc-members:
    :show-inheritance:

Submodules
----------

pype.aport.api module
---------------------

.. automodule:: pype.aport.api
    :members:
    :undoc-members:
    :show-inheritance:

@@ -11,7 +11,6 @@ Subpackages

.. toctree::

    pype.aport
    pype.avalon_apps
    pype.clockify
    pype.ftrack

@@ -39,13 +39,9 @@ from .action import (
from .lib import (
    version_up,
    get_asset,
    get_project,
    get_hierarchy,
    get_subsets,
    get_version_from_path,
    get_last_version_from_path,
    modified_environ,
    add_tool_to_environment,
    source_hash,
    get_latest_version
)

@@ -88,14 +84,10 @@ __all__ = [

    # get contextual data
    "version_up",
    "get_project",
    "get_hierarchy",
    "get_asset",
    "get_subsets",
    "get_version_from_path",
    "get_last_version_from_path",
    "modified_environ",
    "add_tool_to_environment",
    "source_hash",

    "subprocess",

@@ -1,15 +1,13 @@
import os
import shutil
from pype.lib import PypeHook
from pype.api import (
    Anatomy,
    Logger
)
import platform
import pype.lib
from pype.api import Anatomy, Logger
import getpass
import avalon.api


class TvpaintPrelaunchHook(PypeHook):
class TvpaintPrelaunchHook(pype.lib.PypeHook):
    """
    Workfile preparation hook
    """

@@ -23,10 +21,22 @@ class TvpaintPrelaunchHook(PypeHook):

        self.signature = "( {} )".format(self.__class__.__name__)

    def install_pywin(self):
        if platform.system().lower() != "windows":
            return

        try:
            from win32com.shell import shell
        except Exception:
            output = pype.lib._subprocess(["pip", "install", "pywin32==227"])
            self.log.info(output)

    def execute(self, *args, env: dict = None) -> bool:
        if not env:
            env = os.environ

        self.install_pywin()

        # get context variables
        project_name = env["AVALON_PROJECT"]
        asset_name = env["AVALON_ASSET"]
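
`install_pywin` above shells out through `pype.lib._subprocess`, which (per the `pype/lib/__init__.py` hunk later in this diff) is re-exported from the new `applications` submodule and, judging by its use above, returns the captured output. A sketch of the same call pattern:

```python
# Sketch of the call pattern used by install_pywin above; any CLI works.
import pype.lib

output = pype.lib._subprocess(["pip", "--version"])
print(output)  # captured output, as logged via self.log.info() above
```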

74 pype/hosts/aftereffects/__init__.py Normal file

@@ -0,0 +1,74 @@
import os
import sys

from avalon import api, io
from avalon.vendor import Qt
from pype import lib
import pyblish.api


def check_inventory():
    if not lib.any_outdated():
        return

    host = api.registered_host()
    outdated_containers = []
    for container in host.ls():
        representation = container['representation']
        representation_doc = io.find_one(
            {
                "_id": io.ObjectId(representation),
                "type": "representation"
            },
            projection={"parent": True}
        )
        if representation_doc and not lib.is_latest(representation_doc):
            outdated_containers.append(container)

    # Warn about outdated containers.
    print("Starting new QApplication..")
    app = Qt.QtWidgets.QApplication(sys.argv)

    message_box = Qt.QtWidgets.QMessageBox()
    message_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
    msg = "There are outdated containers in the scene."
    message_box.setText(msg)
    message_box.exec_()

    # Garbage collect QApplication.
    del app


def application_launch():
    check_inventory()


def install():
    print("Installing Pype config...")

    plugins_directory = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        "plugins",
        "aftereffects"
    )

    pyblish.api.register_plugin_path(
        os.path.join(plugins_directory, "publish")
    )
    api.register_plugin_path(
        api.Loader, os.path.join(plugins_directory, "load")
    )
    api.register_plugin_path(
        api.Creator, os.path.join(plugins_directory, "create")
    )

    pyblish.api.register_callback(
        "instanceToggled", on_pyblish_instance_toggled
    )

    api.on("application.launched", application_launch)


def on_pyblish_instance_toggled(instance, old_value, new_value):
    """Toggle layer visibility on instance toggles."""
    instance[0].Visible = new_value

@@ -2,7 +2,7 @@ import sys

from avalon.vendor.Qt import QtGui
import avalon.fusion

from avalon import io

self = sys.modules[__name__]
self._project = None

@@ -59,3 +59,84 @@ def get_additional_data(container):
    return {"color": QtGui.QColor.fromRgbF(tile_color["R"],
                                           tile_color["G"],
                                           tile_color["B"])}


def switch_item(container,
                asset_name=None,
                subset_name=None,
                representation_name=None):
    """Switch container asset, subset or representation of a container by name.

    It'll always switch to the latest version - of course a different
    approach could be implemented.

    Args:
        container (dict): data of the item to switch with
        asset_name (str): name of the asset
        subset_name (str): name of the subset
        representation_name (str): name of the representation

    Returns:
        dict

    """

    if all(not x for x in [asset_name, subset_name, representation_name]):
        raise ValueError("Must have at least one change provided to switch.")

    # Collect any of current asset, subset and representation if not provided
    # so we can use the original name from those.
    if any(not x for x in [asset_name, subset_name, representation_name]):
        _id = io.ObjectId(container["representation"])
        representation = io.find_one({"type": "representation", "_id": _id})
        version, subset, asset, project = io.parenthood(representation)

        if asset_name is None:
            asset_name = asset["name"]

        if subset_name is None:
            subset_name = subset["name"]

        if representation_name is None:
            representation_name = representation["name"]

    # Find the new one
    asset = io.find_one({
        "name": asset_name,
        "type": "asset"
    })
    assert asset, ("Could not find asset in the database with the name "
                   "'%s'" % asset_name)

    subset = io.find_one({
        "name": subset_name,
        "type": "subset",
        "parent": asset["_id"]
    })
    assert subset, ("Could not find subset in the database with the name "
                    "'%s'" % subset_name)

    version = io.find_one(
        {
            "type": "version",
            "parent": subset["_id"]
        },
        sort=[('name', -1)]
    )

    assert version, "Could not find a version for {}.{}".format(
        asset_name, subset_name
    )

    representation = io.find_one({
        "name": representation_name,
        "type": "representation",
        "parent": version["_id"]}
    )

    assert representation, ("Could not find representation in the database "
                            "with the name '%s'" % representation_name)

    avalon.api.switch(container, representation)

    return representation
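
`switch_item`'s contract is fully described by its docstring; a usage sketch, assuming a live Avalon session and an already-loaded Fusion container (the asset name is hypothetical):

```python
# Usage sketch: move an existing container to the same subset/representation
# on another asset, always at the latest version (per the docstring above).
# "hero" is a hypothetical asset name.
representation = switch_item(container, asset_name="hero")
print(representation["name"])  # the representation document that was switched to
```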

@@ -234,7 +234,7 @@ def switch(asset_name, filepath=None, new=True):
    representations = []
    for container in containers:
        try:
            representation = pype.switch_item(
            representation = fusion_lib.switch_item(
                container,
                asset_name=asset_name)
            representations.append(representation)

@@ -1,56 +1,45 @@
# -*- coding: utf-8 -*-
"""Pype Harmony Host implementation."""
import os
import sys
from pathlib import Path

from avalon import api, io, harmony
from avalon.vendor import Qt
import avalon.tools.sceneinventory

import pyblish.api

from pype import lib
from pype.api import config


def set_scene_settings(settings):
    """Set correct scene settings in Harmony.

    signature = harmony.signature("set_scene_settings")
    func = """function %s(args)
    {
        if (args[0]["fps"])
        {
            scene.setFrameRate(args[0]["fps"]);
        }
        if (args[0]["frameStart"] && args[0]["frameEnd"])
        {
            var duration = args[0]["frameEnd"] - args[0]["frameStart"] + 1
    Args:
        settings (dict): Scene settings.

            if (frame.numberOf() < duration)
            {
                frame.insert(
                    duration, duration - frame.numberOf()
                );
            }
    Returns:
        dict: Dictionary of settings to set.

            scene.setStartFrame(1);
            scene.setStopFrame(duration);
        }
        if (args[0]["resolutionWidth"] && args[0]["resolutionHeight"])
        {
            scene.setDefaultResolution(
                args[0]["resolutionWidth"], args[0]["resolutionHeight"], 41.112
            )
        }
    }
    %s
    """ % (signature, signature)
    harmony.send({"function": func, "args": [settings]})
    """
    harmony.send(
        {"function": "PypeHarmony.setSceneSettings", "args": settings})


def get_asset_settings():
    """Get settings on current asset from database.

    Returns:
        dict: Scene data.

    """
    asset_data = lib.get_asset()["data"]
    fps = asset_data.get("fps")
    frame_start = asset_data.get("frameStart")
    frame_end = asset_data.get("frameEnd")
    resolution_width = asset_data.get("resolutionWidth")
    resolution_height = asset_data.get("resolutionHeight")
    entity_type = asset_data.get("entityType")

    scene_data = {
        "fps": fps,

@@ -63,17 +52,25 @@ def get_asset_settings():
    try:
        skip_resolution_check = \
            config.get_presets()["harmony"]["general"]["skip_resolution_check"]
        skip_timelines_check = \
            config.get_presets()["harmony"]["general"]["skip_timelines_check"]
    except KeyError:
        skip_resolution_check = []
        skip_timelines_check = []

    if os.getenv('AVALON_TASK') in skip_resolution_check:
        scene_data.pop("resolutionWidth")
        scene_data.pop("resolutionHeight")

    if entity_type in skip_timelines_check:
        scene_data.pop('frameStart', None)
        scene_data.pop('frameEnd', None)

    return scene_data


def ensure_scene_settings():
    """Validate if Harmony scene has valid settings."""
    settings = get_asset_settings()

    invalid_settings = []

@@ -86,23 +83,22 @@ def ensure_scene_settings():

    # Warn about missing attributes.
    if invalid_settings:
        print("Starting new QApplication..")
        app = Qt.QtWidgets.QApplication.instance()
        if not app:
            app = Qt.QtWidgets.QApplication(sys.argv)

        message_box = Qt.QtWidgets.QMessageBox()
        message_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
        msg = "Missing attributes:"
        for item in invalid_settings:
            msg += f"\n{item}"
        message_box.setText(msg)
        message_box.exec_()

        harmony.send(
            {"function": "PypeHarmony.message", "args": msg})

    set_scene_settings(valid_settings)


def check_inventory():
    """Check if scene contains outdated containers.

    If it does it will colorize outdated nodes and display warning message
    in Harmony.
    """
    if not lib.any_outdated():
        return

@@ -121,89 +117,51 @@ def check_inventory():
            outdated_containers.append(container)

    # Colour nodes.
    sig = harmony.signature("set_color")
    func = """function %s(args){

        for( var i =0; i <= args[0].length - 1; ++i)
        {
            var red_color = new ColorRGBA(255, 0, 0, 255);
            node.setColor(args[0][i], red_color);
        }
    }
    %s
    """ % (sig, sig)
    outdated_nodes = []
    for container in outdated_containers:
        if container["loader"] == "ImageSequenceLoader":
            outdated_nodes.append(
                harmony.find_node_by_name(container["name"], "READ")
            )
    harmony.send({"function": func, "args": [outdated_nodes]})
    harmony.send({"function": "PypeHarmony.setColor", "args": outdated_nodes})

    # Warn about outdated containers.
    print("Starting new QApplication..")
    app = Qt.QtWidgets.QApplication(sys.argv)

    message_box = Qt.QtWidgets.QMessageBox()
    message_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
    msg = "There are outdated containers in the scene."
    message_box.setText(msg)
    message_box.exec_()

    # Garbage collect QApplication.
    del app
    harmony.send({"function": "PypeHarmony.message", "args": msg})


def application_launch():
    """Event that is executed after Harmony is launched."""
    # FIXME: This is breaking server <-> client communication.
    # It is now moved so it is manually called.
    # ensure_scene_settings()
    # check_inventory()
    pass
    pype_harmony_path = Path(__file__).parent / "js" / "PypeHarmony.js"
    pype_harmony_js = pype_harmony_path.read_text()

    # go through js/creators, loaders and publish folders and load all scripts
    script = ""
    for item in ["creators", "loaders", "publish"]:
        dir_to_scan = Path(__file__).parent / "js" / item
        for child in dir_to_scan.iterdir():
            script += child.read_text()

    # send scripts to Harmony
    harmony.send({"script": pype_harmony_js})
    harmony.send({"script": script})


def export_template(backdrops, nodes, filepath):
    """Export Template to file.

    sig = harmony.signature("set_color")
    func = """function %s(args)
    {
    Args:
        backdrops (list): List of backdrops to export.
        nodes (list): List of nodes to export.
        filepath (str): Path where to save Template.

        var temp_node = node.add("Top", "temp_note", "NOTE", 0, 0, 0);
        var template_group = node.createGroup(temp_node, "temp_group");
        node.deleteNode( template_group + "/temp_note" );

        selection.clearSelection();
        for (var f = 0; f < args[1].length; f++)
        {
            selection.addNodeToSelection(args[1][f]);
        }

        Action.perform("copy()", "Node View");

        selection.clearSelection();
        selection.addNodeToSelection(template_group);
        Action.perform("onActionEnterGroup()", "Node View");
        Action.perform("paste()", "Node View");

        // Recreate backdrops in group.
        for (var i = 0 ; i < args[0].length; i++)
        {
            MessageLog.trace(args[0][i]);
            Backdrop.addBackdrop(template_group, args[0][i]);
        };

        Action.perform( "selectAll()", "Node View" );
        copyPaste.createTemplateFromSelection(args[2], args[3]);

        // Unfocus the group in Node view, delete all nodes and backdrops
        // created during the process.
        Action.perform("onActionUpToParent()", "Node View");
        node.deleteNode(template_group, true, true);
    }
    %s
    """ % (sig, sig)
    """
    harmony.send({
        "function": func,
        "function": "PypeHarmony.exportTemplate",
        "args": [
            backdrops,
            nodes,

@@ -214,7 +172,8 @@ def export_template(backdrops, nodes, filepath):


def install():
    print("Installing Pype config...")
    """Install Pype as host config."""
    print("Installing Pype config ...")

    plugins_directory = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),

@@ -242,17 +201,12 @@ def install():

def on_pyblish_instance_toggled(instance, old_value, new_value):
    """Toggle node enabling on instance toggles."""

    sig = harmony.signature("enable_node")
    func = """function %s(args)
    {
        node.setEnable(args[0], args[1])
    }
    %s
    """ % (sig, sig)
    try:
        harmony.send(
            {"function": func, "args": [instance[0], new_value]}
            {
                "function": "PypeHarmony.toggleInstance",
                "args": [instance[0], new_value]
            }
        )
    except IndexError:
        print(f"Instance '{instance}' is missing node")
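
The hunks above replace inline JS snippets with calls into the `PypeHarmony` namespace shipped with the integration. The wire format is a dict with a function path and its args; a minimal sketch of the same call from Python (requires a running Harmony session):

```python
# Minimal sketch of the client -> Harmony call pattern shown above.
from avalon import harmony

harmony.send({
    "function": "PypeHarmony.message",   # function path inside the JS namespace
    "args": "Hello from Pype"            # passed to PypeHarmony.message(message)
})
```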

117 pype/hosts/harmony/js/.eslintrc.json Normal file

@@ -0,0 +1,117 @@
{
    "env": {
        "browser": true
    },
    "extends": "eslint:recommended",
    "parserOptions": {
        "ecmaVersion": 3
    },
    "rules": {
        "indent": [
            "error",
            4
        ],
        "linebreak-style": [
            "error",
            "unix"
        ],
        "quotes": [
            "error",
            "single"
        ],
        "semi": [
            "error",
            "always"
        ]
    },
    "globals": {
        "$": "readonly",
        "Action": "readonly",
        "Backdrop": "readonly",
        "Button": "readonly",
        "Cel": "readonly",
        "Cel3d": "readonly",
        "CheckBox": "readonly",
        "ColorRGBA": "readonly",
        "ComboBox": "readonly",
        "DateEdit": "readonly",
        "DateEditEnum": "readonly",
        "Dialog": "readonly",
        "Dir": "readonly",
        "DirSpec": "readonly",
        "Drawing": "readonly",
        "DrawingToolParams": "readonly",
        "DrawingTools": "readonly",
        "EnvelopeCreator": "readonly",
        "ExportVideoDlg": "readonly",
        "File": "readonly",
        "FileAccess": "readonly",
        "FileDialog": "readonly",
        "GroupBox": "readonly",
        "ImportDrawingDlg": "readonly",
        "Input": "readonly",
        "KeyModifiers": "readonly",
        "Label": "readonly",
        "LayoutExports": "readonly",
        "LayoutExportsParams": "readonly",
        "LineEdit": "readonly",
        "Matrix4x4": "readonly",
        "MessageBox": "readonly",
        "MessageLog": "readonly",
        "Model3d": "readonly",
        "MovieImport": "readonly",
        "NumberEdit": "readonly",
        "PaletteManager": "readonly",
        "PaletteObjectManager": "readonly",
        "PermanentFile": "readonly",
        "Point2d": "readonly",
        "Point3d": "readonly",
        "Process": "readonly",
        "Process2": "readonly",
        "Quaternion": "readonly",
        "QuicktimeExporter": "readonly",
        "RadioButton": "readonly",
        "RemoteCmd": "readonly",
        "Scene": "readonly",
        "Settings": "readonly",
        "Slider": "readonly",
        "SpinBox": "readonly",
        "SubnodeData": "readonly",
        "System": "readonly",
        "TemporaryFile": "readonly",
        "TextEdit": "readonly",
        "TimeEdit": "readonly",
        "Timeline": "readonly",
        "ToolProperties": "readonly",
        "UiLoader": "readonly",
        "Vector2d": "readonly",
        "Vector3d": "readonly",
        "WebCCExporter": "readonly",
        "Workspaces": "readonly",
        "__scriptManager__": "readonly",
        "__temporaryFileContext__": "readonly",
        "about": "readonly",
        "column": "readonly",
        "compositionOrder": "readonly",
        "copyPaste": "readonly",
        "deformation": "readonly",
        "drawingExport": "readonly",
        "element": "readonly",
        "exporter": "readonly",
        "fileMapper": "readonly",
        "frame": "readonly",
        "func": "readonly",
        "library": "readonly",
        "node": "readonly",
        "preferences": "readonly",
        "render": "readonly",
        "scene": "readonly",
        "selection": "readonly",
        "sound": "readonly",
        "specialFolders": "readonly",
        "translator": "readonly",
        "view": "readonly",
        "waypoint": "readonly",
        "xsheet": "readonly"
    }
}

197 pype/hosts/harmony/js/PypeHarmony.js Normal file

@@ -0,0 +1,197 @@
// ***************************************************************************
// * Pype Harmony Host *
// ***************************************************************************


/**
 * @namespace
 * @classdesc PypeHarmony encapsulate all Pype related functions.
 * @property {Object} Loaders Namespace for Loaders JS code.
 * @property {Object} Creators Namespace for Creators JS code.
 * @property {Object} Publish Namespace for Publish plugins JS code.
 */
var PypeHarmony = {
    Loaders: {},
    Creators: {},
    Publish: {}
};


/**
 * Show message in Harmony.
 * @function
 * @param {string} message Argument containing message.
 */
PypeHarmony.message = function(message) {
    MessageBox.information(message);
};


/**
 * Set scene setting based on shot/asset settings.
 * @function
 * @param {obj} settings Scene settings.
 */
PypeHarmony.setSceneSettings = function(settings) {
    if (settings.fps) {
        scene.setFrameRate(settings.fps);
    }

    if (settings.frameStart && settings.frameEnd) {
        var duration = settings.frameEnd - settings.frameStart + 1;

        if (frame.numberOf() > duration) {
            frame.remove(duration, frame.numberOf() - duration);
        }

        if (frame.numberOf() < duration) {
            frame.insert(duration, duration - frame.numberOf());
        }

        scene.setStartFrame(1);
        scene.setStopFrame(duration);
    }
    if (settings.resolutionWidth && settings.resolutionHeight) {
        scene.setDefaultResolution(
            settings.resolutionWidth, settings.resolutionHeight, 41.112
        );
    }
};


/**
 * Get scene settings.
 * @function
 * @return {array} Scene settings.
 */
PypeHarmony.getSceneSettings = function() {
    return [
        about.getApplicationPath(),
        scene.currentProjectPath(),
        scene.currentScene(),
        scene.getFrameRate(),
        scene.getStartFrame(),
        scene.getStopFrame(),
        sound.getSoundtrackAll().path(),
        scene.defaultResolutionX(),
        scene.defaultResolutionY()
    ];
};


/**
 * Set color of nodes.
 * @function
 * @param {array} nodes List of nodes.
 * @param {array} rgba array of RGBA components of color.
 */
PypeHarmony.setColor = function(nodes, rgba) {
    for (var i = 0; i <= nodes.length - 1; ++i) {
        var color = PypeHarmony.color(rgba);
        node.setColor(nodes[i], color);
    }
};


/**
 * Extract Template into file.
 * @function
 * @param {array} args Arguments for template extraction.
 *
 * @example
 * // arguments are in this order:
 * var args = [backdrops, nodes, templateFilename, templateDir];
 *
 */
PypeHarmony.exportTemplate = function(args) {
    var tempNode = node.add('Top', 'temp_note', 'NOTE', 0, 0, 0);
    var templateGroup = node.createGroup(tempNode, 'temp_group');
    node.deleteNode( templateGroup + '/temp_note' );

    selection.clearSelection();
    for (var f = 0; f < args[1].length; f++) {
        selection.addNodeToSelection(args[1][f]);
    }

    Action.perform('copy()', 'Node View');

    selection.clearSelection();
    selection.addNodeToSelection(templateGroup);
    Action.perform('onActionEnterGroup()', 'Node View');
    Action.perform('paste()', 'Node View');

    // Recreate backdrops in group.
    for (var i = 0; i < args[0].length; i++) {
        MessageLog.trace(args[0][i]);
        Backdrop.addBackdrop(templateGroup, args[0][i]);
    }

    Action.perform('selectAll()', 'Node View' );
    copyPaste.createTemplateFromSelection(args[2], args[3]);

    // Unfocus the group in Node view, delete all nodes and backdrops
    // created during the process.
    Action.perform('onActionUpToParent()', 'Node View');
    node.deleteNode(templateGroup, true, true);
};


/**
 * Toggle instance in Harmony.
 * @function
 * @param {array} args Instance name and value.
 */
PypeHarmony.toggleInstance = function(args) {
    node.setEnable(args[0], args[1]);
};


/**
 * Delete node in Harmony.
 * @function
 * @param {string} _node Node name.
 */
PypeHarmony.deleteNode = function(_node) {
    node.deleteNode(_node, true, true);
};


/**
 * Copy file.
 * @function
 * @param {string} src Source file name.
 * @param {string} dst Destination file name.
 */
PypeHarmony.copyFile = function(src, dst) {
    var srcFile = new PermanentFile(src);
    var dstFile = new PermanentFile(dst);
    srcFile.copy(dstFile);
};


/**
 * create RGBA color from array.
 * @function
 * @param {array} rgba array of rgba values.
 * @return {ColorRGBA} ColorRGBA Harmony class.
 */
PypeHarmony.color = function(rgba) {
    return new ColorRGBA(rgba[0], rgba[1], rgba[2], rgba[3]);
};


/**
 * get all dependencies for given node.
 * @function
 * @param {string} _node Node path.
 * @return {array} List of dependent nodes.
 */
PypeHarmony.getDependencies = function(_node) {
    var target_node = _node;
    var numInput = node.numberOfInputPorts(target_node);
    var dependencies = [];
    for (var i = 0 ; i < numInput; i++) {
        dependencies.push(node.srcNode(target_node, i));
    }
    return dependencies;
};
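
Functions that return a value come back in the reply. A sketch reading the settings collected by `getSceneSettings` above (the `"result"` key of the reply is an assumption about `avalon.harmony`'s reply shape):

```python
# Sketch: call a value-returning PypeHarmony function from Python.
# The "result" key of the reply is an assumption about avalon.harmony.
from avalon import harmony

reply = harmony.send(
    {"function": "PypeHarmony.getSceneSettings", "args": []}
)
app_path, project_path, scene_name = reply["result"][:3]  # order per getSceneSettings
```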

15 pype/hosts/harmony/js/README.md Normal file

@@ -0,0 +1,15 @@
## Pype - ToonBoom Harmony integration

### Development

#### Setting up ESLint as linter for javascript code

You need [node.js](https://nodejs.org/en/) installed. All you need to do then
is to run:

```sh
npm install
```

in **js** directory. This will install eslint and all requirements locally.

In [Atom](https://atom.io/) it is enough to install [linter-eslint](https://atom.io/packages/linter-eslint) and set global *npm* prefix in its settings.

33 pype/hosts/harmony/js/creators/CreateRender.js Normal file

@@ -0,0 +1,33 @@
/* global PypeHarmony:writable, include */
// ***************************************************************************
// * CreateRender *
// ***************************************************************************


// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony === 'undefined') {
    var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
    include(PYPE_HARMONY_JS + '/pype_harmony.js');
}


/**
 * @namespace
 * @classdesc Code creating render containers in Harmony.
 */
var CreateRender = function() {};


/**
 * Create render instance.
 * @function
 * @param {array} args Arguments for instance.
 */
CreateRender.prototype.create = function(args) {
    node.setTextAttr(args[0], 'DRAWING_TYPE', 1, 'PNG4');
    node.setTextAttr(args[0], 'DRAWING_NAME', 1, args[1]);
    node.setTextAttr(args[0], 'MOVIE_PATH', 1, args[1]);
};

// add self to Pype Creators
PypeHarmony.Creators.CreateRender = new CreateRender();

281 pype/hosts/harmony/js/loaders/ImageSequenceLoader.js Normal file

@@ -0,0 +1,281 @@
/* global PypeHarmony:writable, include */
// ***************************************************************************
// * ImageSequenceLoader *
// ***************************************************************************


// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony === 'undefined') {
    var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
    include(PYPE_HARMONY_JS + '/pype_harmony.js');
}


/**
 * @namespace
 * @classdesc Image Sequence loader JS code.
 */
var ImageSequenceLoader = function() {
    this.PNGTransparencyMode = 0; // Premultiplied with Black
    this.TGATransparencyMode = 0; // Premultiplied with Black
    this.SGITransparencyMode = 0; // Premultiplied with Black
    this.LayeredPSDTransparencyMode = 1; // Straight
    this.FlatPSDTransparencyMode = 2; // Premultiplied with White
};


/**
 * Get unique column name.
 * @function
 * @param {string} columnPrefix Column name.
 * @return {string} Unique column name.
 */
ImageSequenceLoader.prototype.getUniqueColumnName = function(columnPrefix) {
    var suffix = 0;
    // finds if unique name for a column
    var columnName = columnPrefix;
    while (suffix < 2000) {
        if (!column.type(columnName)) {
            break;
        }

        suffix = suffix + 1;
        columnName = columnPrefix + '_' + suffix;
    }
    return columnName;
};


/**
 * Import file sequences into Harmony.
 * @function
 * @param {object} args Arguments for import, see Example.
 * @return {string} Read node name
 *
 * @example
 * // Arguments are in following order:
 * var args = [
 *     files,      // Files in file sequences.
 *     asset,      // Asset name.
 *     subset,     // Subset name.
 *     startFrame, // Sequence starting frame.
 *     groupId     // Unique group ID (uuid4).
 * ];
 */
ImageSequenceLoader.prototype.importFiles = function(args) {
    var doc = $.scn;
    var files = args[0];
    var asset = args[1];
    var subset = args[2];
    var startFrame = args[3];
    var groupId = args[4];
    var vectorFormat = null;
    var extension = null;
    var filename = files[0];
    var pos = filename.lastIndexOf('.');
    if (pos < 0) {
        return null;
    }

    // Get the current group
    var nodeViewWidget = $.app.getWidgetByName('Node View');
    if (!nodeViewWidget) {
        $.alert('You must have a Node View open!', 'No Node View!', 'OK!');
        return;
    }

    nodeViewWidget.setFocus();
    var nodeView = view.currentView();
    var currentGroup = null;
    if (!nodeView) {
        currentGroup = doc.root;
    } else {
        currentGroup = doc.$node(view.group(nodeView));
    }
    // Get a unique iterative name for the container read node
    var num = 0;
    var name = '';
    do {
        name = asset + '_' + (num++) + '_' + subset;
    } while (currentGroup.getNodeByName(name) != null);


    extension = filename.substr(pos+1).toLowerCase();
    if (extension == 'jpeg') {
        extension = 'jpg';
    }

    if (extension == 'tvg') {
        vectorFormat = 'TVG';
        extension ='SCAN'; // element.add() will use this.
    }

    var elemId = element.add(
        name,
        'BW',
        scene.numberOfUnitsZ(),
        extension.toUpperCase(),
        vectorFormat
    );

    if (elemId == -1) {
        // hum, unknown file type most likely -- let's skip it.
        return null; // no read to add.
    }

    var uniqueColumnName = this.getUniqueColumnName(name);
    column.add(uniqueColumnName, 'DRAWING');
    column.setElementIdOfDrawing(uniqueColumnName, elemId);
    var read = node.add(currentGroup, name, 'READ', 0, 0, 0);
    var transparencyAttr = node.getAttr(
        read, frame.current(), 'READ_TRANSPARENCY'
    );
    var opacityAttr = node.getAttr(read, frame.current(), 'OPACITY');
    transparencyAttr.setValue(true);
    opacityAttr.setValue(true);
    var alignmentAttr = node.getAttr(read, frame.current(), 'ALIGNMENT_RULE');
    alignmentAttr.setValue('ASIS');
    var transparencyModeAttr = node.getAttr(
        read, frame.current(), 'applyMatteToColor'
    );
    if (extension === 'png') {
        transparencyModeAttr.setValue(this.PNGTransparencyMode);
    }
    if (extension === 'tga') {
        transparencyModeAttr.setValue(this.TGATransparencyMode);
    }
    if (extension === 'sgi') {
        transparencyModeAttr.setValue(this.SGITransparencyMode);
    }
    if (extension === 'psd') {
        transparencyModeAttr.setValue(this.FlatPSDTransparencyMode);
    }
    if (extension === 'jpg') {
        transparencyModeAttr.setValue(this.LayeredPSDTransparencyMode);
    }

    var drawingFilePath;
    var timing;
    node.linkAttr(read, 'DRAWING.ELEMENT', uniqueColumnName);
    if (files.length === 1) {
        // Create a drawing, 'true' indicates that the file exists.
        Drawing.create(elemId, 1, true);
        // Get the actual path, in tmp folder.
        drawingFilePath = Drawing.filename(elemId, '1');
        PypeHarmony.copyFile(files[0], drawingFilePath);
        // Expose the image for the entire frame range.
        for (var i = 0; i <= frame.numberOf() - 1; ++i) {
            timing = startFrame + i;
            column.setEntry(uniqueColumnName, 1, timing, '1');
        }
    } else {
        // Create a drawing for each file.
        for (var j = 0; j <= files.length - 1; ++j) {
            timing = startFrame + j;
            // Create a drawing, 'true' indicates that the file exists.
            Drawing.create(elemId, timing, true);
            // Get the actual path, in tmp folder.
            drawingFilePath = Drawing.filename(elemId, timing.toString());
            PypeHarmony.copyFile(files[j], drawingFilePath);
            column.setEntry(uniqueColumnName, 1, timing, timing.toString());
        }
    }
    var greenColor = new ColorRGBA(0, 255, 0, 255);
    node.setColor(read, greenColor);

    // Add uuid to attribute of the container read node
    node.createDynamicAttr(read, 'STRING', 'uuid', 'uuid', false);
    node.setTextAttr(read, 'uuid', 1.0, groupId);
    return read;
};


/**
 * Replace files sequences in Harmony.
 * @function
 * @param {object} args Arguments for import, see Example.
 * @return {string} Read node name
 *
 * @example
 * // Arguments are in following order:
 * var args = [
 *     files,      // Files in file sequences
 *     name,       // Node name
 *     startFrame  // Sequence starting frame
 * ];
 */
ImageSequenceLoader.prototype.replaceFiles = function(args) {
    var files = args[0];
    MessageLog.trace(files);
    MessageLog.trace(files.length);
    var _node = args[1];
    var startFrame = args[2];
    var _column = node.linkedColumn(_node, 'DRAWING.ELEMENT');
    var elemId = column.getElementIdOfDrawing(_column);
    // Delete existing drawings.
    var timings = column.getDrawingTimings(_column);
    for (var i = 0; i <= timings.length - 1; ++i) {
        column.deleteDrawingAt(_column, parseInt(timings[i]));
    }
    var filename = files[0];
    var pos = filename.lastIndexOf('.');
    if (pos < 0) {
        return null;
    }
    var extension = filename.substr(pos+1).toLowerCase();
    if (extension === 'jpeg') {
        extension = 'jpg';
    }

    var transparencyModeAttr = node.getAttr(
        _node, frame.current(), 'applyMatteToColor'
    );
    if (extension === 'png') {
        transparencyModeAttr.setValue(this.PNGTransparencyMode);
    }
    if (extension === 'tga') {
        transparencyModeAttr.setValue(this.TGATransparencyMode);
    }
    if (extension === 'sgi') {
        transparencyModeAttr.setValue(this.SGITransparencyMode);
    }
    if (extension == 'psd') {
        transparencyModeAttr.setValue(this.FlatPSDTransparencyMode);
    }
    if (extension === 'jpg') {
        transparencyModeAttr.setValue(this.LayeredPSDTransparencyMode);
    }

    var drawingFilePath;
    var timing;
    if (files.length == 1) {
        // Create a drawing, 'true' indicates that the file exists.
        Drawing.create(elemId, 1, true);
        // Get the actual path, in tmp folder.
        drawingFilePath = Drawing.filename(elemId, '1');
        PypeHarmony.copyFile(files[0], drawingFilePath);
        MessageLog.trace(files[0]);
        MessageLog.trace(drawingFilePath);
        // Expose the image for the entire frame range.
        for (var k = 0; k <= frame.numberOf() - 1; ++k) {
            timing = startFrame + k;
            column.setEntry(_column, 1, timing, '1');
        }
    } else {
        // Create a drawing for each file.
        for (var l = 0; l <= files.length - 1; ++l) {
            timing = startFrame + l;
            // Create a drawing, 'true' indicates that the file exists.
            Drawing.create(elemId, timing, true);
            // Get the actual path, in tmp folder.
            drawingFilePath = Drawing.filename(elemId, timing.toString());
            PypeHarmony.copyFile( files[l], drawingFilePath );
            column.setEntry(_column, 1, timing, timing.toString());
        }
    }
    var greenColor = new ColorRGBA(0, 255, 0, 255);
    node.setColor(_node, greenColor);
};

// add self to Pype Loaders
PypeHarmony.Loaders.ImageSequenceLoader = new ImageSequenceLoader();
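
The `@example` blocks above document the positional args. A Python-side sketch building them for `importFiles` (paths, asset and subset names are hypothetical; addressing a nested namespace path through `harmony.send` is assumed to work the same way as the top-level calls earlier in this diff):

```python
# Sketch: Python-side argument layout for importFiles, per its @example above.
import uuid
from avalon import harmony

args = [
    ["/renders/sh010/beauty.0001.png",   # files in the sequence (hypothetical)
     "/renders/sh010/beauty.0002.png"],
    "sh010",                             # asset
    "imageMain",                         # subset
    1,                                   # startFrame
    str(uuid.uuid4())                    # groupId
]
read_node = harmony.send({
    "function": "PypeHarmony.Loaders.ImageSequenceLoader.importFiles",
    "args": args
})["result"]  # reply shape is an assumption, as noted earlier
```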

177 pype/hosts/harmony/js/loaders/TemplateLoader.js Normal file

@@ -0,0 +1,177 @@
/* global PypeHarmony:writable, include */
// ***************************************************************************
// * TemplateLoader *
// ***************************************************************************


// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony === 'undefined') {
    var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
    include(PYPE_HARMONY_JS + '/pype_harmony.js');
}


/**
 * @namespace
 * @classdesc Template loader JS code.
 */
var TemplateLoader = function() {};


/**
 * Load template as container.
 * @function
 * @param {array} args Arguments, see example.
 * @return {string} Name of container.
 *
 * @example
 * // arguments are in following order:
 * var args = [
 *     templatePath, // Path to tpl file.
 *     assetName,    // Asset name.
 *     subsetName,   // Subset name.
 *     groupId       // unique ID (uuid4)
 * ];
 */
TemplateLoader.prototype.loadContainer = function(args) {
    var doc = $.scn;
    var templatePath = args[0];
    var assetName = args[1];
    var subset = args[2];
    var groupId = args[3];

    // Get the current group
    var nodeViewWidget = $.app.getWidgetByName('Node View');
    if (!nodeViewWidget) {
        $.alert('You must have a Node View open!', 'No Node View!', 'OK!');
        return;
    }

    nodeViewWidget.setFocus();
    var currentGroup;
    var nodeView = view.currentView();
    if (!nodeView) {
        currentGroup = doc.root;
    } else {
        currentGroup = doc.$node(view.group(nodeView));
    }

    // Get a unique iterative name for the container group
    var num = 0;
    var containerGroupName = '';
    do {
        containerGroupName = assetName + '_' + (num++) + '_' + subset;
    } while (currentGroup.getNodeByName(containerGroupName) != null);

    // import the template
    var tplNodes = currentGroup.importTemplate(templatePath);
    MessageLog.trace(tplNodes);
    // Create the container group
    var groupNode = currentGroup.addGroup(
        containerGroupName, false, false, tplNodes);

    // Add uuid to attribute of the container group
    node.createDynamicAttr(groupNode, 'STRING', 'uuid', 'uuid', false);
    node.setTextAttr(groupNode, 'uuid', 1.0, groupId);

    return String(groupNode);
};


/**
 * Replace existing node container.
 * @function
 * @param {string} dstNodePath Harmony path to destination Node.
 * @param {string} srcNodePath Harmony path to source Node.
 * @param {string} renameSrc ...
 * @param {boolean} cloneSrc ...
 * @return {boolean} Success
 * @todo This is work in progress.
 */
TemplateLoader.prototype.replaceNode = function(
    dstNodePath, srcNodePath, renameSrc, cloneSrc) {
    var doc = $.scn;
    var srcNode = doc.$node(srcNodePath);
    var dstNode = doc.$node(dstNodePath);
    // var dstNodeName = dstNode.name;
    var replacementNode = srcNode;
    // var dstGroup = dstNode.group;
    $.beginUndo();
    if (cloneSrc) {
        replacementNode = doc.$node(
            $.nodeTools.copy_paste_node(
                srcNodePath, dstNode.name + '_CLONE', dstNode.group.path));
    } else {
        if (replacementNode.group.path != srcNode.group.path) {
            replacementNode.moveToGroup(dstNode);
        }
    }
    var inLinks = dstNode.getInLinks();
    var link, inNode, inPort, outPort, outNode, success;
    for (var l in inLinks) {
        if (Object.prototype.hasOwnProperty.call(inLinks, l)) {
            link = inLinks[l];
            inPort = Number(link.inPort);
            outPort = Number(link.outPort);
            outNode = link.outNode;
            success = replacementNode.linkInNode(outNode, inPort, outPort);
            if (success) {
                $.log('Successfully connected ' + outNode + ' : ' +
                    outPort + ' -> ' + replacementNode + ' : ' + inPort);
            } else {
                $.alert('Failed to connect ' + outNode + ' : ' +
                    outPort + ' -> ' + replacementNode + ' : ' + inPort);
            }
        }
    }

    var outLinks = dstNode.getOutLinks();
    for (l in outLinks) {
        if (Object.prototype.hasOwnProperty.call(outLinks, l)) {
            link = outLinks[l];
            inPort = Number(link.inPort);
            outPort = Number(link.outPort);
            inNode = link.inNode;
            // first we must disconnect the port from the node being
            // replaced to this links inNode port
            inNode.unlinkInPort(inPort);
            success = replacementNode.linkOutNode(inNode, outPort, inPort);
            if (success) {
                $.log('Successfully connected ' + inNode + ' : ' +
                    inPort + ' <- ' + replacementNode + ' : ' + outPort);
            } else {
                if (inNode.type == 'MultiLayerWrite') {
                    $.log('Attempting standard api to connect the nodes...');
                    success = node.link(
                        replacementNode, outPort, inNode,
                        inPort, node.numberOfInputPorts(inNode) + 1);
                    if (success) {
                        $.log('Successfully connected ' + inNode + ' : ' +
                            inPort + ' <- ' + replacementNode + ' : ' + outPort);
                    }
                }
            }
            if (!success) {
                $.alert('Failed to connect ' + inNode + ' : ' +
                    inPort + ' <- ' + replacementNode + ' : ' + outPort);
                return false;
            }
        }
    }
};


TemplateLoader.prototype.askForColumnsUpdate = function() {
    // Ask user if they want to also update columns and
    // linked attributes here
    return ($.confirm(
        'Would you like to update in place and reconnect all \n' +
        'ins/outs, attributes, and columns?',
        'Update & Replace?\n' +
        'If you choose No, the version will only be loaded.',
        'Yes',
        'No'));
};

// add self to Pype Loaders
PypeHarmony.Loaders.TemplateLoader = new TemplateLoader();

19 pype/hosts/harmony/js/package.json Normal file

@@ -0,0 +1,19 @@
{
    "name": "pype-harmony",
    "version": "1.0.0",
    "description": "Avalon Harmony Host integration",
    "keywords": [
        "Pype",
        "Avalon",
        "Harmony",
        "pipeline"
    ],
    "license": "MIT",
    "main": "PypeHarmony.js",
    "scripts": {
        "test": "echo \"Error: no test specified\" && exit 1"
    },
    "devDependencies": {
        "eslint": "^7.11.0"
    }
}

28 pype/hosts/harmony/js/publish/CollectCurrentFile.js Normal file

@@ -0,0 +1,28 @@
/* global PypeHarmony:writable, include */
// ***************************************************************************
// * CollectCurrentFile *
// ***************************************************************************


// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony === 'undefined') {
    var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
    include(PYPE_HARMONY_JS + '/pype_harmony.js');
}


/**
 * @namespace
 * @classdesc Collect Current file
 */
var CollectCurrentFile = function() {};

CollectCurrentFile.prototype.collect = function() {
    return (
        scene.currentProjectPath() + '/' +
        scene.currentVersionName() + '.xstage'
    );
};

// add self to Pype Publish plugins
PypeHarmony.Publish.CollectCurrentFile = new CollectCurrentFile();

33 pype/hosts/harmony/js/publish/CollectPalettes.js Normal file

@@ -0,0 +1,33 @@
/* global PypeHarmony:writable, include */
// ***************************************************************************
// * CollectPalettes *
// ***************************************************************************


// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony === 'undefined') {
    var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
    include(PYPE_HARMONY_JS + '/pype_harmony.js');
}


/**
 * @namespace
 * @classdesc Palette collector JS code.
 */
var CollectPalettes = function() {};

CollectPalettes.prototype.getPalettes = function() {
    var palette_list = PaletteObjectManager.getScenePaletteList();

    var palettes = {};
    for(var i=0; i < palette_list.numPalettes; ++i) {
        var palette = palette_list.getPaletteByIndex(i);
        palettes[palette.getName()] = palette.id;
    }

    return palettes;
};

// add self to Pype Publish plugins
PypeHarmony.Publish.CollectPalettes = new CollectPalettes();

38 pype/hosts/harmony/js/publish/ExtractPalette.js Normal file

@@ -0,0 +1,38 @@
/* global PypeHarmony:writable, include */
// ***************************************************************************
// * ExtractPalette *
// ***************************************************************************


// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony === 'undefined') {
    var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
    include(PYPE_HARMONY_JS + '/pype_harmony.js');
}


/**
 * @namespace
 * @classdesc Code for extracting palettes.
 */
var ExtractPalette = function() {};


/**
 * Get palette from Harmony.
 * @function
 * @param {string} paletteId ID of palette to get.
 * @return {array} [paletteName, palettePath]
 */
ExtractPalette.prototype.getPalette = function(paletteId) {
    var palette_list = PaletteObjectManager.getScenePaletteList();
    var palette = palette_list.getPaletteById(paletteId);
    var palette_name = palette.getName();
    return [
        palette_name,
        (palette.getPath() + '/' + palette.getName() + '.plt')
    ];
};

// add self to Pype Publish plugins
PypeHarmony.Publish.ExtractPalette = new ExtractPalette();

54 pype/hosts/harmony/js/publish/ExtractTemplate.js Normal file

@@ -0,0 +1,54 @@
/* global PypeHarmony:writable, include */
// ***************************************************************************
// * ExtractTemplate *
// ***************************************************************************


// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony === 'undefined') {
    var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
    include(PYPE_HARMONY_JS + '/pype_harmony.js');
}


/**
 * @namespace
 * @classdesc Code for extracting templates.
 */
var ExtractTemplate = function() {};


/**
 * Get backdrops for given node.
 * @function
 * @param {string} probeNode Node path to probe for backdrops.
 * @return {array} list of backdrops.
 */
ExtractTemplate.prototype.getBackdropsByNode = function(probeNode) {
    var backdrops = Backdrop.backdrops('Top');
    var valid_backdrops = [];
    for(var i=0; i<backdrops.length; i++)
    {
        var position = backdrops[i].position;

        var x_valid = false;
        var node_x = node.coordX(probeNode);
        if (position.x < node_x && node_x < (position.x + position.w)){
            x_valid = true;
        }

        var y_valid = false;
        var node_y = node.coordY(probeNode);
        if (position.y < node_y && node_y < (position.y + position.h)){
            y_valid = true;
        }

        if (x_valid && y_valid){
            valid_backdrops.push(backdrops[i]);
        }
    }
    return valid_backdrops;
};

// add self to Pype Publish plugins
PypeHarmony.Publish.ExtractTemplate = new ExtractTemplate();

@@ -4,6 +4,7 @@ import sys
import hiero
import pyblish.api
import avalon.api as avalon
import avalon.io
from avalon.vendor.Qt import (QtWidgets, QtGui)
import pype.api as pype
from pype.api import Logger, Anatomy

@@ -58,7 +59,8 @@ def sync_avalon_data_to_workfile():
    project.setProjectRoot(active_project_root)

    # get project data from avalon db
    project_data = pype.get_project()["data"]
    project_doc = avalon.io.find_one({"type": "project"})
    project_data = project_doc["data"]

    log.debug("project_data: {}".format(project_data))

@@ -564,6 +564,7 @@ class ExpectedFilesVray(AExpectedFiles):
        if default_ext == "exr (multichannel)" or default_ext == "exr (deep)":
            default_ext = "exr"
        layer_data["defaultExt"] = default_ext
        layer_data["padding"] = cmds.getAttr("vraySettings.fileNamePadding")
        return layer_data

    def get_files(self):

@@ -614,11 +615,14 @@ class ExpectedFilesVray(AExpectedFiles):
        if default_ext == "exr (multichannel)" or default_ext == "exr (deep)":
            default_ext = "exr"

        # filter all namespace prefixed AOVs - they are pulled in from
        # references and are not rendered.
        vr_aovs = [
            n
            for n in cmds.ls(
                type=["VRayRenderElement", "VRayRenderElementSet"]
            )
            if len(n.split(":")) == 1
        ]

        for aov in vr_aovs:

@@ -8,6 +8,7 @@ import math
import bson
import json
import logging
import itertools
import contextlib
from collections import OrderedDict, defaultdict
from math import ceil

@@ -122,6 +123,12 @@ def float_round(num, places=0, direction=ceil):
    return direction(num * (10**places)) / float(10**places)


def pairwise(iterable):
    """s -> (s0,s1), (s2,s3), (s4, s5), ..."""
    a = iter(iterable)
    return zip(a, a)


def unique(name):
    assert isinstance(name, string_types), "`name` must be string"
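
`pairwise` above folds Maya's flat `[dest, src, dest, src, ...]` connection list into 2-tuples, which the `empty_sets` hunk below consumes. A worked example (attribute names are hypothetical):

```python
# Worked example of pairwise; attribute names are hypothetical.
flat = ["setA.dnSetMembers", "pCube1.instObjGroups",
        "setB.dnSetMembers", "pSphere1.instObjGroups"]
print(list(pairwise(flat)))
# [('setA.dnSetMembers', 'pCube1.instObjGroups'),
#  ('setB.dnSetMembers', 'pSphere1.instObjGroups')]
```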

@@ -419,12 +426,12 @@ def empty_sets(sets, force=False):
                            plugs=True,
                            connections=True) or []
        original_connections.extend(connections)
        for dest, src in lib.pairwise(connections):
        for dest, src in pairwise(connections):
            cmds.disconnectAttr(src, dest)
        yield
    finally:

        for dest, src in lib.pairwise(original_connections):
        for dest, src in pairwise(original_connections):
            cmds.connectAttr(src, dest)

        # Restore original members

@@ -1857,8 +1864,8 @@ def set_context_settings():
    """

    # Todo (Wijnand): apply renderer and resolution of project

    project_data = lib.get_project()["data"]
    project_doc = io.find_one({"type": "project"})
    project_data = project_doc["data"]
    asset_data = lib.get_asset()["data"]

    # Set project fps

@@ -195,7 +195,7 @@ def format_anatomy(data):
    if not version:
        file = script_name()
        data["version"] = pype.get_version_from_path(file)
    project_document = pype.get_project()
    project_document = io.find_one({"type": "project"})
    data.update({
        "subset": data["avalon"]["subset"],
        "asset": data["avalon"]["asset"],

@@ -1 +1,31 @@
kwargs = None
import os
import logging

from avalon.tvpaint.communication_server import register_localization_file
import avalon.api
import pyblish.api
from pype import PLUGINS_DIR

log = logging.getLogger("pype.hosts.tvpaint")

PUBLISH_PATH = os.path.join(PLUGINS_DIR, "tvpaint", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "tvpaint", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "tvpaint", "create")


def install():
    log.info("Pype - Installing TVPaint integration")
    current_dir = os.path.dirname(os.path.abspath(__file__))
    localization_file = os.path.join(current_dir, "avalon.loc")
    register_localization_file(localization_file)

    pyblish.api.register_plugin_path(PUBLISH_PATH)
    avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH)
    avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH)


def uninstall():
    log.info("Pype - Uninstalling TVPaint integration")
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH)
    avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH)
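
`install()`/`uninstall()` follow Avalon's host-module contract; a sketch of how the pair is normally driven (module path per this file's location, and the exact `avalon.api` call sequence is an assumption):

```python
# Sketch: Avalon runs install()/uninstall() when the host module is
# installed and torn down.
import avalon.api
from pype.hosts import tvpaint

avalon.api.install(tvpaint)   # calls tvpaint.install() above
avalon.api.uninstall()        # calls tvpaint.uninstall() on teardown
```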

37 pype/hosts/tvpaint/avalon.loc Normal file

@@ -0,0 +1,37 @@
#-------------------------------------------------
#------------ AVALON PLUGIN LOC FILE -------------
#-------------------------------------------------

#Language : English
#Version : 1.0
#Date : 27/10/2020

#-------------------------------------------------
#------------ COMMON -----------------------------
#-------------------------------------------------

$100 "Pype Tools"

$10010 "Workfiles"
$10020 "Load"
$10030 "Create"
$10040 "Scene inventory"
$10050 "Publish"
$10060 "Library"

#------------ Help -------------------------------

$20010 "Open workfiles tool"
$20020 "Open loader tool"
$20030 "Open creator tool"
$20040 "Open scene inventory tool"
$20050 "Open publisher"
$20060 "Open library loader tool"

#------------ Errors -----------------------------

$30001 "Can't Open Requester !"

#-------------------------------------------------
#------------ END --------------------------------
#-------------------------------------------------
1899
pype/lib.py
1899
pype/lib.py
File diff suppressed because it is too large
Load diff
71
pype/lib/__init__.py
Normal file
71
pype/lib/__init__.py
Normal file
|
|
@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
"""Pype lib module."""

from .deprecated import (
    get_avalon_database,
    set_io_database
)

from .avalon_context import (
    is_latest,
    any_outdated,
    get_asset,
    get_hierarchy,
    get_linked_assets,
    get_latest_version,
    BuildWorkfile
)

from .hooks import PypeHook, execute_hook

from .applications import (
    ApplicationLaunchFailed,
    launch_application,
    ApplicationAction,
    _subprocess
)

from .plugin_tools import filter_pyblish_plugins, source_hash

from .path_tools import (
    version_up,
    get_version_from_path,
    get_last_version_from_path,
    get_paths_from_environ,
    get_ffmpeg_tool_path
)

from .ffmpeg_utils import ffprobe_streams

__all__ = [
    "get_avalon_database",
    "set_io_database",

    "is_latest",
    "any_outdated",
    "get_asset",
    "get_hierarchy",
    "get_linked_assets",
    "get_latest_version",
    "BuildWorkfile",

    "PypeHook",
    "execute_hook",

    "ApplicationLaunchFailed",
    "launch_application",
    "ApplicationAction",

    "filter_pyblish_plugins",

    "version_up",
    "get_version_from_path",
    "get_last_version_from_path",
    "get_paths_from_environ",
    "get_ffmpeg_tool_path",

    "ffprobe_streams",

    "source_hash",
    "_subprocess"
]
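The package keeps the public names importable from `pype.lib` directly, so call sites written against the old single-file `pype/lib.py` keep working after the refactor. For example:

    from pype.lib import version_up, get_asset, ffprobe_streams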
457
pype/lib/applications.py
Normal file

@@ -0,0 +1,457 @@
import os
import sys
import getpass
import copy
import platform
import logging
import subprocess

import acre

import avalon.lib

from ..api import Anatomy, Logger, config
from .hooks import execute_hook
from .deprecated import get_avalon_database

log = logging.getLogger(__name__)


class ApplicationLaunchFailed(Exception):
    pass


def launch_application(project_name, asset_name, task_name, app_name):
    """Launch host application with filling required environments.

    TODO(iLLiCiT): This should be split into more parts.
    """
    # `get_avalon_database` is in Pype 3 replaced with using `AvalonMongoDB`
    database = get_avalon_database()
    project_document = database[project_name].find_one({"type": "project"})
    asset_document = database[project_name].find_one({
        "type": "asset",
        "name": asset_name
    })

    asset_doc_parents = asset_document["data"].get("parents")
    hierarchy = "/".join(asset_doc_parents)

    app_def = avalon.lib.get_application(app_name)
    app_label = app_def.get("ftrack_label", app_def.get("label", app_name))

    host_name = app_def["application_dir"]
    # Workfile data collection may be special function?
    data = {
        "project": {
            "name": project_document["name"],
            "code": project_document["data"].get("code")
        },
        "task": task_name,
        "asset": asset_name,
        "app": host_name,
        "hierarchy": hierarchy
    }

    try:
        anatomy = Anatomy(project_name)
        anatomy_filled = anatomy.format(data)
        workdir = os.path.normpath(anatomy_filled["work"]["folder"])

    except Exception as exc:
        raise ApplicationLaunchFailed(
            "Error in anatomy.format: {}".format(str(exc))
        )

    try:
        os.makedirs(workdir)
    except FileExistsError:
        pass

    last_workfile_path = None
    extensions = avalon.api.HOST_WORKFILE_EXTENSIONS.get(host_name)
    if extensions:
        # Find last workfile
        file_template = anatomy.templates["work"]["file"]
        data.update({
            "version": 1,
            "user": os.environ.get("PYPE_USERNAME") or getpass.getuser(),
            "ext": extensions[0]
        })

        last_workfile_path = avalon.api.last_workfile(
            workdir, file_template, data, extensions, True
        )

    # set environments for Avalon
    prep_env = copy.deepcopy(os.environ)
    prep_env.update({
        "AVALON_PROJECT": project_name,
        "AVALON_ASSET": asset_name,
        "AVALON_TASK": task_name,
        "AVALON_APP": host_name,
        "AVALON_APP_NAME": app_name,
        "AVALON_HIERARCHY": hierarchy,
        "AVALON_WORKDIR": workdir
    })

    start_last_workfile = avalon.api.should_start_last_workfile(
        project_name, host_name, task_name
    )
    # Store boolean as "0"(False) or "1"(True)
    prep_env["AVALON_OPEN_LAST_WORKFILE"] = (
        str(int(bool(start_last_workfile)))
    )

    if (
        start_last_workfile
        and last_workfile_path
        and os.path.exists(last_workfile_path)
    ):
        prep_env["AVALON_LAST_WORKFILE"] = last_workfile_path

    prep_env.update(anatomy.roots_obj.root_environments())

    # collect all the 'environment' attributes from parents
    tools_attr = [prep_env["AVALON_APP"], prep_env["AVALON_APP_NAME"]]
    tools_env = asset_document["data"].get("tools_env") or []
    tools_attr.extend(tools_env)

    tools_env = acre.get_tools(tools_attr)
    env = acre.compute(tools_env)
    env = acre.merge(env, current_env=dict(prep_env))

    # Get path to execute
    st_temp_path = os.environ["PYPE_CONFIG"]
    os_plat = platform.system().lower()

    # Path to folder with launchers
    path = os.path.join(st_temp_path, "launchers", os_plat)

    # Full path to executable launcher
    execfile = None

    launch_hook = app_def.get("launch_hook")
    if launch_hook:
        log.info("launching hook: {}".format(launch_hook))
        ret_val = execute_hook(launch_hook, env=env)
        if not ret_val:
            raise ApplicationLaunchFailed(
                "Hook didn't finish successfully {}".format(app_label)
            )

    if sys.platform == "win32":
        for ext in os.environ["PATHEXT"].split(os.pathsep):
            fpath = os.path.join(path.strip('"'), app_def["executable"] + ext)
            if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
                execfile = fpath
                break

        # Run the software only if an executable was found
        if execfile is None:
            raise ApplicationLaunchFailed(
                "We didn't find launcher for {}".format(app_label)
            )

        popen = avalon.lib.launch(
            executable=execfile, args=[], environment=env
        )

    elif (
        sys.platform.startswith("linux")
        or sys.platform.startswith("darwin")
    ):
        execfile = os.path.join(path.strip('"'), app_def["executable"])
        # Run the software only if an executable was found
        if execfile is None:
            raise ApplicationLaunchFailed(
                "We didn't find launcher for {}".format(app_label)
            )

        if not os.path.isfile(execfile):
            raise ApplicationLaunchFailed(
                "Launcher doesn't exist - {}".format(execfile)
            )

        try:
            fp = open(execfile)
        except PermissionError as perm_exc:
            raise ApplicationLaunchFailed(
                "Access denied on launcher {} - {}".format(execfile, perm_exc)
            )

        fp.close()
        # check executable permission
        if not os.access(execfile, os.X_OK):
            raise ApplicationLaunchFailed(
                "No executable permission - {}".format(execfile)
            )

        popen = avalon.lib.launch(  # noqa: F841
            "/usr/bin/env", args=["bash", execfile], environment=env
        )
    return popen
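# Usage sketch (hypothetical names; the project, asset, task and the
# application definition must already exist in the Avalon database and
# launcher presets):
#
#     popen = launch_application(
#         "MyProject", "sh010", "compositing", "maya_2020"
#     )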


class ApplicationAction(avalon.api.Action):
    """Default application launcher

    This is a convenience application Action that when "config" refers to a
    parsed application `.toml` this can launch the application.

    """
    _log = None
    config = None
    group = None
    variant = None
    required_session_keys = (
        "AVALON_PROJECT",
        "AVALON_ASSET",
        "AVALON_TASK"
    )

    @property
    def log(self):
        if self._log is None:
            self._log = Logger().get_logger(self.__class__.__name__)
        return self._log

    def is_compatible(self, session):
        for key in self.required_session_keys:
            if key not in session:
                return False
        return True

    def process(self, session, **kwargs):
        """Process the full Application action"""

        project_name = session["AVALON_PROJECT"]
        asset_name = session["AVALON_ASSET"]
        task_name = session["AVALON_TASK"]
        launch_application(
            project_name, asset_name, task_name, self.name
        )

        self._ftrack_after_launch_procedure(
            project_name, asset_name, task_name
        )

    def _ftrack_after_launch_procedure(
        self, project_name, asset_name, task_name
    ):
        # TODO move to launch hook
        required_keys = ("FTRACK_SERVER", "FTRACK_API_USER", "FTRACK_API_KEY")
        for key in required_keys:
            if not os.environ.get(key):
                self.log.debug((
                    "Missing required environment \"{}\""
                    " for Ftrack after launch procedure."
                ).format(key))
                return

        try:
            import ftrack_api
            session = ftrack_api.Session(auto_connect_event_hub=True)
            self.log.debug("Ftrack session created")
        except Exception:
            self.log.warning("Couldn't create Ftrack session")
            return

        try:
            entity = self._find_ftrack_task_entity(
                session, project_name, asset_name, task_name
            )
            self._ftrack_status_change(session, entity, project_name)
            self._start_timer(session, entity, ftrack_api)
        except Exception:
            self.log.warning(
                "Couldn't finish Ftrack procedure.", exc_info=True
            )
            return

        finally:
            session.close()

    def _find_ftrack_task_entity(
        self, session, project_name, asset_name, task_name
    ):
        project_entity = session.query(
            "Project where full_name is \"{}\"".format(project_name)
        ).first()
        if not project_entity:
            self.log.warning(
                "Couldn't find project \"{}\" in Ftrack.".format(project_name)
            )
            return

        potential_task_entities = session.query((
            "TypedContext where parent.name is \"{}\" and project_id is \"{}\""
        ).format(asset_name, project_entity["id"])).all()
        filtered_entities = []
        for _entity in potential_task_entities:
            if (
                _entity.entity_type.lower() == "task"
                and _entity["name"] == task_name
            ):
                filtered_entities.append(_entity)

        if not filtered_entities:
            self.log.warning((
                "Couldn't find task \"{}\" under parent \"{}\" in Ftrack."
            ).format(task_name, asset_name))
            return

        if len(filtered_entities) > 1:
            self.log.warning((
                "Found more than one task \"{}\""
                " under parent \"{}\" in Ftrack."
            ).format(task_name, asset_name))
            return

        return filtered_entities[0]

    def _ftrack_status_change(self, session, entity, project_name):
        presets = config.get_presets(project_name)["ftrack"]["ftrack_config"]
        statuses = presets.get("status_update")
        if not statuses:
            return

        actual_status = entity["status"]["name"].lower()
        already_tested = set()
        ent_path = "/".join(
            [ent["name"] for ent in entity["link"]]
        )
        while True:
            next_status_name = None
            for key, value in statuses.items():
                if key in already_tested:
                    continue
                if actual_status in value or "_any_" in value:
                    if key != "_ignore_":
                        next_status_name = key
                        already_tested.add(key)
                    break
                already_tested.add(key)

            if next_status_name is None:
                break

            try:
                query = "Status where name is \"{}\"".format(
                    next_status_name
                )
                status = session.query(query).one()

                entity["status"] = status
                session.commit()
                self.log.debug("Changing status to \"{}\" <{}>".format(
                    next_status_name, ent_path
                ))
                break

            except Exception:
                session.rollback()
                msg = (
                    "Status \"{}\" in presets wasn't found"
                    " on Ftrack entity type \"{}\""
                ).format(next_status_name, entity.entity_type)
                self.log.warning(msg)

    def _start_timer(self, session, entity, _ftrack_api):
        self.log.debug("Triggering timer start.")

        user_entity = session.query("User where username is \"{}\"".format(
            os.environ["FTRACK_API_USER"]
        )).first()
        if not user_entity:
            self.log.warning(
                "Couldn't find user with username \"{}\" in Ftrack".format(
                    os.environ["FTRACK_API_USER"]
                )
            )
            return

        source = {
            "user": {
                "id": user_entity["id"],
                "username": user_entity["username"]
            }
        }
        event_data = {
            "actionIdentifier": "start.timer",
            "selection": [{"entityId": entity["id"], "entityType": "task"}]
        }
        session.event_hub.publish(
            _ftrack_api.event.base.Event(
                topic="ftrack.action.launch",
                data=event_data,
                source=source
            ),
            on_error="ignore"
        )
        self.log.debug("Timer start triggered successfully.")


# Special naming case for subprocess since it's a built-in module.
def _subprocess(*args, **kwargs):
    """Convenience method for getting output errors for subprocess.

    Entered arguments and keyword arguments are passed to subprocess Popen.

    Args:
        *args: Variable length argument list passed to Popen.
        **kwargs: Arbitrary keyword arguments passed to Popen. It is possible
            to pass a `logging.Logger` object under "logger" if you want to
            use a different logger than lib's logger.

    Returns:
        str: Full output of subprocess concatenated stdout and stderr.

    Raises:
        RuntimeError: Exception is raised if process finished with nonzero
            return code.
    """

    # Get environments from kwarg or use current process environments if they
    # were not passed.
    env = kwargs.get("env") or os.environ
    # Make sure environment contains only strings
    filtered_env = {k: str(v) for k, v in env.items()}

    # Use lib's logger if it was not passed with kwargs.
    logger = kwargs.pop("logger", log)

    # set overrides
    kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
    kwargs['stderr'] = kwargs.get('stderr', subprocess.PIPE)
    kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE)
    kwargs['env'] = filtered_env

    proc = subprocess.Popen(*args, **kwargs)

    full_output = ""
    _stdout, _stderr = proc.communicate()
    if _stdout:
        _stdout = _stdout.decode("utf-8")
        full_output += _stdout
        logger.debug(_stdout)

    if _stderr:
        _stderr = _stderr.decode("utf-8")
        # Add additional line break if output already contains stdout
        if full_output:
            full_output += "\n"
        full_output += _stderr
        logger.warning(_stderr)

    if proc.returncode != 0:
        exc_msg = "Executing arguments was not successful: \"{}\"".format(args)
        if _stdout:
            exc_msg += "\n\nOutput:\n{}".format(_stdout)

        if _stderr:
            exc_msg += "Error:\n{}".format(_stderr)

        raise RuntimeError(exc_msg)

    return full_output
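A minimal usage sketch for `_subprocess` (the executable and arguments here are only illustrative; a nonzero return code raises RuntimeError with the captured output):

    from pype.lib import _subprocess

    output = _subprocess(["ffmpeg", "-version"])
    print(output)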
870
pype/lib/avalon_context.py
Normal file

@@ -0,0 +1,870 @@
import os
import json
import re
import logging
import collections

from avalon import io, pipeline
from ..api import config
import avalon.api

log = logging.getLogger("AvalonContext")


def is_latest(representation):
    """Return whether the representation is from latest version

    Args:
        representation (dict): The representation document from the database.

    Returns:
        bool: Whether the representation is of latest version.

    """

    version = io.find_one({"_id": representation['parent']})
    if version["type"] == "master_version":
        return True

    # Get highest version under the parent
    highest_version = io.find_one({
        "type": "version",
        "parent": version["parent"]
    }, sort=[("name", -1)], projection={"name": True})

    if version['name'] == highest_version['name']:
        return True
    else:
        return False


def any_outdated():
    """Return whether the current scene has any outdated content"""

    checked = set()
    host = avalon.api.registered_host()
    for container in host.ls():
        representation = container['representation']
        if representation in checked:
            continue

        representation_doc = io.find_one(
            {
                "_id": io.ObjectId(representation),
                "type": "representation"
            },
            projection={"parent": True}
        )
        if representation_doc and not is_latest(representation_doc):
            return True
        elif not representation_doc:
            log.debug("Container '{objectName}' has an invalid "
                      "representation, it is missing in the "
                      "database".format(**container))

        checked.add(representation)
    return False


def get_asset(asset_name=None):
    """Return asset document from database by its name.

    Does not handle duplicate asset names!

    Args:
        asset_name (str)

    Returns:
        (MongoDB document)
    """
    if not asset_name:
        asset_name = avalon.api.Session["AVALON_ASSET"]

    asset_document = io.find_one({
        "name": asset_name,
        "type": "asset"
    })

    if not asset_document:
        raise TypeError("Entity \"{}\" was not found in DB".format(asset_name))

    return asset_document


def get_hierarchy(asset_name=None):
    """
    Obtain asset hierarchy path string from mongo db

    Args:
        asset_name (str)

    Returns:
        (string): asset hierarchy path

    """
    if not asset_name:
        asset_name = io.Session.get("AVALON_ASSET", os.environ["AVALON_ASSET"])

    asset_entity = io.find_one({
        "type": 'asset',
        "name": asset_name
    })

    not_set = "PARENTS_NOT_SET"
    entity_parents = asset_entity.get("data", {}).get("parents", not_set)

    # If entity already has parents then just return them joined
    if entity_parents != not_set:
        return "/".join(entity_parents)

    # Else query parents through visualParents and store result to entity
    hierarchy_items = []
    entity = asset_entity
    while True:
        parent_id = entity.get("data", {}).get("visualParent")
        if not parent_id:
            break
        entity = io.find_one({"_id": parent_id})
        hierarchy_items.append(entity["name"])

    # Add parents to entity data for next query
    entity_data = asset_entity.get("data", {})
    entity_data["parents"] = hierarchy_items
    io.update_many(
        {"_id": asset_entity["_id"]},
        {"$set": {"data": entity_data}}
    )

    return "/".join(hierarchy_items)


def get_linked_assets(asset_entity):
    """Return linked assets for `asset_entity` from DB

    Args:
        asset_entity (dict): asset document from DB

    Returns:
        (list) of MongoDB documents
    """
    inputs = asset_entity["data"].get("inputs", [])
    inputs = [io.find_one({"_id": x}) for x in inputs]
    return inputs


def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
    """Retrieve latest version from `asset_name` and `subset_name`.

    Do not use if you want to query more than a few latest versions, as this
    method queries the database three times per call. For those cases it is
    better to use a more efficient approach, e.g. with help of aggregations.

    Args:
        asset_name (str): Name of asset.
        subset_name (str): Name of subset.
        dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection
            with Session.
        project_name (str, optional): Find latest version in specific project.

    Returns:
        None: If asset, subset or version were not found.
        dict: Last version document for entered asset and subset.
    """

    if not dbcon:
        log.debug("Using `avalon.io` for query.")
        dbcon = io
        # Make sure is installed
        io.install()

    if project_name and project_name != dbcon.Session.get("AVALON_PROJECT"):
        # `avalon.io` has only `_database` attribute
        # but `AvalonMongoDB` has `database`
        database = getattr(dbcon, "database", dbcon._database)
        collection = database[project_name]
    else:
        project_name = dbcon.Session.get("AVALON_PROJECT")
        collection = dbcon

    log.debug((
        "Getting latest version for Project: \"{}\" Asset: \"{}\""
        " and Subset: \"{}\""
    ).format(project_name, asset_name, subset_name))

    # Query asset document id by asset name
    asset_doc = collection.find_one(
        {"type": "asset", "name": asset_name},
        {"_id": True}
    )
    if not asset_doc:
        log.info(
            "Asset \"{}\" was not found in Database.".format(asset_name)
        )
        return None

    subset_doc = collection.find_one(
        {"type": "subset", "name": subset_name, "parent": asset_doc["_id"]},
        {"_id": True}
    )
    if not subset_doc:
        log.info(
            "Subset \"{}\" was not found in Database.".format(subset_name)
        )
        return None

    version_doc = collection.find_one(
        {"type": "version", "parent": subset_doc["_id"]},
        sort=[("name", -1)],
    )
    if not version_doc:
        log.info(
            "Subset \"{}\" does not have any version yet.".format(subset_name)
        )
        return None
    return version_doc
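# Usage sketch (hypothetical asset/subset names from the active project):
#
#     version_doc = get_latest_version("sh010", "modelMain")
#     if version_doc:
#         print(version_doc["name"])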


class BuildWorkfile:
    """Wrapper for build workfile process.

    Load representations for current context by build presets. Build presets
    are host related, since each host has its own loaders.
    """

    log = logging.getLogger("BuildWorkfile")

    @staticmethod
    def map_subsets_by_family(subsets):
        subsets_by_family = collections.defaultdict(list)
        for subset in subsets:
            family = subset["data"].get("family")
            if not family:
                families = subset["data"].get("families")
                if not families:
                    continue
                family = families[0]

            subsets_by_family[family].append(subset)
        return subsets_by_family

    def process(self):
        """Main method of this wrapper.

        Building of the workfile is triggered and it is possible to implement
        post processing of loaded containers if necessary.
        """
        containers = self.build_workfile()

        return containers

    def build_workfile(self):
        """Prepares and loads containers into the workfile.

        Loads latest versions of current and linked assets to workfile by logic
        stored in Workfile profiles from presets. Profiles are set by host,
        filtered by current task name and used by families.

        Each family can specify representation names and loaders for
        representations, and the first available and successfully loaded
        representation is returned as container.

        At the end you'll get list of loaded containers per each asset.

        loaded_containers [{
            "asset_entity": <AssetEntity1>,
            "containers": [<Container1>, <Container2>, ...]
        }, {
            "asset_entity": <AssetEntity2>,
            "containers": [<Container3>, ...]
        }, {
            ...
        }]
        """
        # Get current asset name and entity
        current_asset_name = io.Session["AVALON_ASSET"]
        current_asset_entity = io.find_one({
            "type": "asset",
            "name": current_asset_name
        })

        # Skip if asset was not found
        if not current_asset_entity:
            print("Asset entity with name `{}` was not found".format(
                current_asset_name
            ))
            return

        # Prepare available loaders
        loaders_by_name = {}
        for loader in avalon.api.discover(avalon.api.Loader):
            loader_name = loader.__name__
            if loader_name in loaders_by_name:
                raise KeyError(
                    "Duplicated loader name {0}!".format(loader_name)
                )
            loaders_by_name[loader_name] = loader

        # Skip if there are no loaders
        if not loaders_by_name:
            self.log.warning("There are no registered loaders.")
            return

        # Get current task name
        current_task_name = io.Session["AVALON_TASK"]

        # Load workfile presets for task
        self.build_presets = self.get_build_presets(current_task_name)

        # Skip if there are no presets for task
        if not self.build_presets:
            self.log.warning(
                "Current task `{}` does not have any loading preset.".format(
                    current_task_name
                )
            )
            return

        # Get presets for loading current asset
        current_context_profiles = self.build_presets.get("current_context")
        # Get presets for loading linked assets
        link_context_profiles = self.build_presets.get("linked_assets")
        # Skip if both are missing
        if not current_context_profiles and not link_context_profiles:
            self.log.warning(
                "Current task `{}` has empty loading preset.".format(
                    current_task_name
                )
            )
            return

        elif not current_context_profiles:
            self.log.warning((
                "Current task `{}` doesn't have any loading"
                " preset for its context."
            ).format(current_task_name))

        elif not link_context_profiles:
            self.log.warning((
                "Current task `{}` doesn't have any"
                " loading preset for its linked assets."
            ).format(current_task_name))

        # Prepare assets to process by workfile presets
        assets = []
        current_asset_id = None
        if current_context_profiles:
            # Add current asset entity if preset has current context set
            assets.append(current_asset_entity)
            current_asset_id = current_asset_entity["_id"]

        if link_context_profiles:
            # Find and append linked assets if preset has set linked mapping
            link_assets = get_linked_assets(current_asset_entity)
            if link_assets:
                assets.extend(link_assets)

        # Skip if there are no assets. This can happen if only linked mapping
        # is set and there are no links for this asset.
        if not assets:
            self.log.warning(
                "Asset does not have linked assets. Nothing to process."
            )
            return

        # Prepare entities from database for assets
        prepared_entities = self._collect_last_version_repres(assets)

        # Load containers by prepared entities and presets
        loaded_containers = []
        # - Current asset containers
        if current_asset_id and current_asset_id in prepared_entities:
            current_context_data = prepared_entities.pop(current_asset_id)
            loaded_data = self.load_containers_by_asset_data(
                current_context_data, current_context_profiles, loaders_by_name
            )
            if loaded_data:
                loaded_containers.append(loaded_data)

        # - Linked assets container
        for linked_asset_data in prepared_entities.values():
            loaded_data = self.load_containers_by_asset_data(
                linked_asset_data, link_context_profiles, loaders_by_name
            )
            if loaded_data:
                loaded_containers.append(loaded_data)

        # Return list of loaded containers
        return loaded_containers

    def get_build_presets(self, task_name):
        """ Returns presets to build workfile for task name.

        Presets are loaded for current project set in
        io.Session["AVALON_PROJECT"], filtered by registered host
        and entered task name.

        Args:
            task_name (str): Task name used for filtering build presets.

        Returns:
            (dict): preset per entered task name
        """
        host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1]
        presets = config.get_presets(io.Session["AVALON_PROJECT"])
        # Get presets for host
        build_presets = (
            presets["plugins"]
            .get(host_name, {})
            .get("workfile_build")
        )
        if not build_presets:
            return

        task_name_low = task_name.lower()
        per_task_preset = None
        for preset in build_presets:
            preset_tasks = preset.get("tasks") or []
            preset_tasks_low = [task.lower() for task in preset_tasks]
            if task_name_low in preset_tasks_low:
                per_task_preset = preset
                break

        return per_task_preset

    def _filter_build_profiles(self, build_profiles, loaders_by_name):
        """ Filter build profiles by loaders and prepare process data.

        Valid profile must have "loaders", "families" and "repre_names" keys
        with valid values.
        - "loaders" expects list of strings representing possible loaders.
        - "families" expects list of strings for filtering
          by main subset family.
        - "repre_names" expects list of strings for filtering by
          representation name.

        Lowered "families" and "repre_names" are prepared for each profile with
        all required keys.

        Args:
            build_profiles (dict): Profiles for building workfile.
            loaders_by_name (dict): Available loaders per name.

        Returns:
            (list): Filtered and prepared profiles.
        """
        valid_profiles = []
        for profile in build_profiles:
            # Check loaders
            profile_loaders = profile.get("loaders")
            if not profile_loaders:
                self.log.warning((
                    "Build profile has missing loaders configuration: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Check if any loader is available
            loaders_match = False
            for loader_name in profile_loaders:
                if loader_name in loaders_by_name:
                    loaders_match = True
                    break

            if not loaders_match:
                self.log.warning((
                    "None of the loaders from Build profile are available: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Check families
            profile_families = profile.get("families")
            if not profile_families:
                self.log.warning((
                    "Build profile is missing families configuration: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Check representation names
            profile_repre_names = profile.get("repre_names")
            if not profile_repre_names:
                self.log.warning((
                    "Build profile is missing"
                    " representation names filtering: {0}"
                ).format(json.dumps(profile, indent=4)))
                continue

            # Prepare lowered families and representation names
            profile["families_lowered"] = [
                fam.lower() for fam in profile_families
            ]
            profile["repre_names_lowered"] = [
                name.lower() for name in profile_repre_names
            ]

            valid_profiles.append(profile)

        return valid_profiles

    def _prepare_profile_for_subsets(self, subsets, profiles):
        """Select profile for each subset by its data.

        Profiles are filtered for each subset individually.
        Profile is filtered by subset's family, optionally by name regex and
        representation names set in profile.
        It is possible to not find matching profile for subset, in that case
        subset is skipped and it is possible that none of subsets have
        matching profile.

        Args:
            subsets (list): Subset documents.
            profiles (dict): Build profiles.

        Returns:
            (dict) Profile by subset's id.
        """
        # Prepare subsets
        subsets_by_family = self.map_subsets_by_family(subsets)

        profiles_per_subset_id = {}
        for family, subsets in subsets_by_family.items():
            family_low = family.lower()
            for profile in profiles:
                # Skip profile if does not contain family
                if family_low not in profile["families_lowered"]:
                    continue

                # Precompile name filters as regexes
                profile_regexes = profile.get("subset_name_filters")
                if profile_regexes:
                    _profile_regexes = []
                    for regex in profile_regexes:
                        _profile_regexes.append(re.compile(regex))
                    profile_regexes = _profile_regexes

                # TODO prepare regex compilation
                for subset in subsets:
                    # Verify regex filtering (optional)
                    if profile_regexes:
                        valid = False
                        for pattern in profile_regexes:
                            if re.match(pattern, subset["name"]):
                                valid = True
                                break

                        if not valid:
                            continue

                    profiles_per_subset_id[subset["_id"]] = profile

                # break profiles loop on finding the first matching profile
                break
        return profiles_per_subset_id

    def load_containers_by_asset_data(
        self, asset_entity_data, build_profiles, loaders_by_name
    ):
        """Load containers for entered asset entity by Build profiles.

        Args:
            asset_entity_data (dict): Prepared data with subsets, last version
                and representations for specific asset.
            build_profiles (dict): Build profiles.
            loaders_by_name (dict): Available loaders per name.

        Returns:
            (dict) Output contains asset document and loaded containers.
        """

        # Make sure all data are not empty
        if not asset_entity_data or not build_profiles or not loaders_by_name:
            return

        asset_entity = asset_entity_data["asset_entity"]

        valid_profiles = self._filter_build_profiles(
            build_profiles, loaders_by_name
        )
        if not valid_profiles:
            self.log.warning(
                "There are no valid Workfile profiles. Skipping process."
            )
            return

        self.log.debug("Valid Workfile profiles: {}".format(valid_profiles))

        subsets_by_id = {}
        version_by_subset_id = {}
        repres_by_version_id = {}
        for subset_id, in_data in asset_entity_data["subsets"].items():
            subset_entity = in_data["subset_entity"]
            subsets_by_id[subset_entity["_id"]] = subset_entity

            version_data = in_data["version"]
            version_entity = version_data["version_entity"]
            version_by_subset_id[subset_id] = version_entity
            repres_by_version_id[version_entity["_id"]] = (
                version_data["repres"]
            )

        if not subsets_by_id:
            self.log.warning("There are no subsets for asset {0}".format(
                asset_entity["name"]
            ))
            return

        profiles_per_subset_id = self._prepare_profile_for_subsets(
            subsets_by_id.values(), valid_profiles
        )
        if not profiles_per_subset_id:
            self.log.warning("There are no valid subsets.")
            return

        valid_repres_by_subset_id = collections.defaultdict(list)
        for subset_id, profile in profiles_per_subset_id.items():
            profile_repre_names = profile["repre_names_lowered"]

            version_entity = version_by_subset_id[subset_id]
            version_id = version_entity["_id"]
            repres = repres_by_version_id[version_id]
            for repre in repres:
                repre_name_low = repre["name"].lower()
                if repre_name_low in profile_repre_names:
                    valid_repres_by_subset_id[subset_id].append(repre)

        # DEBUG message
        msg = "Valid representations for Asset: `{}`".format(
            asset_entity["name"]
        )
        for subset_id, repres in valid_repres_by_subset_id.items():
            subset = subsets_by_id[subset_id]
            msg += "\n# Subset Name/ID: `{}`/{}".format(
                subset["name"], subset_id
            )
            for repre in repres:
                msg += "\n## Repre name: `{}`".format(repre["name"])

        self.log.debug(msg)

        containers = self._load_containers(
            valid_repres_by_subset_id, subsets_by_id,
            profiles_per_subset_id, loaders_by_name
        )

        return {
            "asset_entity": asset_entity,
            "containers": containers
        }

    def _load_containers(
        self, repres_by_subset_id, subsets_by_id,
        profiles_per_subset_id, loaders_by_name
    ):
        """Real load by collected data happens here.

        Loading of representations per subset happens here. Each subset can
        load one representation. Loading is tried in specific order.
        Representations are tried to load by names defined in configuration.
        If subset has representation matching representation name, each loader
        is tried to load it until any is successful. If none of them was
        successful then the next representation name is tried.
        Subset process loop ends when any representation is loaded or
        all matching representations were already tried.

        Args:
            repres_by_subset_id (dict): Available representations mapped
                by their parent (subset) id.
            subsets_by_id (dict): Subset documents mapped by their id.
            profiles_per_subset_id (dict): Build profiles mapped by subset id.
            loaders_by_name (dict): Available loaders per name.

        Returns:
            (list) Objects of loaded containers.
        """
        loaded_containers = []

        # Get subset id order from build presets.
        build_presets = self.build_presets.get("current_context", [])
        build_presets += self.build_presets.get("linked_assets", [])
        subset_ids_ordered = []
        for preset in build_presets:
            for preset_family in preset["families"]:
                for id, subset in subsets_by_id.items():
                    if preset_family not in subset["data"].get("families", []):
                        continue

                    subset_ids_ordered.append(id)

        # Order representations from subsets.
        print("repres_by_subset_id", repres_by_subset_id)
        representations_ordered = []
        representations = []
        for id in subset_ids_ordered:
            for subset_id, repres in repres_by_subset_id.items():
                if repres in representations:
                    continue

                if id == subset_id:
                    representations_ordered.append((subset_id, repres))
                    representations.append(repres)

        print("representations", representations)

        # Load ordered representations.
        for subset_id, repres in representations_ordered:
            subset_name = subsets_by_id[subset_id]["name"]

            profile = profiles_per_subset_id[subset_id]
            loaders_last_idx = len(profile["loaders"]) - 1
            repre_names_last_idx = len(profile["repre_names_lowered"]) - 1

            repre_by_low_name = {
                repre["name"].lower(): repre for repre in repres
            }

            is_loaded = False
            for repre_name_idx, profile_repre_name in enumerate(
                profile["repre_names_lowered"]
            ):
                # Break iteration if representation was already loaded
                if is_loaded:
                    break

                repre = repre_by_low_name.get(profile_repre_name)
                if not repre:
                    continue

                for loader_idx, loader_name in enumerate(profile["loaders"]):
                    if is_loaded:
                        break

                    loader = loaders_by_name.get(loader_name)
                    if not loader:
                        continue
                    try:
                        container = avalon.api.load(
                            loader,
                            repre["_id"],
                            name=subset_name
                        )
                        loaded_containers.append(container)
                        is_loaded = True

                    except Exception as exc:
                        if exc == pipeline.IncompatibleLoaderError:
                            self.log.info((
                                "Loader `{}` is not compatible with"
                                " representation `{}`"
                            ).format(loader_name, repre["name"]))

                        else:
                            self.log.error(
                                "Unexpected error happened during loading",
                                exc_info=True
                            )

                        msg = "Loading failed."
                        if loader_idx < loaders_last_idx:
                            msg += " Trying next loader."
                        elif repre_name_idx < repre_names_last_idx:
                            msg += " Trying next representation."
                        else:
                            msg += (
                                " Loading of subset `{}` was not successful."
                            ).format(subset_name)
                        self.log.info(msg)

        return loaded_containers

    def _collect_last_version_repres(self, asset_entities):
        """Collect subsets, versions and representations for asset_entities.

        Args:
            asset_entities (list): Asset entities for which want to find data

        Returns:
            (dict): collected entities

        Example output:
        ```
        {
            {Asset ID}: {
                "asset_entity": <AssetEntity>,
                "subsets": {
                    {Subset ID}: {
                        "subset_entity": <SubsetEntity>,
                        "version": {
                            "version_entity": <VersionEntity>,
                            "repres": [
                                <RepreEntity1>, <RepreEntity2>, ...
                            ]
                        }
                    },
                    ...
                }
            },
            ...
        }
        output[asset_id]["subsets"][subset_id]["version"]["repres"]
        ```
        """

        if not asset_entities:
            return {}

        asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities}

        subsets = list(io.find({
            "type": "subset",
            "parent": {"$in": asset_entity_by_ids.keys()}
        }))
        subset_entity_by_ids = {subset["_id"]: subset for subset in subsets}

        sorted_versions = list(io.find({
            "type": "version",
            "parent": {"$in": subset_entity_by_ids.keys()}
        }).sort("name", -1))

        subset_id_with_latest_version = []
        last_versions_by_id = {}
        for version in sorted_versions:
            subset_id = version["parent"]
            if subset_id in subset_id_with_latest_version:
                continue
            subset_id_with_latest_version.append(subset_id)
            last_versions_by_id[version["_id"]] = version

        repres = io.find({
            "type": "representation",
            "parent": {"$in": last_versions_by_id.keys()}
        })

        output = {}
        for repre in repres:
            version_id = repre["parent"]
            version = last_versions_by_id[version_id]

            subset_id = version["parent"]
            subset = subset_entity_by_ids[subset_id]

            asset_id = subset["parent"]
            asset = asset_entity_by_ids[asset_id]

            if asset_id not in output:
                output[asset_id] = {
                    "asset_entity": asset,
                    "subsets": {}
                }

            if subset_id not in output[asset_id]["subsets"]:
                output[asset_id]["subsets"][subset_id] = {
                    "subset_entity": subset,
                    "version": {
                        "version_entity": version,
                        "repres": []
                    }
                }

            output[asset_id]["subsets"][subset_id]["version"]["repres"].append(
                repre
            )

        return output
26
pype/lib/deprecated.py
Normal file

@@ -0,0 +1,26 @@
import os

from avalon import io


def get_avalon_database():
    """Mongo database used in avalon's io.

    * Function is not used in Pype 3.0, where it was replaced with usage of
      AvalonMongoDB.
    """
    if io._database is None:
        set_io_database()
    return io._database


def set_io_database():
    """Set avalon's io context with environments.

    * Function is not used in Pype 3.0, where it was replaced with usage of
      AvalonMongoDB.
    """
    required_keys = ["AVALON_PROJECT", "AVALON_ASSET", "AVALON_SILO"]
    for key in required_keys:
        os.environ[key] = os.environ.get(key, "")
    io.install()

46
pype/lib/ffmpeg_utils.py
Normal file

@@ -0,0 +1,46 @@
import logging
import json
import subprocess

from . import get_ffmpeg_tool_path

log = logging.getLogger("FFmpeg utils")


def ffprobe_streams(path_to_file, logger=None):
    """Load streams from entered filepath via ffprobe.

    Args:
        path_to_file (str): absolute path
        logger (logging.Logger): injected logger; if empty a new one is used

    """
    if not logger:
        logger = log
    logger.info(
        "Getting information about input \"{}\".".format(path_to_file)
    )
    args = [
        "\"{}\"".format(get_ffmpeg_tool_path("ffprobe")),
        "-v quiet",
        "-print_format json",
        "-show_format",
        "-show_streams",
        "\"{}\"".format(path_to_file)
    ]
    command = " ".join(args)
    logger.debug("FFprobe command: \"{}\"".format(command))
    popen = subprocess.Popen(
        command,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )

    popen_stdout, popen_stderr = popen.communicate()
    if popen_stdout:
        logger.debug("ffprobe stdout: {}".format(popen_stdout))

    if popen_stderr:
        logger.debug("ffprobe stderr: {}".format(popen_stderr))
    return json.loads(popen_stdout)["streams"]
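A short usage sketch (the media path is hypothetical; ffprobe must be discoverable through FFMPEG_PATH or PATH):

    streams = ffprobe_streams("/path/to/review.mov")
    for stream in streams:
        print(stream.get("codec_type"), stream.get("codec_name"))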
71
pype/lib/hooks.py
Normal file

@@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
"""Package containing code for handling hooks."""
import os
import sys
import types
import logging
from abc import ABCMeta, abstractmethod

import six


log = logging.getLogger(__name__)


@six.add_metaclass(ABCMeta)
class PypeHook:
    """Abstract class from which all hooks should inherit."""

    def __init__(self):
        """Constructor."""
        pass

    @abstractmethod
    def execute(self, *args, **kwargs):
        """Abstract execute method."""
        pass


def execute_hook(hook, *args, **kwargs):
    """Execute hook with arguments.

    This will load hook file, instantiate class and call
    :meth:`PypeHook.execute` method on it. Hook must be in a form::

        $PYPE_SETUP_PATH/repos/pype/path/to/hook.py/HookClass

    This will load `hook.py`, instantiate HookClass and then execute
    `execute(*args, **kwargs)`.

    Args:
        hook (str): path to hook class.

    """
    class_name = hook.split("/")[-1]

    abspath = os.path.join(os.getenv('PYPE_SETUP_PATH'),
                           'repos', 'pype', *hook.split("/")[:-1])

    mod_name, mod_ext = os.path.splitext(os.path.basename(abspath))

    if not mod_ext == ".py":
        return False

    module = types.ModuleType(mod_name)
    module.__file__ = abspath

    try:
        with open(abspath) as f:
            six.exec_(f.read(), module.__dict__)

        sys.modules[abspath] = module

    except Exception as exp:
        log.exception("loading hook failed: {}".format(exp),
                      exc_info=True)
        return False

    obj = getattr(module, class_name)
    hook_obj = obj()
    ret_val = hook_obj.execute(*args, **kwargs)
    return ret_val
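A minimal hook, as a hedged sketch of the contract `execute_hook` expects; the module path and class name below are hypothetical:

    import pype.lib

    class MyPrelaunchHook(pype.lib.PypeHook):
        def execute(self, *args, env=None, **kwargs):
            # Do prelaunch work here; return a truthy value so the
            # launch continues.
            return True

    # Loaded and executed by path relative to $PYPE_SETUP_PATH/repos/pype:
    # pype.lib.execute_hook("path/to/my_hook.py/MyPrelaunchHook", env={})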
181
pype/lib/path_tools.py
Normal file

@@ -0,0 +1,181 @@
import os
import re
import logging

log = logging.getLogger(__name__)


def get_paths_from_environ(env_key, return_first=False):
    """Return existing paths from specific environment variable.

    Args:
        env_key (str): Environment key where should look for paths.
        return_first (bool): Return first existing path on `True`, list of
            all existing paths on `False`.

    Returns:
        (str or list): First existing path, or list of all existing paths.

    Difference when none of paths exists:
    - when `return_first` is set to `False` then function returns empty list.
    - when `return_first` is set to `True` then function returns `None`.
    """
    existing_paths = []
    paths = os.environ.get(env_key) or ""
    path_items = paths.split(os.pathsep)
    for path in path_items:
        # Skip empty string
        if not path:
            continue
        # Normalize path
        path = os.path.normpath(path)
        # Check if path exists
        if os.path.exists(path):
            # Return path if `return_first` is set to True
            if return_first:
                return path
            # Store path
            existing_paths.append(path)

    # Return None if none of paths exists
    if return_first:
        return None
    # Return all existing paths from environment variable
    return existing_paths


def get_ffmpeg_tool_path(tool="ffmpeg"):
    """Find path to ffmpeg tool in FFMPEG_PATH paths.

    Function looks for tool in paths set in FFMPEG_PATH environment. If the
    tool exists there, its full path is returned.

    Args:
        tool (string): tool name

    Returns:
        (str): full path to the tool, or the tool name itself when the tool
            path was not found (FFmpeg path may be set in PATH environment
            variable)
    """
    dir_paths = get_paths_from_environ("FFMPEG_PATH")
    for dir_path in dir_paths:
        for file_name in os.listdir(dir_path):
            base, _ext = os.path.splitext(file_name)
            if base.lower() == tool.lower():
                return os.path.join(dir_path, tool)
    return tool


def _rreplace(s, a, b, n=1):
    """Replace a with b in string s from right side n times."""
    return b.join(s.rsplit(a, n))


def version_up(filepath):
    """Version up filepath to a new non-existing version.

    Parses for a version identifier like `_v001` or `.v001`.
    When no version is present, `_v001` is appended as suffix.

    Args:
        filepath (str): full file path

    Returns:
        (str): filepath with increased version number

    """
    dirname = os.path.dirname(filepath)
    basename, ext = os.path.splitext(os.path.basename(filepath))

    regex = r"[._]v\d+"
    matches = re.findall(regex, str(basename), re.IGNORECASE)
    if not matches:
        log.info("Creating version...")
        new_label = "_v{version:03d}".format(version=1)
        new_basename = "{}{}".format(basename, new_label)
    else:
        label = matches[-1]
        version = re.search(r"\d+", label).group()
        padding = len(version)

        new_version = int(version) + 1
        new_version = '{version:0{padding}d}'.format(version=new_version,
                                                     padding=padding)
        new_label = label.replace(version, new_version, 1)
        new_basename = _rreplace(basename, label, new_label)

    if not new_basename.endswith(new_label):
        index = (new_basename.find(new_label))
        index += len(new_label)
        new_basename = new_basename[:index]

    new_filename = "{}{}".format(new_basename, ext)
    new_filename = os.path.join(dirname, new_filename)
    new_filename = os.path.normpath(new_filename)

    if new_filename == filepath:
        raise RuntimeError("Created path is the same as current file,"
                           " this is a bug")

    for file in os.listdir(dirname):
        if file.endswith(ext) and file.startswith(new_basename):
            log.info("Skipping existing version %s" % new_label)
            return version_up(new_filename)

    log.info("New version %s" % new_label)
    return new_filename
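# Behaviour sketch (outputs depend on what already exists on disk):
#
#     version_up("/work/shot01_v001.ma")  ->  "/work/shot01_v002.ma"
#     version_up("/work/shot01.ma")       ->  "/work/shot01_v001.ma"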


def get_version_from_path(file):
    """Find version number in file path string.

    Args:
        file (string): file path

    Returns:
        v: version number in string ('001')

    """
    pattern = re.compile(r"[\._]v([0-9]+)", re.IGNORECASE)
    try:
        return pattern.findall(file)[0]
    except IndexError:
        log.error(
            "templates:get_version_from_workfile:"
            "`{}` is missing a version string."
            " Example: `v004`".format(file)
        )


def get_last_version_from_path(path_dir, filter):
    """Find last version of given directory content.

    Args:
        path_dir (string): directory path
        filter (list): list of strings used as file name filter

    Returns:
        string: file name with last version

    Example:
        last_version_file = get_last_version_from_path(
            "/project/shots/shot01/work", ["shot01", "compositing", "nk"])
    """
    assert os.path.isdir(path_dir), "`path_dir` argument needs to be directory"
    assert isinstance(filter, list) and (
        len(filter) != 0), "`filter` argument needs to be list and not empty"

    filtered_files = list()

    # form regex for filtering
    pattern = r".*".join(filter)

    for file in os.listdir(path_dir):
        if not re.findall(pattern, file):
            continue
        filtered_files.append(file)

    if filtered_files:
        filtered_files.sort()
        return filtered_files[-1]

    return None

80
pype/lib/plugin_tools.py
Normal file

@@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-
"""Avalon/Pyblish plugin tools."""
import os
import inspect
import logging

from ..api import config


log = logging.getLogger(__name__)


def filter_pyblish_plugins(plugins):
    """Filter pyblish plugins by presets.

    This serves as a plugin filter / modifier for pyblish. It will load plugin
    definitions from presets and filter out those that need to be excluded.

    Args:
        plugins (dict): Dictionary of plugins produced by :mod:`pyblish-base`
            `discover()` method.

    """
    from pyblish import api

    host = api.current_host()

    presets = config.get_presets().get('plugins', {})

    # iterate over plugins
    for plugin in plugins[:]:
        # skip if there are no presets to process
        if not presets:
            continue

        file = os.path.normpath(inspect.getsourcefile(plugin))
        file = os.path.normpath(file)

        # host determined from path
        host_from_file = file.split(os.path.sep)[-3:-2][0]
        plugin_kind = file.split(os.path.sep)[-2:-1][0]

        try:
            config_data = presets[host]["publish"][plugin.__name__]
        except KeyError:
            try:
                config_data = presets[host_from_file][plugin_kind][plugin.__name__]  # noqa: E501
            except KeyError:
                continue

        for option, value in config_data.items():
            if option == "enabled" and value is False:
                log.info('removing plugin {}'.format(plugin.__name__))
                plugins.remove(plugin)
            else:
                log.info('setting {}:{} on plugin {}'.format(
                    option, value, plugin.__name__))

                setattr(plugin, option, value)
|
||||
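# A hedged sketch of the presets structure filter_pyblish_plugins expects;
# the host and plugin names below are hypothetical, only the nesting
# (host -> "publish" -> plugin class name -> options) follows the lookups
# in the code above:
#   {
#       "maya": {
#           "publish": {
#               "ValidateMeshNormals": {"enabled": False},
#               "ExtractAlembic": {"optional": True}
#           }
#       }
#   }
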
def source_hash(filepath, *args):
    """Generate simple identifier for a source file.

    This is used to identify whether a source file has previously been
    processed into the pipeline, e.g. a texture.

    The hash is based on source filepath, modification time and file size.
    This is only used to identify whether a specific source file was already
    published before from the same location with the same modification date.
    We opt to do it this way as opposed to the Avalanche C4 hash as this is
    much faster and predictable enough for all our production use cases.

    Args:
        filepath (str): The source file path.

    You can specify additional arguments in the function
    to allow for specific 'processing' values to be included.

    """
    # We replace dots with comma because . cannot be a key in a pymongo dict.
    file_name = os.path.basename(filepath)
    time = str(os.path.getmtime(filepath))
    size = str(os.path.getsize(filepath))
    return "|".join([file_name, time, size] + list(args)).replace(".", ",")
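# A hedged usage sketch for source_hash (all values illustrative):
#   source_hash("/mnt/tex/wood_diffuse.tif")
#   # -> "wood_diffuse,tif|1588239471,0|1048576"
# Dots become commas so the result can be used as a pymongo dict key.
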
pype/modules/websocket_server/hosts/aftereffects.py (new file)
@@ -0,0 +1,64 @@
from pype.api import Logger
from wsrpc_aiohttp import WebSocketRoute
import functools

import avalon.aftereffects as aftereffects

log = Logger().get_logger("WebsocketServer")


class AfterEffects(WebSocketRoute):
    """
    One route, mimicking external application (like Harmony, etc).
    All functions could be called from client.
    'do_notify' function calls function on the client - mimicking
    notification after long running job on the server or similar
    """
    instance = None

    def init(self, **kwargs):
        # Python __init__ must return "self".
        # This method might return anything.
        log.debug("someone called AfterEffects route")
        self.instance = self
        return kwargs

    # server functions
    async def ping(self):
        log.debug("someone called AfterEffects route ping")

    # This method calls function on the client side
    # client functions

    async def read(self):
        log.debug("aftereffects.read client calls server, server calls "
                  "aftereffects client")
        return await self.socket.call('aftereffects.read')

    # panel routes for tools
    async def creator_route(self):
        self._tool_route("creator")

    async def workfiles_route(self):
        self._tool_route("workfiles")

    async def loader_route(self):
        self._tool_route("loader")

    async def publish_route(self):
        self._tool_route("publish")

    async def sceneinventory_route(self):
        self._tool_route("sceneinventory")

    async def projectmanager_route(self):
        self._tool_route("projectmanager")

    def _tool_route(self, tool_name):
        """The address accessed when clicking on the buttons."""
        partial_method = functools.partial(aftereffects.show, tool_name)

        aftereffects.execute_in_main_thread(partial_method)

        # Required return statement.
        return "nothing"
pype/modules/websocket_server/stubs/aftereffects_server_stub.py (new file)
@@ -0,0 +1,284 @@
"""
Stub handling connection from server to client.
Used anywhere solution is calling client methods.
"""
from pype.modules.websocket_server import WebSocketServer

import json
import logging
from collections import namedtuple

log = logging.getLogger(__name__)


class AfterEffectsServerStub:
    """
    Stub for calling functions on the client (AfterEffects js) side.
    Expects that client is already connected (started when avalon menu
    is opened).
    'self.websocketserver.call' is used as async wrapper
    """

    def __init__(self):
        self.websocketserver = WebSocketServer.get_instance()
        self.client = self.websocketserver.get_client()

    def open(self, path):
        """
        Open file located at 'path' (local).

        Args:
            path(string): file path locally
        Returns: None
        """
        self.websocketserver.call(
            self.client.call('AfterEffects.open', path=path)
        )

    def read(self, layer, layers_meta=None):
        """
        Parses layer metadata from Label field of active document.

        Args:
            layer: <namedTuple Layer("id": XX, "name": "YYY")>
            layers_meta: full list from Headline (for performance in loops)
        Returns:
            (dict) or None
        """
        if layers_meta is None:
            layers_meta = self.get_metadata()

        return layers_meta.get(str(layer.id))

    def get_metadata(self):
        """
        Get stored JSON with metadata from AE.Metadata.Label field.

        Returns:
            (dict)
        """
        res = self.websocketserver.call(
            self.client.call('AfterEffects.get_metadata')
        )
        try:
            layers_data = json.loads(res)
        except json.decoder.JSONDecodeError:
            raise ValueError("Unparsable metadata {}".format(res))
        return layers_data or {}

    def imprint(self, layer, data, all_layers=None, layers_meta=None):
        """
        Save layer metadata to Label field of metadata of active document.

        Args:
            layer (namedtuple): Layer("id": XXX, "name": 'YYY')
            data(string): json representation for single layer
            all_layers (list of namedtuples): for performance, could be
                injected for usage in loop, if not, single call will be
                triggered
            layers_meta(string): json representation from Headline
                (for performance - provide only if imprint is in
                loop - value should be same)
        Returns: None
        """
        if not layers_meta:
            layers_meta = self.get_metadata()

        # json.dumps writes integer values in a dictionary to string, so
        # anticipating it here.
        if str(layer.id) in layers_meta and layers_meta[str(layer.id)]:
            if data:
                layers_meta[str(layer.id)].update(data)
            else:
                layers_meta.pop(str(layer.id))
        else:
            layers_meta[str(layer.id)] = data
        # Ensure only valid ids are stored.
        if not all_layers:
            # loaders create FootageItem now
            all_layers = self.get_items(comps=True,
                                        folders=False,
                                        footages=True)
        item_ids = [int(item.id) for item in all_layers]
        cleaned_data = {}
        for item_id in layers_meta:
            if int(item_id) in item_ids:
                cleaned_data[item_id] = layers_meta[item_id]

        payload = json.dumps(cleaned_data, indent=4)

        self.websocketserver.call(
            self.client.call('AfterEffects.imprint', payload=payload)
        )

    def get_active_document_full_name(self):
        """
        Returns full name of active document via ws call.

        Returns(string): file name
        """
        res = self.websocketserver.call(self.client.call(
            'AfterEffects.get_active_document_full_name'))

        return res

    def get_active_document_name(self):
        """
        Returns just a name of active document via ws call.

        Returns(string): file name
        """
        res = self.websocketserver.call(self.client.call(
            'AfterEffects.get_active_document_name'))

        return res

    def get_items(self, comps, folders=False, footages=False):
        """
        Get all items from Project panel according to arguments.

        There are multiple different types:
            CompItem (could have multiple layers - source for Creator)
            FolderItem (collection type, currently not used)
            FootageItem (imported file - created by Loader)

        Args:
            comps (bool): return CompItems
            folders (bool): return FolderItems
            footages (bool): return FootageItems

        Returns:
            (list) of namedtuples
        """
        res = self.websocketserver.call(
            self.client.call('AfterEffects.get_items',
                             comps=comps,
                             folders=folders,
                             footages=footages)
        )
        return self._to_records(res)

    def get_selected_items(self, comps, folders=False, footages=False):
        """
        Same as get_items but using selected items only.

        Args:
            comps (bool): return CompItems
            folders (bool): return FolderItems
            footages (bool): return FootageItems

        Returns:
            (list) of namedtuples
        """
        res = self.websocketserver.call(
            self.client.call('AfterEffects.get_selected_items',
                             comps=comps,
                             folders=folders,
                             footages=footages)
        )
        return self._to_records(res)

    def import_file(self, path, item_name, import_options=None):
        """
        Imports file as a FootageItem. Used in Loader.

        Args:
            path (string): absolute path for asset file
            item_name (string): label for created FootageItem
            import_options (dict): different files (img vs psd) need different
                config
        """
        res = self.websocketserver.call(self.client.call(
            'AfterEffects.import_file',
            path=path,
            item_name=item_name,
            import_options=import_options)
        )
        records = self._to_records(res)
        if records:
            return records.pop()

        log.debug("Couldn't import {} file".format(path))

    def replace_item(self, item, path, item_name):
        """Replace FootageItem with new file.

        Args:
            item (dict):
            path (string): absolute path
            item_name (string): label on item in Project list
        """
        self.websocketserver.call(
            self.client.call('AfterEffects.replace_item',
                             item_id=item.id,
                             path=path, item_name=item_name))

    def delete_item(self, item):
        """Deletes FootageItem.

        Args:
            item (dict):
        """
        self.websocketserver.call(
            self.client.call('AfterEffects.delete_item',
                             item_id=item.id
                             ))

    def is_saved(self):
        # TODO
        return True

    def set_label_color(self, item_id, color_idx):
        """
        Used to highlight additional information in Project panel.
        Green color is a loaded asset, blue is a created asset.

        Args:
            item_id (int):
            color_idx (int): 0-16 Label colors from AE Project view
        """
        self.websocketserver.call(
            self.client.call('AfterEffects.set_label_color',
                             item_id=item_id,
                             color_idx=color_idx
                             ))

    def save(self):
        """
        Saves active document.

        Returns: None
        """
        self.websocketserver.call(
            self.client.call('AfterEffects.save'))

    def saveAs(self, project_path, as_copy):
        """
        Saves active project to aep (copy) or png or jpg.

        Args:
            project_path(string): full local path
            as_copy: <boolean>
        Returns: None
        """
        self.websocketserver.call(
            self.client.call('AfterEffects.saveAs',
                             image_path=project_path,
                             as_copy=as_copy))

    def close(self):
        self.client.close()

    def _to_records(self, res):
        """
        Converts string json representation into list of named tuples for
        dot notation access to work.

        Args:
            res(string): json representation
        Returns: <list of named tuples>
        """
        if not res:
            return []

        try:
            layers_data = json.loads(res)
        except json.decoder.JSONDecodeError:
            raise ValueError("Received broken JSON {}".format(res))
        if not layers_data:
            return []

        ret = []
        # convert to namedtuple to use dot notation
        if isinstance(layers_data, dict):  # TODO: refactor
            layers_data = [layers_data]
        for d in layers_data:
            ret.append(namedtuple('Layer', d.keys())(*d.values()))
        return ret
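# A hedged sketch of the JSON round-trip _to_records performs (the payload
# is illustrative, not part of this commit):
#   res = '[{"id": 42, "name": "compMain"}]'
#   records = stub._to_records(res)
#   records[0].id    # -> 42
#   records[0].name  # -> "compMain"
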
pype/plugins/aftereffects/create/create_render.py (new file)
@@ -0,0 +1,52 @@
from avalon import api
from avalon.vendor import Qt
from avalon import aftereffects

import logging

log = logging.getLogger(__name__)


class CreateRender(api.Creator):
    """Render folder for publish."""

    name = "renderDefault"
    label = "Render"
    family = "render"

    def process(self):
        # After Effects can have multiple items with the same name, which
        # does not work with Avalon.
        txt = "Instance with name \"{}\" already exists.".format(self.name)
        stub = aftereffects.stub()  # only after After Effects is up
        for layer in stub.get_items(comps=True,
                                    folders=False,
                                    footages=False):
            if self.name.lower() == layer.name.lower():
                msg = Qt.QtWidgets.QMessageBox()
                msg.setIcon(Qt.QtWidgets.QMessageBox.Warning)
                msg.setText(txt)
                msg.exec_()
                return False
        log.debug("options:: {}".format(self.options))
        if (self.options or {}).get("useSelection"):
            log.debug("useSelection")
            items = stub.get_selected_items(comps=True,
                                            folders=False,
                                            footages=False)
        else:
            items = stub.get_items(comps=True,
                                   folders=False,
                                   footages=False)
        log.debug("items:: {}".format(items))
        if not items:
            raise ValueError("Nothing to create. Select composition "
                             "if 'useSelection' or create at least "
                             "one composition.")

        for item in items:
            stub.imprint(item, self.data)
            stub.set_label_color(item.id, 14)  # Cyan options 0 - 16
pype/plugins/aftereffects/load/load_file.py (new file)
@@ -0,0 +1,105 @@
from avalon import api, aftereffects
from pype.plugins import lib
import re

stub = aftereffects.stub()


class FileLoader(api.Loader):
    """Load images.

    Stores the imported asset in a container named after the asset.
    """
    label = "Load file"

    families = ["image",
                "plate",
                "render",
                "prerender",
                "review",
                "audio"]
    representations = ["*"]

    def load(self, context, name=None, namespace=None, data=None):
        comp_name = lib.get_unique_layer_name(stub.get_items(comps=True),
                                              context["asset"]["name"],
                                              name)

        import_options = {}

        file = self.fname

        repr_cont = context["representation"]["context"]
        if "#" not in file:
            frame = repr_cont.get("frame")
            if frame:
                padding = len(frame)
                file = file.replace(frame, "#" * padding)
                import_options['sequence'] = True

        if not file:
            repr_id = context["representation"]["_id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return

        file = file.replace("\\", "/")
        if '.psd' in file:
            import_options['ImportAsType'] = 'ImportAsType.COMP'

        comp = stub.import_file(file, comp_name, import_options)

        if not comp:
            self.log.warning(
                "Representation `{}` is failing to load".format(file))
            self.log.warning("Check host app for alert error.")
            return

        self[:] = [comp]
        namespace = namespace or comp_name

        return aftereffects.containerise(
            name,
            namespace,
            comp,
            context,
            self.__class__.__name__
        )

    def update(self, container, representation):
        """Switch asset or change version."""
        layer = container.pop("layer")

        context = representation.get("context", {})

        namespace_from_container = re.sub(r'_\d{3}$', '',
                                          container["namespace"])
        layer_name = "{}_{}".format(context["asset"], context["subset"])
        # switching assets
        if namespace_from_container != layer_name:
            layer_name = lib.get_unique_layer_name(stub.get_items(comps=True),
                                                   context["asset"],
                                                   context["subset"])
        else:  # switching version - keep same name
            layer_name = container["namespace"]
        path = api.get_representation_path(representation)
        # with aftereffects.maintained_selection():  # TODO
        stub.replace_item(layer, path, layer_name)
        stub.imprint(
            layer, {"representation": str(representation["_id"]),
                    "name": context["subset"],
                    "namespace": layer_name}
        )

    def remove(self, container):
        """
        Removes element from scene: deletes layer + removes from Headline.

        Args:
            container (dict): container to be removed - used to get layer_id
        """
        layer = container.pop("layer")
        stub.imprint(layer, {})
        stub.delete_item(layer)

    def switch(self, container, representation):
        self.update(container, representation)

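# A hedged sketch of the frame-padding step in FileLoader.load above
# (file names assumed, not part of this commit):
#   file = "/renders/sh010_beauty.1001.exr", frame "1001" in repr context
#   # -> "/renders/sh010_beauty.####.exr", import_options['sequence'] = True
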
@@ -1,7 +1,9 @@
import pyblish.api
import os
import collections

import pyblish.api
from avalon import io

import pype.api as pype
from pprint import pformat

@@ -12,12 +14,13 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin):

    def process(self, context):
        self.log.info('Collecting Audio Data')
        asset_entity = context.data["assetEntity"]
        asset_doc = context.data["assetEntity"]

        # get all available representations
        subsets = pype.get_subsets(asset_entity["name"],
                                   representations=["audio", "wav"]
                                   )
        subsets = self.get_subsets(
            asset_doc,
            representations=["audio", "wav"]
        )
        self.log.info(f"subsets is: {pformat(subsets)}")

        if not subsets.get("audioMain"):
@@ -39,3 +42,85 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin):
                'audio_file: {}, has been added to context'.format(audio_file))
        else:
            self.log.warning("Couldn't find any audio file on Ftrack.")

    def get_subsets(self, asset_doc, representations):
        """
        Query subsets with filter on name.

        The method will return all found subsets with their last version.
        Representations can be filtered.

        Arguments:
            asset_doc (dict): Asset (shot) mongo document
            representations (list): list for all representations

        Returns:
            dict: subsets with version and representations in keys
        """

        # Query all subsets for asset
        subset_docs = list(io.find({
            "type": "subset",
            "parent": asset_doc["_id"]
        }))
        # Collect all subset documents by their ids
        subset_docs_by_id = {
            subset_doc["_id"]: subset_doc
            for subset_doc in subset_docs
        }
        subset_ids = list(subset_docs_by_id.keys())

        # Check if we found anything
        assert subset_ids, (
            "No subsets found. Check correct filter. "
            "Try this for start `r'.*'`: asset: `{}`"
        ).format(asset_doc["name"])

        # Last version aggregation
        pipeline = [
            # Find all versions of those subsets
            {"$match": {
                "type": "version",
                "parent": {"$in": subset_ids}
            }},
            # Sorting versions all together
            {"$sort": {"name": 1}},
            # Group them by "parent", but only take the last
            {"$group": {
                "_id": "$parent",
                "_version_id": {"$last": "$_id"},
                "name": {"$last": "$name"}
            }}
        ]
        last_versions_by_subset_id = dict()
        for doc in io.aggregate(pipeline):
            doc["parent"] = doc["_id"]
            doc["_id"] = doc.pop("_version_id")
            last_versions_by_subset_id[doc["parent"]] = doc

        version_docs_by_id = {}
        for version_doc in last_versions_by_subset_id.values():
            version_docs_by_id[version_doc["_id"]] = version_doc

        repre_docs = io.find({
            "type": "representation",
            "parent": {"$in": list(version_docs_by_id.keys())},
            "name": {"$in": representations}
        })
        repre_docs_by_version_id = collections.defaultdict(list)
        for repre_doc in repre_docs:
            version_id = repre_doc["parent"]
            repre_docs_by_version_id[version_id].append(repre_doc)

        output_dict = {}
        for version_id, repre_docs in repre_docs_by_version_id.items():
            version_doc = version_docs_by_id[version_id]
            subset_id = version_doc["parent"]
            subset_doc = subset_docs_by_id[subset_id]
            # Store queried docs by subset name
            output_dict[subset_doc["name"]] = {
                "representations": repre_docs,
                "version": version_doc
            }

        return output_dict

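# A hedged sketch of the dict get_subsets returns (values illustrative,
# keyed by subset name as the caller's `subsets.get("audioMain")` expects):
#   {
#       "audioMain": {
#           "version": {"_id": <version id>, "name": 3, "parent": <subset id>},
#           "representations": [{"name": "wav", ...}, ...]
#       }
#   }
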
@@ -19,12 +19,16 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
        if "unreal" in pyblish.api.registered_hosts():
            return

        assert context.data.get('currentFile'), "Cannot get current file"
        filename = os.path.basename(context.data.get('currentFile'))

        if '<shell>' in filename:
            return

        rootVersion = int(pype.get_version_from_path(filename))
        version = pype.get_version_from_path(filename)
        assert version, "Cannot determine version"

        rootVersion = int(version)
        context.data['version'] = rootVersion
        self.log.info("{}".format(type(rootVersion)))
        self.log.info('Scene Version: %s' % context.data.get('version'))

@@ -157,6 +157,11 @@ class ExtractBurnin(pype.api.Extractor):
        filled_anatomy = anatomy.format_all(burnin_data)
        burnin_data["anatomy"] = filled_anatomy.get_solved()

        # Add source camera name to burnin data
        camera_name = repre.get("camera_name")
        if camera_name:
            burnin_data["camera_name"] = camera_name

        first_output = True

        files_to_delete = []

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Create render node."""
from avalon import harmony

@@ -10,17 +12,15 @@ class CreateRender(harmony.Creator):
    node_type = "WRITE"

    def __init__(self, *args, **kwargs):
        """Constructor."""
        super(CreateRender, self).__init__(*args, **kwargs)

    def setup_node(self, node):
        sig = harmony.signature()
        func = """function %s(args)
        {
            node.setTextAttr(args[0], "DRAWING_TYPE", 1, "PNG4");
            node.setTextAttr(args[0], "DRAWING_NAME", 1, args[1]);
            node.setTextAttr(args[0], "MOVIE_PATH", 1, args[1]);
        }
        %s
        """ % (sig, sig)
        """Set render node."""
        self_name = self.__class__.__name__
        path = "{0}/{0}".format(node.split("/")[-1])
        harmony.send({"function": func, "args": [node, path]})
        harmony.send(
            {
                "function": f"PypeHarmony.Creators.{self_name}.create",
                "args": [node, path]
            })

@@ -1,277 +1,81 @@
# -*- coding: utf-8 -*-
"""Loader for image sequences."""
import os
import uuid
from pathlib import Path

import clique

from avalon import api, harmony
import pype.lib

copy_files = """function copyFile(srcFilename, dstFilename)
{
    var srcFile = new PermanentFile(srcFilename);
    var dstFile = new PermanentFile(dstFilename);
    srcFile.copy(dstFile);
}
"""

import_files = """var PNGTransparencyMode = 1; //Premultiplied with Black
var TGATransparencyMode = 0; //Premultiplied with Black
var SGITransparencyMode = 0; //Premultiplied with Black
var LayeredPSDTransparencyMode = 1; //Straight
var FlatPSDTransparencyMode = 2; //Premultiplied with White

function getUniqueColumnName( column_prefix )
{
    var suffix = 0;
    // finds if unique name for a column
    var column_name = column_prefix;
    while(suffix < 2000)
    {
        if(!column.type(column_name))
            break;

        suffix = suffix + 1;
        column_name = column_prefix + "_" + suffix;
    }
    return column_name;
}

function import_files(args)
{
    var root = args[0];
    var files = args[1];
    var name = args[2];
    var start_frame = args[3];

    var vectorFormat = null;
    var extension = null;
    var filename = files[0];

    var pos = filename.lastIndexOf(".");
    if( pos < 0 )
        return null;

    extension = filename.substr(pos+1).toLowerCase();

    if(extension == "jpeg")
        extension = "jpg";
    if(extension == "tvg")
    {
        vectorFormat = "TVG"
        extension = "SCAN"; // element.add() will use this.
    }

    var elemId = element.add(
        name,
        "BW",
        scene.numberOfUnitsZ(),
        extension.toUpperCase(),
        vectorFormat
    );
    if (elemId == -1)
    {
        // hum, unknown file type most likely -- let's skip it.
        return null; // no read to add.
    }

    var uniqueColumnName = getUniqueColumnName(name);
    column.add(uniqueColumnName, "DRAWING");
    column.setElementIdOfDrawing(uniqueColumnName, elemId);

    var read = node.add(root, name, "READ", 0, 0, 0);
    var transparencyAttr = node.getAttr(
        read, frame.current(), "READ_TRANSPARENCY"
    );
    var opacityAttr = node.getAttr(read, frame.current(), "OPACITY");
    transparencyAttr.setValue(true);
    opacityAttr.setValue(true);

    var alignmentAttr = node.getAttr(read, frame.current(), "ALIGNMENT_RULE");
    alignmentAttr.setValue("ASIS");

    var transparencyModeAttr = node.getAttr(
        read, frame.current(), "applyMatteToColor"
    );
    if (extension == "png")
        transparencyModeAttr.setValue(PNGTransparencyMode);
    if (extension == "tga")
        transparencyModeAttr.setValue(TGATransparencyMode);
    if (extension == "sgi")
        transparencyModeAttr.setValue(SGITransparencyMode);
    if (extension == "psd")
        transparencyModeAttr.setValue(FlatPSDTransparencyMode);
    if (extension == "jpg")
        transparencyModeAttr.setValue(LayeredPSDTransparencyMode);

    node.linkAttr(read, "DRAWING.ELEMENT", uniqueColumnName);

    if (files.length == 1)
    {
        // Create a drawing, 'true' indicates that the file exists.
        Drawing.create(elemId, 1, true);
        // Get the actual path, in tmp folder.
        var drawingFilePath = Drawing.filename(elemId, "1");
        copyFile(files[0], drawingFilePath);
        // Expose the image for the entire frame range.
        for( var i = 0; i <= frame.numberOf() - 1; ++i)
        {
            timing = start_frame + i
            column.setEntry(uniqueColumnName, 1, timing, "1");
        }
    } else {
        // Create a drawing for each file.
        for( var i = 0; i <= files.length - 1; ++i)
        {
            timing = start_frame + i
            // Create a drawing, 'true' indicates that the file exists.
            Drawing.create(elemId, timing, true);
            // Get the actual path, in tmp folder.
            var drawingFilePath = Drawing.filename(elemId, timing.toString());
            copyFile( files[i], drawingFilePath );

            column.setEntry(uniqueColumnName, 1, timing, timing.toString());
        }
    }

    var green_color = new ColorRGBA(0, 255, 0, 255);
    node.setColor(read, green_color);

    return read;
}
import_files
"""

replace_files = """var PNGTransparencyMode = 1; //Premultiplied with Black
var TGATransparencyMode = 0; //Premultiplied with Black
var SGITransparencyMode = 0; //Premultiplied with Black
var LayeredPSDTransparencyMode = 1; //Straight
var FlatPSDTransparencyMode = 2; //Premultiplied with White

function replace_files(args)
{
    var files = args[0];
    MessageLog.trace(files);
    MessageLog.trace(files.length);
    var _node = args[1];
    var start_frame = args[2];

    var _column = node.linkedColumn(_node, "DRAWING.ELEMENT");
    var elemId = column.getElementIdOfDrawing(_column);

    // Delete existing drawings.
    var timings = column.getDrawingTimings(_column);
    for( var i = 0; i <= timings.length - 1; ++i)
    {
        column.deleteDrawingAt(_column, parseInt(timings[i]));
    }


    var filename = files[0];
    var pos = filename.lastIndexOf(".");
    if( pos < 0 )
        return null;
    var extension = filename.substr(pos+1).toLowerCase();

    if(extension == "jpeg")
        extension = "jpg";

    var transparencyModeAttr = node.getAttr(
        _node, frame.current(), "applyMatteToColor"
    );
    if (extension == "png")
        transparencyModeAttr.setValue(PNGTransparencyMode);
    if (extension == "tga")
        transparencyModeAttr.setValue(TGATransparencyMode);
    if (extension == "sgi")
        transparencyModeAttr.setValue(SGITransparencyMode);
    if (extension == "psd")
        transparencyModeAttr.setValue(FlatPSDTransparencyMode);
    if (extension == "jpg")
        transparencyModeAttr.setValue(LayeredPSDTransparencyMode);

    if (files.length == 1)
    {
        // Create a drawing, 'true' indicates that the file exists.
        Drawing.create(elemId, 1, true);
        // Get the actual path, in tmp folder.
        var drawingFilePath = Drawing.filename(elemId, "1");
        copyFile(files[0], drawingFilePath);
        MessageLog.trace(files[0]);
        MessageLog.trace(drawingFilePath);
        // Expose the image for the entire frame range.
        for( var i = 0; i <= frame.numberOf() - 1; ++i)
        {
            timing = start_frame + i
            column.setEntry(_column, 1, timing, "1");
        }
    } else {
        // Create a drawing for each file.
        for( var i = 0; i <= files.length - 1; ++i)
        {
            timing = start_frame + i
            // Create a drawing, 'true' indicates that the file exists.
            Drawing.create(elemId, timing, true);
            // Get the actual path, in tmp folder.
            var drawingFilePath = Drawing.filename(elemId, timing.toString());
            copyFile( files[i], drawingFilePath );

            column.setEntry(_column, 1, timing, timing.toString());
        }
    }

    var green_color = new ColorRGBA(0, 255, 0, 255);
    node.setColor(_node, green_color);
}
replace_files
"""


class ImageSequenceLoader(api.Loader):
    """Load images
    """Load image sequences.

    Stores the imported asset in a container named after the asset.
    """

    families = ["shot", "render", "image", "plate", "reference"]
    representations = ["jpeg", "png", "jpg"]

    def load(self, context, name=None, namespace=None, data=None):
        """Plugin entry point.

        Args:
            context (:class:`pyblish.api.Context`): Context.
            name (str, optional): Container name.
            namespace (str, optional): Container namespace.
            data (dict, optional): Additional data passed into loader.

        """
        fname = Path(self.fname)
        self_name = self.__class__.__name__
        collections, remainder = clique.assemble(
            os.listdir(os.path.dirname(self.fname))
            os.listdir(fname.parent.as_posix())
        )
        files = []
        if collections:
            for f in list(collections[0]):
                files.append(
                    os.path.join(
                        os.path.dirname(self.fname), f
                    ).replace("\\", "/")
                )
                files.append(fname.parent.joinpath(f).as_posix())
        else:
            files.append(
                os.path.join(
                    os.path.dirname(self.fname), remainder[0]
                ).replace("\\", "/")
            )
            files.append(fname.parent.joinpath(remainder[0]).as_posix())

        name = context["subset"]["name"]
        name += "_{}".format(uuid.uuid4())
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]

        group_id = str(uuid.uuid4())
        read_node = harmony.send(
            {
                "function": copy_files + import_files,
                "args": ["Top", files, name, 1]
                "function": f"PypeHarmony.Loaders.{self_name}.importFiles",  # noqa: E501
                "args": [
                    files,
                    asset,
                    subset,
                    1,
                    group_id
                ]
            }
        )["result"]

        return harmony.containerise(
            name,
            f"{asset}_{subset}",
            namespace,
            read_node,
            context,
            self.__class__.__name__,
            self_name,
            nodes=[read_node]
        )

    def update(self, container, representation):
        """Update loaded containers.

        Args:
            container (dict): Container data.
            representation (dict): Representation data.

        """
        self_name = self.__class__.__name__
        node = harmony.find_node_by_name(container["name"], "READ")

        path = api.get_representation_path(representation)

@@ -295,50 +99,42 @@ class ImageSequenceLoader(api.Loader):

        harmony.send(
            {
                "function": copy_files + replace_files,
                "function": f"PypeHarmony.Loaders.{self_name}.replaceFiles",
                "args": [files, node, 1]
            }
        )

        # Colour node.
        sig = harmony.signature("copyFile")
        func = """function %s(args){
            for( var i = 0; i <= args[0].length - 1; ++i)
            {
                var red_color = new ColorRGBA(255, 0, 0, 255);
                var green_color = new ColorRGBA(0, 255, 0, 255);
                if (args[1] == "red"){
                    node.setColor(args[0], red_color);
                }
                if (args[1] == "green"){
                    node.setColor(args[0], green_color);
                }
            }
        }
        %s
        """ % (sig, sig)
        if pype.lib.is_latest(representation):
            harmony.send({"function": func, "args": [node, "green"]})
            harmony.send(
                {
                    "function": "PypeHarmony.setColor",
                    "args": [node, [0, 255, 0, 255]]
                })
        else:
            harmony.send({"function": func, "args": [node, "red"]})
            harmony.send(
                {
                    "function": "PypeHarmony.setColor",
                    "args": [node, [255, 0, 0, 255]]
                })

        harmony.imprint(
            node, {"representation": str(representation["_id"])}
        )

    def remove(self, container):
        node = harmony.find_node_by_name(container["name"], "READ")
        """Remove loaded container.

        Args:
            container (dict): Container data.

        func = """function deleteNode(_node)
        {
            node.deleteNode(_node, true, true);
        }
        deleteNode
        """
        node = harmony.find_node_by_name(container["name"], "READ")
        harmony.send(
            {"function": func, "args": [node]}
            {"function": "PypeHarmony.deleteNode", "args": [node]}
        )
        harmony.imprint(node, {}, remove=True)

    def switch(self, container, representation):
        """Switch loaded representations."""
        self.update(container, representation)

@@ -2,13 +2,12 @@ import os
import shutil

from avalon import api, harmony
from avalon.vendor import Qt


class ImportPaletteLoader(api.Loader):
    """Import palettes."""

    families = ["harmony.palette"]
    families = ["palette"]
    representations = ["plt"]
    label = "Import Palette"

@@ -41,14 +40,14 @@ class ImportPaletteLoader(api.Loader):

        harmony.save_scene()

        # Don't allow instances with the same name.
        message_box = Qt.QtWidgets.QMessageBox()
        message_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
        msg = "Updated {}.".format(subset_name)
        msg += " You need to reload the scene to see the changes."
        message_box.setText(msg)
        message_box.exec_()

        harmony.send(
            {
                "function": "PypeHarmony.message",
                "args": msg
            })
        return name

    def remove(self, container):

pype/plugins/harmony/load/load_template.py (new file)
@@ -0,0 +1,143 @@
# -*- coding: utf-8 -*-
"""Load template."""
import tempfile
import zipfile
import os
import shutil
import uuid

from avalon import api, harmony
import pype.lib


class TemplateLoader(api.Loader):
    """Load Harmony template as container.

    .. todo::

        This must be implemented properly.

    """

    families = ["template", "workfile"]
    representations = ["*"]
    label = "Load Template"
    icon = "gift"

    def load(self, context, name=None, namespace=None, data=None):
        """Plugin entry point.

        Args:
            context (:class:`pyblish.api.Context`): Context.
            name (str, optional): Container name.
            namespace (str, optional): Container namespace.
            data (dict, optional): Additional data passed into loader.

        """
        # Load template.
        self_name = self.__class__.__name__
        temp_dir = tempfile.mkdtemp()
        zip_file = api.get_representation_path(context["representation"])
        template_path = os.path.join(temp_dir, "temp.tpl")
        with zipfile.ZipFile(zip_file, "r") as zip_ref:
            zip_ref.extractall(template_path)

        group_id = "{}".format(uuid.uuid4())

        container_group = harmony.send(
            {
                "function": f"PypeHarmony.Loaders.{self_name}.loadContainer",
                "args": [template_path,
                         context["asset"]["name"],
                         context["subset"]["name"],
                         group_id]
            }
        )["result"]

        # Cleanup the temp directory
        shutil.rmtree(temp_dir)

        # We must validate the group_node
        return harmony.containerise(
            name,
            namespace,
            container_group,
            context,
            self_name
        )

    def update(self, container, representation):
        """Update loaded containers.

        Args:
            container (dict): Container data.
            representation (dict): Representation data.

        """
        node_name = container["name"]
        node = harmony.find_node_by_name(node_name, "GROUP")
        self_name = self.__class__.__name__

        update_and_replace = False
        if pype.lib.is_latest(representation):
            self._set_green(node)
        else:
            self._set_red(node)

        update_and_replace = harmony.send(
            {
                "function": f"PypeHarmony.Loaders.{self_name}."
                            "askForColumnsUpdate",
                "args": []
            }
        )["result"]

        if update_and_replace:
            # FIXME: This won't work, need to implement it.
            harmony.send(
                {
                    "function": f"PypeHarmony.Loaders.{self_name}."
                                "replaceNode",
                    "args": []
                }
            )
        else:
            self.load(
                container["context"], container["name"],
                None, container["data"])

        harmony.imprint(
            node, {"representation": str(representation["_id"])}
        )

    def remove(self, container):
        """Remove container.

        Args:
            container (dict): container definition.

        """
        node = harmony.find_node_by_name(container["name"], "GROUP")
        harmony.send(
            {"function": "PypeHarmony.deleteNode", "args": [node]}
        )

    def switch(self, container, representation):
        """Switch representation containers."""
        self.update(container, representation)

    def _set_green(self, node):
        """Set node color to green `rgba(0, 255, 0, 255)`."""
        harmony.send(
            {
                "function": "PypeHarmony.setColor",
                "args": [node, [0, 255, 0, 255]]
            })

    def _set_red(self, node):
        """Set node color to red `rgba(255, 0, 0, 255)`."""
        harmony.send(
            {
                "function": "PypeHarmony.setColor",
                "args": [node, [255, 0, 0, 255]]
            })

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Collect information about current file."""
import os

import pyblish.api

@@ -5,24 +7,16 @@ from avalon import harmony


class CollectCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""
    """Inject the current working file into context."""

    order = pyblish.api.CollectorOrder - 0.5
    label = "Current File"
    hosts = ["harmony"]

    def process(self, context):
        """Inject the current working file"""
        sig = harmony.signature()
        func = """function %s()
        {
            return (
                scene.currentProjectPath() + "/" +
                scene.currentVersionName() + ".xstage"
            );
        }
        %s
        """ % (sig, sig)
        """Inject the current working file."""
        self_name = self.__class__.__name__

        current_file = harmony.send({"function": func})["result"]
        current_file = harmony.send(
            {"function": f"PypeHarmony.Publish.{self_name}.collect"})["result"]
        context.data["currentFile"] = os.path.normpath(current_file)

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Collect instances in Harmony."""
import json

import pyblish.api

@@ -8,7 +10,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
    """Gather instances by nodes metadata.

    This collector takes into account assets that are associated with
    a composite node and marked with a unique identifier;
    a composite node and marked with a unique identifier.

    Identifier:
        id (str): "pyblish.avalon.instance"

@@ -19,10 +21,19 @@ class CollectInstances(pyblish.api.ContextPlugin):
    hosts = ["harmony"]
    families_mapping = {
        "render": ["imagesequence", "review", "ftrack"],
        "harmony.template": []
        "harmony.template": [],
        "palette": ["palette", "ftrack"]
    }

    pair_media = True

    def process(self, context):
        """Plugin entry point.

        Args:
            context (:class:`pyblish.api.Context`): Context data.

        """
        nodes = harmony.send(
            {"function": "node.subNodes", "args": ["Top"]}
        )["result"]

@@ -46,6 +57,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
        )["result"]
        instance.data["families"] = self.families_mapping[data["family"]]

        # If set in plugin, pair the scene Version in ftrack with
        # thumbnails and review media.
        if (self.pair_media and instance.data["family"] == "scene"):
            context.data["scene_instance"] = instance

        # Produce diagnostic message for any graphical
        # user interface interested in visualising it.
        self.log.info(

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Collect palettes from Harmony."""
import os
import json

@@ -13,23 +15,12 @@ class CollectPalettes(pyblish.api.ContextPlugin):
    hosts = ["harmony"]

    def process(self, context):
        sig = harmony.signature()
        func = """function %s()
        {
            var palette_list = PaletteObjectManager.getScenePaletteList();

            var palettes = {};
            for(var i=0; i < palette_list.numPalettes; ++i)
        """Collector entry point."""
        self_name = self.__class__.__name__
        palettes = harmony.send(
            {
                var palette = palette_list.getPaletteByIndex(i);
                palettes[palette.getName()] = palette.id;
            }

            return palettes;
        }
        %s
        """ % (sig, sig)
        palettes = harmony.send({"function": func})["result"]
                "function": f"PypeHarmony.Publish.{self_name}.getPalettes",
            })["result"]

        for name, id in palettes.items():
            instance = context.create_instance(name)

@@ -37,7 +28,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
                "id": id,
                "family": "harmony.palette",
                "asset": os.environ["AVALON_ASSET"],
                "subset": "palette" + name
                "subset": "{}{}".format("palette", name)
            })
            self.log.info(
                "Created instance:\n" + json.dumps(

@@ -14,26 +14,11 @@ class CollectScene(pyblish.api.ContextPlugin):
    hosts = ["harmony"]

    def process(self, context):

        sig = harmony.signature()
        func = """function %s()
        {
            return [
                about.getApplicationPath(),
                scene.currentProjectPath(),
                scene.currentScene(),
                scene.getFrameRate(),
                scene.getStartFrame(),
                scene.getStopFrame(),
                sound.getSoundtrackAll().path(),
                scene.defaultResolutionX(),
                scene.defaultResolutionY()
            ]
        }
        %s
        """ % (sig, sig)
        """Plugin entry point."""
        result = harmony.send(
            {"function": func, "args": []}
            {
                "function": "PypeHarmony.getSceneSettings",
                "args": []}
        )["result"]

        context.data["applicationPath"] = result[0]

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Collect current workfile from Harmony."""
import pyblish.api
import os

@@ -10,10 +12,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
    hosts = ["harmony"]

    def process(self, context):
        """Plugin entry point."""
        family = "workfile"
        task = os.getenv("AVALON_TASK", None)
        subset = family + task.capitalize()
        sanitized_task_name = task[0].upper() + task[1:]
        basename = os.path.basename(context.data["currentFile"])
        subset = "{}{}".format(family, sanitized_task_name)

        # Create instance
        instance = context.create_instance(subset)

@@ -1,4 +1,9 @@
# -*- coding: utf-8 -*-
"""Extract palette from Harmony."""
import os
import csv

from PIL import Image, ImageDraw, ImageFont

from avalon import harmony
import pype.api

@@ -13,18 +18,53 @@ class ExtractPalette(pype.api.Extractor):
    families = ["harmony.palette"]

    def process(self, instance):
        sig = harmony.signature()
        func = """function %s(args)
        {
            var palette_list = PaletteObjectManager.getScenePaletteList();
            var palette = palette_list.getPaletteById(args[0]);
            return (palette.getPath() + "/" + palette.getName() + ".plt");
        }
        %s
        """ % (sig, sig)
        palette_file = harmony.send(
            {"function": func, "args": [instance.data["id"]]}
        )["result"]
        """Plugin entry point."""
        self_name = self.__class__.__name__
        result = harmony.send(
            {
                "function": f"PypeHarmony.Publish.{self_name}.getPalette",
                "args": instance.data["id"]
            })["result"]

        if not isinstance(result, list):
            self.log.error(f"Invalid reply: {result}")
            raise AssertionError("Invalid reply from server.")
        palette_name = result[0]
        palette_file = result[1]
        self.log.info(f"Got palette named {palette_name} "
                      f"and file {palette_file}.")

        tmp_thumb_path = os.path.join(os.path.dirname(palette_file),
                                      os.path.basename(palette_file)
                                      .split(".plt")[0] + "_swatches.png"
                                      )
        self.log.info(f"Temporary thumbnail path {tmp_thumb_path}")

        palette_version = str(instance.data.get("version")).zfill(3)

        self.log.info(f"Palette version {palette_version}")

        if not instance.data.get("representations"):
            instance.data["representations"] = []

        try:
            thumbnail_path = self.create_palette_thumbnail(palette_name,
                                                           palette_version,
                                                           palette_file,
                                                           tmp_thumb_path)
        except ValueError:
            self.log.error("Unsupported palette type for thumbnail.")

        else:
            thumbnail = {
                "name": "thumbnail",
                "ext": "png",
                "files": os.path.basename(thumbnail_path),
                "stagingDir": os.path.dirname(thumbnail_path),
                "tags": ["thumbnail"]
            }

            instance.data["representations"].append(thumbnail)

        representation = {
            "name": "plt",

@@ -32,4 +72,130 @@ class ExtractPalette(pype.api.Extractor):
            "files": os.path.basename(palette_file),
            "stagingDir": os.path.dirname(palette_file)
        }
        instance.data["representations"] = [representation]

        instance.data["representations"].append(representation)

    def create_palette_thumbnail(self,
                                 palette_name,
                                 palette_version,
                                 palette_path,
                                 dst_path):
        """Create thumbnail for palette file.

        Args:
            palette_name (str): Name of palette.
            palette_version (str): Version of palette.
            palette_path (str): Path to palette file.
            dst_path (str): Thumbnail path.

        Returns:
            str: Thumbnail path.

        """
        colors = {}

        with open(palette_path, newline='') as plt:
            plt_parser = csv.reader(plt, delimiter=" ")
            for i, line in enumerate(plt_parser):
                if i == 0:
                    continue
                while ("" in line):
                    line.remove("")
                # self.log.debug(line)
                if line[0] not in ["Solid"]:
                    raise ValueError("Unsupported palette type.")
                color_name = line[1].strip('"')
                colors[color_name] = {"type": line[0],
                                      "uuid": line[2],
                                      "rgba": (int(line[3]),
                                               int(line[4]),
                                               int(line[5]),
                                               int(line[6])),
                                      }
            plt.close()

        img_pad_top = 80
        label_pad_name = 30
        label_pad_rgb = 580
        swatch_pad_left = 300
        swatch_pad_top = 10
        swatch_w = 120
        swatch_h = 50

        image_w = 800
        image_h = (img_pad_top +
                   (len(colors.keys()) *
                    swatch_h) +
                   (swatch_pad_top *
                    len(colors.keys()))
                   )

        img = Image.new("RGBA", (image_w, image_h), "white")

        # For bg of colors with alpha, create checkerboard image
        checkers = Image.new("RGB", (swatch_w, swatch_h))
        pixels = checkers.load()

        # Make pixels white where (row+col) is odd
        for i in range(swatch_w):
            for j in range(swatch_h):
                if (i + j) % 2:
                    pixels[i, j] = (255, 255, 255)

        draw = ImageDraw.Draw(img)
        # TODO: This needs to be a font included with Pype because
        # arial is not available on other platforms than Windows.
        title_font = ImageFont.truetype("arial.ttf", 28)
        label_font = ImageFont.truetype("arial.ttf", 20)

        draw.text((label_pad_name, 20),
                  "{} (v{})".format(palette_name, palette_version),
                  "black",
                  font=title_font)

        for i, name in enumerate(colors):
            rgba = colors[name]["rgba"]
            # @TODO: Fix this so alpha colors are displayed with checkerboard
            # if not rgba[3] == "255":
            #     img.paste(checkers,
            #               (swatch_pad_left,
            #                img_pad_top + swatch_pad_top + (i * swatch_h))
            #               )
            #
            #     half_y = (img_pad_top + swatch_pad_top + (i * swatch_h))/2
            #
            #     draw.rectangle((
            #         swatch_pad_left,  # upper LX
            #         img_pad_top + swatch_pad_top + (i * swatch_h),  # upper LY
            #         swatch_pad_left + (swatch_w * 2),  # lower RX
            #         half_y),  # lower RY
            #         fill=rgba[:-1], outline=(0, 0, 0), width=2)
            #     draw.rectangle((
            #         swatch_pad_left,  # upper LX
            #         half_y,  # upper LY
            #         swatch_pad_left + (swatch_w * 2),  # lower RX
            #         img_pad_top + swatch_h + (i * swatch_h)),  # lower RY
            #         fill=rgba, outline=(0, 0, 0), width=2)
            # else:

            draw.rectangle((
                swatch_pad_left,  # upper left x
                img_pad_top + swatch_pad_top + (i * swatch_h),  # upper left y
                swatch_pad_left + (swatch_w * 2),  # lower right x
                img_pad_top + swatch_h + (i * swatch_h)),  # lower right y
                fill=rgba, outline=(0, 0, 0), width=2)

            draw.text((label_pad_name, img_pad_top + (i * swatch_h) + swatch_pad_top + (swatch_h / 4)),  # noqa: E501
                      name,
                      "black",
                      font=label_font)

            draw.text((label_pad_rgb, img_pad_top + (i * swatch_h) + swatch_pad_top + (swatch_h / 4)),  # noqa: E501
                      str(rgba),
                      "black",
                      font=label_font)

        draw = ImageDraw.Draw(img)

        img.save(dst_path)
        return dst_path

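# A hedged sketch of a palette line as the csv parser above reads it (the
# exact .plt layout is inferred from the parser, not confirmed by this
# commit):
#   Solid "shadow_blue" 0ab12cd34ef56789 12 34 56 255
#   # -> colors["shadow_blue"]["rgba"] == (12, 34, 56, 255)
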
@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract template."""
import os
import shutil

@@ -14,6 +16,7 @@ class ExtractTemplate(pype.api.Extractor):
    families = ["harmony.template"]

    def process(self, instance):
        """Plugin entry point."""
        staging_dir = self.staging_dir(instance)
        filepath = os.path.join(staging_dir, f"{instance.name}.tpl")

@ -61,60 +64,49 @@ class ExtractTemplate(pype.api.Extractor):
|
|||
"files": f"{instance.name}.zip",
|
||||
"stagingDir": staging_dir
|
||||
}
|
||||
instance.data["representations"] = [representation]
|
||||
|
||||
def get_backdrops(self, node):
|
||||
sig = harmony.signature()
|
||||
func = """function %s(probe_node)
|
||||
{
|
||||
var backdrops = Backdrop.backdrops("Top");
|
||||
var valid_backdrops = [];
|
||||
for(var i=0; i<backdrops.length; i++)
|
||||
{
|
||||
var position = backdrops[i].position;
|
||||
self.log.info(instance.data.get("representations"))
|
||||
if instance.data.get("representations"):
|
||||
instance.data["representations"].extend([representation])
|
||||
else:
|
||||
instance.data["representations"] = [representation]
|
||||
|
||||
var x_valid = false;
|
||||
var node_x = node.coordX(probe_node);
|
||||
if (position.x < node_x && node_x < (position.x + position.w)){
|
||||
x_valid = true
|
||||
};
|
||||
instance.data["version_name"] = "{}_{}".format(
|
||||
instance.data["subset"], os.environ["AVALON_TASK"])
|
||||
|
||||
var y_valid = false;
|
||||
var node_y = node.coordY(probe_node);
|
||||
if (position.y < node_y && node_y < (position.y + position.h)){
|
||||
y_valid = true
|
||||
};
|
||||
def get_backdrops(self, node: str) -> list:
|
||||
"""Get backdrops for the node.
|
||||
|
||||
if (x_valid && y_valid){
|
||||
valid_backdrops.push(backdrops[i])
|
||||
};
|
||||
}
|
||||
return valid_backdrops;
|
||||
}
|
||||
%s
|
||||
""" % (sig, sig)
|
||||
return harmony.send(
|
||||
{"function": func, "args": [node]}
|
||||
)["result"]
|
||||
Args:
|
||||
node (str): Node path.
|
||||
|
||||
def get_dependencies(self, node, dependencies):
|
||||
sig = harmony.signature()
|
||||
func = """function %s(args)
|
||||
{
|
||||
var target_node = args[0];
|
||||
var numInput = node.numberOfInputPorts(target_node);
|
||||
var dependencies = [];
|
||||
for (var i = 0 ; i < numInput; i++)
|
||||
{
|
||||
dependencies.push(node.srcNode(target_node, i));
|
||||
}
|
||||
return dependencies;
|
||||
}
|
||||
%s
|
||||
""" % (sig, sig)
|
||||
Returns:
|
||||
list: list of Backdrops.
|
||||
|
||||
"""
|
||||
self_name = self.__class__.__name__
|
||||
return harmony.send({
|
||||
"function": f"PypeHarmony.Publish.{self_name}.getBackdropsByNode",
|
||||
"args": node})["result"]
|
||||
|
||||
def get_dependencies(
|
||||
self, node: str, dependencies: list = None) -> list:
|
||||
"""Get node dependencies.
|
||||
|
||||
This will return recursive dependency list of given node.
|
||||
|
||||
Args:
|
||||
node (str): Path to the node.
|
||||
dependencies (list, optional): existing dependency list.
|
||||
|
||||
Returns:
|
||||
list: List of dependent nodes.
|
||||
|
||||
"""
|
||||
current_dependencies = harmony.send(
|
||||
{"function": func, "args": [node]}
|
||||
{
|
||||
"function": "PypeHarmony.getDependencies",
|
||||
"args": node}
|
||||
)["result"]
|
||||
|
||||
for dependency in current_dependencies:
|
||||
|
|
|
|||
|
|
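The hunk above replaces inline JavaScript snippets (built with harmony.signature() and sent as a string) with calls into the vendored PypeHarmony scripting library. A minimal sketch of the new calling pattern, assuming harmony.send from avalon.harmony as used by the plugin; the entry-point name is taken from the diff above:

    from avalon import harmony

    def get_backdrops_for(node_path):
        # The JS body now lives in the vendored PypeHarmony library, so the
        # Python side only names the entry point and passes the arguments.
        return harmony.send({
            "function": "PypeHarmony.Publish.ExtractTemplate.getBackdropsByNode",
            "args": node_path,
        })["result"]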
@@ -1,61 +0,0 @@
import os
import acre

from avalon import api, lib
import pype.api as pype
from pype.aport import lib as aportlib

log = pype.Logger().get_logger(__name__, "aport")


class Aport(api.Action):

    name = "aport"
    label = "Aport - Avalon's Server"
    icon = "retweet"
    order = 996

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        if "AVALON_TASK" in session:
            return True
        return False

    def process(self, session, **kwargs):
        """Implement the behavior for when the action is triggered

        Args:
            session (dict): environment dictionary

        Returns:
            Popen instance of newly spawned process

        """

        with pype.modified_environ(**session):
            # Get executable by name
            print(self.name)
            app = lib.get_application(self.name)
            executable = lib.which(app["executable"])

            # Run as server
            arguments = []

            tools_env = acre.get_tools([self.name])
            env = acre.compute(tools_env)
            env = acre.merge(env, current_env=dict(os.environ))

            if not env.get('AVALON_WORKDIR', None):
                os.environ["AVALON_WORKDIR"] = aportlib.get_workdir_template()

            env.update(dict(os.environ))

            try:
                lib.launch(
                    executable=executable,
                    args=arguments,
                    environment=env
                )
            except Exception as e:
                log.error(e)
            return

@@ -1,83 +0,0 @@
import os
import acre

from avalon import api, lib, io
import pype.api as pype


class PremierePro(api.Action):

    name = "premiere_2019"
    label = "Premiere Pro"
    icon = "premiere_icon"
    order = 996

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        if "AVALON_TASK" in session:
            return True
        return False

    def process(self, session, **kwargs):
        """Implement the behavior for when the action is triggered

        Args:
            session (dict): environment dictionary

        Returns:
            Popen instance of newly spawned process

        """

        with pype.modified_environ(**session):
            # Get executable by name
            app = lib.get_application(self.name)
            executable = lib.which(app["executable"])

            # Run as server
            arguments = []

            tools_env = acre.get_tools([self.name])
            env = acre.compute(tools_env)
            env = acre.merge(env, current_env=dict(os.environ))

            if not env.get('AVALON_WORKDIR', None):
                project_name = env.get("AVALON_PROJECT")
                anatomy = pype.Anatomy(project_name)
                os.environ['AVALON_PROJECT'] = project_name
                io.Session['AVALON_PROJECT'] = project_name

                task_name = os.environ.get(
                    "AVALON_TASK", io.Session["AVALON_TASK"]
                )
                asset_name = os.environ.get(
                    "AVALON_ASSET", io.Session["AVALON_ASSET"]
                )
                application = lib.get_application(
                    os.environ["AVALON_APP_NAME"]
                )

                project_doc = io.find_one({"type": "project"})
                data = {
                    "task": task_name,
                    "asset": asset_name,
                    "project": {
                        "name": project_doc["name"],
                        "code": project_doc["data"].get("code", '')
                    },
                    "hierarchy": pype.get_hierarchy(),
                    "app": application["application_dir"]
                }
                anatomy_filled = anatomy.format(data)
                workdir = anatomy_filled["work"]["folder"]

                os.environ["AVALON_WORKDIR"] = workdir

            env.update(dict(os.environ))

            lib.launch(
                executable=executable,
                args=arguments,
                environment=env
            )
            return
26
pype/plugins/lib.py
Normal file

@@ -0,0 +1,26 @@
import re


def get_unique_layer_name(layers, asset_name, subset_name):
    """
    Gets all layer names and if 'name' is present in them, increases
    suffix by 1 (eg. creates unique layer name - for Loader)
    Args:
        layers (list): of namedtuples, expects 'name' field present
        asset_name (string): in format asset_subset (Hero)
        subset_name (string): (LOD)

    Returns:
        (string): name_00X (without version)
    """
    name = "{}_{}".format(asset_name, subset_name)
    names = {}
    for layer in layers:
        layer_name = re.sub(r'_\d{3}$', '', layer.name)
        if layer_name in names.keys():
            names[layer_name] = names[layer_name] + 1
        else:
            names[layer_name] = 1
    occurrences = names.get(name, 0)

    return "{}_{:0>3d}".format(name, occurrences + 1)
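A quick usage sketch for the new helper (the namedtuple is illustrative; any objects with a name attribute work):

    from collections import namedtuple

    Layer = namedtuple("Layer", ["name"])
    layers = [Layer("Hero_LOD_001"), Layer("Hero_LOD_002"), Layer("Other_001")]

    # Two "Hero_LOD" layers already exist, so the next unique suffix is 003.
    get_unique_layer_name(layers, "Hero", "LOD")  # -> "Hero_LOD_003"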
@@ -4,14 +4,70 @@ import maya.cmds as cmds
from avalon import api, io
from avalon.maya.pipeline import containerise
from avalon.maya import lib
from Qt import QtWidgets
from Qt import QtWidgets, QtCore


class CameraWindow(QtWidgets.QDialog):

    def __init__(self, cameras):
        super(CameraWindow, self).__init__()
        self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint)

        self.camera = None

        self.widgets = {
            "label": QtWidgets.QLabel("Select camera for image plane."),
            "list": QtWidgets.QListWidget(),
            "warning": QtWidgets.QLabel("No cameras selected!"),
            "buttons": QtWidgets.QWidget(),
            "okButton": QtWidgets.QPushButton("Ok"),
            "cancelButton": QtWidgets.QPushButton("Cancel")
        }

        # Build warning.
        self.widgets["warning"].setVisible(False)
        self.widgets["warning"].setStyleSheet("color: red")

        # Build list.
        for camera in cameras:
            self.widgets["list"].addItem(camera)

        # Build buttons.
        layout = QtWidgets.QHBoxLayout(self.widgets["buttons"])
        layout.addWidget(self.widgets["okButton"])
        layout.addWidget(self.widgets["cancelButton"])

        # Build layout.
        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(self.widgets["label"])
        layout.addWidget(self.widgets["list"])
        layout.addWidget(self.widgets["buttons"])
        layout.addWidget(self.widgets["warning"])

        self.widgets["okButton"].pressed.connect(self.on_ok_pressed)
        self.widgets["cancelButton"].pressed.connect(self.on_cancel_pressed)
        self.widgets["list"].itemPressed.connect(self.on_list_itemPressed)

    def on_list_itemPressed(self, item):
        self.camera = item.text()

    def on_ok_pressed(self):
        if self.camera is None:
            self.widgets["warning"].setVisible(True)
            return

        self.close()

    def on_cancel_pressed(self):
        self.camera = None
        self.close()


class ImagePlaneLoader(api.Loader):
    """Specific loader of plate for image planes on selected camera."""

    families = ["plate", "render"]
    label = "Create imagePlane on selected camera."
    label = "Load imagePlane."
    representations = ["mov", "exr", "preview", "png"]
    icon = "image"
    color = "orange"

@@ -26,43 +82,24 @@ class ImagePlaneLoader(api.Loader):
            suffix="_",
        )

        # Getting camera from selection.
        selection = pc.ls(selection=True)

        # Get camera from user selection.
        camera = None
        default_cameras = [
            "frontShape", "perspShape", "sideShape", "topShape"
        ]
        cameras = [
            x for x in pc.ls(type="camera") if x.name() not in default_cameras
        ]
        camera_names = {x.getParent().name(): x for x in cameras}
        camera_names["Create new camera."] = "create_camera"
        window = CameraWindow(camera_names.keys())
        window.exec_()
        camera = camera_names[window.camera]

        if len(selection) > 1:
            QtWidgets.QMessageBox.critical(
                None,
                "Error!",
                "Multiple nodes selected. Please select only one.",
                QtWidgets.QMessageBox.Ok
            )
            return
        if camera == "create_camera":
            camera = pc.createNode("camera")

        if len(selection) < 1:
            result = QtWidgets.QMessageBox.critical(
                None,
                "Error!",
                "No camera selected. Do you want to create a camera?",
                QtWidgets.QMessageBox.Ok,
                QtWidgets.QMessageBox.Cancel
            )
            if result == QtWidgets.QMessageBox.Ok:
                camera = pc.createNode("camera")
            else:
                return
        else:
            relatives = pc.listRelatives(selection[0], shapes=True)
            if pc.ls(relatives, type="camera"):
                camera = selection[0]
            else:
                QtWidgets.QMessageBox.critical(
                    None,
                    "Error!",
                    "Selected node is not a camera.",
                    QtWidgets.QMessageBox.Ok
                )
        if camera is None:
            return

        try:

@@ -100,10 +137,16 @@ class ImagePlaneLoader(api.Loader):
        # Ensure OpenEXRLoader plugin is loaded.
        pc.loadPlugin("OpenEXRLoader.mll", quiet=True)

        message = (
            "Hold image sequence on first frame?"
            "\n{} files available.".format(
                len(context["representation"]["files"])
            )
        )
        reply = QtWidgets.QMessageBox.information(
            None,
            "Frame Hold.",
            "Hold image sequence on first frame?",
            message,
            QtWidgets.QMessageBox.Ok,
            QtWidgets.QMessageBox.Cancel
        )

@@ -6,7 +6,6 @@ from maya import cmds
import pyblish.api

from pype.hosts.maya import lib
from pype.lib import pairwise


SETTINGS = {"renderDensity",

@@ -78,7 +77,7 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
        connections = cmds.ls(connections, long=True)  # Ensure long names

        inputs = []
        for dest, src in pairwise(connections):
        for dest, src in lib.pairwise(connections):
            source_node, source_attr = src.split(".", 1)
            dest_node, dest_attr = dest.split(".", 1)

@@ -119,7 +118,7 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
        texture_filenames = []
        if image_search_paths:

            # TODO: Somehow this uses OS environment path separator, `:` vs `;`
            # Later on check whether this is pipeline OS cross-compatible.
            image_search_paths = [p for p in

@@ -127,7 +126,7 @@ class CollectYetiRig(pyblish.api.InstancePlugin):

        # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
        image_search_paths = self._replace_tokens(image_search_paths)

        # List all related textures
        texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
        self.log.info("Found %i texture(s)" % len(texture_filenames))

@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
"""Extract camera as Maya Scene."""
import os
import itertools

from maya import cmds

import avalon.maya
import pype.api
from pype.lib import grouper
from pype.hosts.maya import lib

@@ -36,6 +36,17 @@ def massage_ma_file(path):
    f.close()


def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks or blocks.

    Examples:
        grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx

    """
    args = [iter(iterable)] * n
    return itertools.izip_longest(fillvalue=fillvalue, *args)


def unlock(plug):
    """Unlocks attribute and disconnects inputs for a plug.
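The grouper helper moved inline above mirrors the recipe from the itertools docs; note that izip_longest is the Python 2 spelling (Python 3 renamed it to itertools.zip_longest). A quick sketch of the expected behaviour, assuming the Python 2 interpreter this module targets:

    list(grouper("ABCDEFG", 3, "x"))
    # -> [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]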
@@ -110,6 +110,9 @@ class ExtractPlayblast(pype.api.Extractor):
        if not instance.data.get("keepImages"):
            tags.append("delete")

        # Add camera node name to representation data
        camera_node_name = pm.ls(camera)[0].getTransform().getName()

        representation = {
            'name': 'png',
            'ext': 'png',

@@ -119,7 +122,8 @@ class ExtractPlayblast(pype.api.Extractor):
            "frameEnd": end,
            'fps': fps,
            'preview': True,
            'tags': tags
            'tags': tags,
            'camera_name': camera_node_name
        }
        instance.data["representations"].append(representation)

@@ -0,0 +1,87 @@
# -*- coding: utf-8 -*-
"""Collect Harmony scenes in Standalone Publisher."""
import copy
import glob
import os
from pprint import pformat

import pyblish.api


class CollectHarmonyScenes(pyblish.api.InstancePlugin):
    """Collect Harmony xstage files."""

    order = pyblish.api.CollectorOrder + 0.498
    label = "Collect Harmony Scene"
    hosts = ["standalonepublisher"]
    families = ["harmony.scene"]

    # presets
    ignored_instance_data_keys = ("name", "label", "stagingDir", "version")

    def process(self, instance):
        """Plugin entry point."""
        context = instance.context
        asset_data = instance.context.data["assetEntity"]
        asset_name = instance.data["asset"]
        subset_name = instance.data.get("subset", "sceneMain")
        anatomy_data = instance.context.data["anatomyData"]
        repres = instance.data["representations"]
        staging_dir = repres[0]["stagingDir"]
        files = repres[0]["files"]

        if not files.endswith(".zip"):
            # A harmony project folder / .xstage was dropped
            instance_name = f"{asset_name}_{subset_name}"
            task = instance.data.get("task", "harmonyIngest")

            # create new instance
            new_instance = context.create_instance(instance_name)

            # add original instance data except name key
            for key, value in instance.data.items():
                # Make sure value is copy since value may be object which
                # can be shared across all new created objects
                if key not in self.ignored_instance_data_keys:
                    new_instance.data[key] = copy.deepcopy(value)

            self.log.info("Copied data: {}".format(new_instance.data))

            # fix anatomy data
            anatomy_data_new = copy.deepcopy(anatomy_data)
            # updating hierarchy data
            anatomy_data_new.update({
                "asset": asset_data["name"],
                "task": task,
                "subset": subset_name
            })

            new_instance.data["label"] = f"{instance_name}"
            new_instance.data["subset"] = subset_name
            new_instance.data["extension"] = ".zip"
            new_instance.data["anatomyData"] = anatomy_data_new
            new_instance.data["publish"] = True

            # When a project folder was dropped vs. just an xstage file, find
            # the latest file xstage version and update the instance
            if not files.endswith(".xstage"):

                source_dir = os.path.join(
                    staging_dir, files
                ).replace("\\", "/")

                latest_file = max(glob.iglob(source_dir + "/*.xstage"),
                                  key=os.path.getctime).replace("\\", "/")

                new_instance.data["representations"][0]["stagingDir"] = (
                    source_dir
                )
                new_instance.data["representations"][0]["files"] = (
                    os.path.basename(latest_file)
                )
            self.log.info(f"Created new instance: {instance_name}")
            self.log.debug(f"_ inst_data: {pformat(new_instance.data)}")

            # set original instance for removal
            self.log.info("Context data: {}".format(context.data))
            instance.data["remove"] = True

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
"""Collect zips as Harmony scene files."""
import copy
from pprint import pformat

import pyblish.api


class CollectHarmonyZips(pyblish.api.InstancePlugin):
    """Collect Harmony zipped projects."""

    order = pyblish.api.CollectorOrder + 0.497
    label = "Collect Harmony Zipped Projects"
    hosts = ["standalonepublisher"]
    families = ["harmony.scene"]
    extensions = ["zip"]

    # presets
    ignored_instance_data_keys = ("name", "label", "stagingDir", "version")

    def process(self, instance):
        """Plugin entry point."""
        context = instance.context
        asset_data = instance.context.data["assetEntity"]
        asset_name = instance.data["asset"]
        subset_name = instance.data.get("subset", "sceneMain")
        anatomy_data = instance.context.data["anatomyData"]
        repres = instance.data["representations"]
        files = repres[0]["files"]

        if files.endswith(".zip"):
            # A zip file was dropped
            instance_name = f"{asset_name}_{subset_name}"
            task = instance.data.get("task", "harmonyIngest")

            # create new instance
            new_instance = context.create_instance(instance_name)

            # add original instance data except name key
            for key, value in instance.data.items():
                # Make sure value is copy since value may be object which
                # can be shared across all new created objects
                if key not in self.ignored_instance_data_keys:
                    new_instance.data[key] = copy.deepcopy(value)

            self.log.info("Copied data: {}".format(new_instance.data))

            # fix anatomy data
            anatomy_data_new = copy.deepcopy(anatomy_data)
            # updating hierarchy data
            anatomy_data_new.update({
                "asset": asset_data["name"],
                "task": task,
                "subset": subset_name
            })

            new_instance.data["label"] = f"{instance_name}"
            new_instance.data["subset"] = subset_name
            new_instance.data["extension"] = ".zip"
            new_instance.data["anatomyData"] = anatomy_data_new
            new_instance.data["publish"] = True

            self.log.info(f"Created new instance: {instance_name}")
            self.log.debug(f"_ inst_data: {pformat(new_instance.data)}")

            # set original instance for removal
            self.log.info("Context data: {}".format(context.data))
            instance.data["remove"] = True

@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
"""Collect instances that are marked for removal and remove them."""
import pyblish.api


class CollectRemoveMarked(pyblish.api.ContextPlugin):
    """Clean up instances marked for removal.

    Note:
        This is a workaround for race conditions and removing of instances
        used to generate other instances.
    """

    order = pyblish.api.CollectorOrder + 0.499
    label = 'Remove Marked Instances'

    def process(self, context):
        """Plugin entry point."""
        for instance in context:
            if instance.data.get('remove'):
                context.remove(instance)
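The collector offsets above are deliberate: CollectHarmonyZips runs at +0.497, CollectHarmonyScenes at +0.498, and CollectRemoveMarked at +0.499, so a source instance is dropped only after the collector that consumed it has created its replacement. The contract, sketched with the data key and call taken from the plugins above:

    # In an earlier collector, after creating new_instance from instance:
    instance.data["remove"] = True
    # CollectRemoveMarked, sorting last, then calls context.remove(instance).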
404
pype/plugins/standalonepublisher/publish/extract_harmony_zip.py
Normal file

@@ -0,0 +1,404 @@
# -*- coding: utf-8 -*-
"""Extract Harmony scene from zip file."""
import glob
import os
import shutil
import six
import sys
import tempfile
import zipfile

import pyblish.api
from avalon import api, io
import pype.api


class ExtractHarmonyZip(pype.api.Extractor):
    """Extract Harmony zip."""

    # Pyblish settings
    label = "Extract Harmony zip"
    order = pyblish.api.ExtractorOrder + 0.02
    hosts = ["standalonepublisher"]
    families = ["scene"]

    # Properties
    session = None
    task_types = None
    task_statuses = None
    assetversion_statuses = None

    # Presets
    create_workfile = True
    default_task = "harmonyIngest"
    default_task_type = "Ingest"
    default_task_status = "Ingested"
    assetversion_status = "Ingested"

    def process(self, instance):
        """Plugin entry point."""
        context = instance.context
        self.session = context.data["ftrackSession"]
        asset_doc = context.data["assetEntity"]
        # asset_name = instance.data["asset"]
        subset_name = instance.data["subset"]
        instance_name = instance.data["name"]
        family = instance.data["family"]
        task = context.data["anatomyData"]["task"] or self.default_task
        project_entity = instance.context.data["projectEntity"]
        ftrack_id = asset_doc["data"]["ftrackId"]
        repres = instance.data["representations"]
        submitted_staging_dir = repres[0]["stagingDir"]
        submitted_files = repres[0]["files"]

        # Get all the ftrack entities needed

        # Asset Entity
        query = 'AssetBuild where id is "{}"'.format(ftrack_id)
        asset_entity = self.session.query(query).first()

        # Project Entity
        query = 'Project where full_name is "{}"'.format(
            project_entity["name"]
        )
        project_entity = self.session.query(query).one()

        # Get Task types and Statuses for creation if needed
        self.task_types = self._get_all_task_types(project_entity)
        self.task_statuses = self._get_all_task_statuses(project_entity)

        # Get Statuses of AssetVersions
        self.assetversion_statuses = self._get_all_assetversion_statuses(
            project_entity
        )

        # Setup the status that we want for the AssetVersion
        if self.assetversion_status:
            instance.data["assetversion_status"] = self.assetversion_status

        # Create the default_task if it does not exist
        if task == self.default_task:
            existing_tasks = []
            entity_children = asset_entity.get('children', [])
            for child in entity_children:
                if child.entity_type.lower() == 'task':
                    existing_tasks.append(child['name'].lower())

            if task.lower() in existing_tasks:
                print("Task {} already exists".format(task))

            else:
                self._create_task(
                    name=task,
                    task_type=self.default_task_type,
                    task_status=self.default_task_status,
                    parent=asset_entity,
                )

        # Find latest version
        latest_version = self._find_last_version(subset_name, asset_doc)
        version_number = 1
        if latest_version is not None:
            version_number += latest_version

        self.log.info(
            "Next version of instance \"{}\" will be {}".format(
                instance_name, version_number
            )
        )

        # update instance info
        instance.data["task"] = task
        instance.data["version_name"] = "{}_{}".format(subset_name, task)
        instance.data["family"] = family
        instance.data["subset"] = subset_name
        instance.data["version"] = version_number
        instance.data["latestVersion"] = latest_version
        instance.data["anatomyData"].update({
            "subset": subset_name,
            "family": family,
            "version": version_number
        })

        # Copy `families` and check if `family` is not in current families
        families = instance.data.get("families") or list()
        if families:
            families = list(set(families))

        instance.data["families"] = families

        # Prepare staging dir for new instance and zip + sanitize scene name
        staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_")

        # Handle if the representation is a .zip and not an .xstage
        pre_staged = False
        if submitted_files.endswith(".zip"):
            submitted_zip_file = os.path.join(submitted_staging_dir,
                                              submitted_files
                                              ).replace("\\", "/")

            pre_staged = self.sanitize_prezipped_project(instance,
                                                         submitted_zip_file,
                                                         staging_dir)

        # Get the file to work with
        source_dir = str(repres[0]["stagingDir"])
        source_file = str(repres[0]["files"])

        staging_scene_dir = os.path.join(staging_dir, "scene")
        staging_scene = os.path.join(staging_scene_dir, source_file)

        # If the file is an .xstage / directory, we must stage it
        if not pre_staged:
            shutil.copytree(source_dir, staging_scene_dir)

        # Rename this latest file as 'scene.xstage'
        # This is determined in the collector from the latest scene in a
        # submitted directory / directory the submitted .xstage is in.
        # In the case of a zip file being submitted, this is determined within
        # the self.sanitize_project() method in this extractor.
        os.rename(staging_scene,
                  os.path.join(staging_scene_dir, "scene.xstage")
                  )

        # Required to set the current directory where the zip will end up
        os.chdir(staging_dir)

        # Create the zip file
        zip_filepath = shutil.make_archive(os.path.basename(source_dir),
                                           "zip",
                                           staging_scene_dir
                                           )

        zip_filename = os.path.basename(zip_filepath)

        self.log.info("Zip file: {}".format(zip_filepath))

        # Setup representation
        new_repre = {
            "name": "zip",
            "ext": "zip",
            "files": zip_filename,
            "stagingDir": staging_dir
        }

        self.log.debug(
            "Creating new representation: {}".format(new_repre)
        )
        instance.data["representations"] = [new_repre]

        self.log.debug("Completed prep of zipped Harmony scene: {}"
                       .format(zip_filepath)
                       )

        # If this extractor is setup to also extract a workfile...
        if self.create_workfile:
            workfile_path = self.extract_workfile(instance,
                                                  staging_scene
                                                  )

            self.log.debug("Extracted Workfile to: {}".format(workfile_path))

    def extract_workfile(self, instance, staging_scene):
        """Extract a valid workfile for this corresponding publish.

        Args:
            instance (:class:`pyblish.api.Instance`): Instance data.
            staging_scene (str): path of staging scene.

        Returns:
            str: Path to workdir.

        """
        # Since the staging scene was renamed to "scene.xstage" for publish
        # rename the staging scene in the temp stagingdir
        staging_scene = os.path.join(os.path.dirname(staging_scene),
                                     "scene.xstage")

        # Setup the data needed to form a valid work path filename
        anatomy = pype.api.Anatomy()
        project_entity = instance.context.data["projectEntity"]

        data = {
            "root": api.registered_root(),
            "project": {
                "name": project_entity["name"],
                "code": project_entity["data"].get("code", '')
            },
            "asset": instance.data["asset"],
            "hierarchy": pype.api.get_hierarchy(instance.data["asset"]),
            "family": instance.data["family"],
            "task": instance.data.get("task"),
            "subset": instance.data["subset"],
            "version": 1,
            "ext": "zip",
        }

        # Get a valid work filename first with version 1
        file_template = anatomy.templates["work"]["file"]
        anatomy_filled = anatomy.format(data)
        work_path = anatomy_filled["work"]["path"]

        # Get the final work filename with the proper version
        data["version"] = api.last_workfile_with_version(
            os.path.dirname(work_path), file_template, data, [".zip"]
        )[1]

        work_path = anatomy_filled["work"]["path"]
        base_name = os.path.splitext(os.path.basename(work_path))[0]

        staging_work_path = os.path.join(os.path.dirname(staging_scene),
                                         base_name + ".xstage"
                                         )

        # Rename this latest file after the workfile path filename
        os.rename(staging_scene, staging_work_path)

        # Required to set the current directory where the zip will end up
        os.chdir(os.path.dirname(os.path.dirname(staging_scene)))

        # Create the zip file
        zip_filepath = shutil.make_archive(base_name,
                                           "zip",
                                           os.path.dirname(staging_scene)
                                           )
        self.log.info(staging_scene)
        self.log.info(work_path)
        self.log.info(staging_work_path)
        self.log.info(os.path.dirname(os.path.dirname(staging_scene)))
        self.log.info(base_name)
        self.log.info(zip_filepath)

        # Create the work path on disk if it does not exist
        os.makedirs(os.path.dirname(work_path), exist_ok=True)
        shutil.copy(zip_filepath, work_path)

        return work_path

    def sanitize_prezipped_project(
            self, instance, zip_filepath, staging_dir):
        """Fix when a zip contains a folder.

        Handle zip file root contains folder instead of the project.

        Args:
            instance (:class:`pyblish.api.Instance`): Instance data.
            zip_filepath (str): Path to zip.
            staging_dir (str): Path to staging directory.

        """
        zip = zipfile.ZipFile(zip_filepath)
        zip_contents = zipfile.ZipFile.namelist(zip)

        # Determine if any xstage file is in root of zip
        project_in_root = [pth for pth in zip_contents
                           if "/" not in pth and pth.endswith(".xstage")]

        staging_scene_dir = os.path.join(staging_dir, "scene")

        # The project is nested, so we must extract and move it
        if not project_in_root:

            staging_tmp_dir = os.path.join(staging_dir, "tmp")

            with zipfile.ZipFile(zip_filepath, "r") as zip_ref:
                zip_ref.extractall(staging_tmp_dir)

            nested_project_folder = os.path.join(staging_tmp_dir,
                                                 zip_contents[0]
                                                 )

            shutil.copytree(nested_project_folder, staging_scene_dir)

        else:
            # The project is not nested, so we just extract to scene folder
            with zipfile.ZipFile(zip_filepath, "r") as zip_ref:
                zip_ref.extractall(staging_scene_dir)

        latest_file = max(glob.iglob(staging_scene_dir + "/*.xstage"),
                          key=os.path.getctime).replace("\\", "/")

        instance.data["representations"][0]["stagingDir"] = staging_scene_dir
        instance.data["representations"][0]["files"] = os.path.basename(
            latest_file)

        # We have staged the scene already so return True
        return True

    def _find_last_version(self, subset_name, asset_doc):
        """Find last version of subset."""
        subset_doc = io.find_one({
            "type": "subset",
            "name": subset_name,
            "parent": asset_doc["_id"]
        })

        if subset_doc is None:
            self.log.debug("Subset entity does not exist yet.")
        else:
            version_doc = io.find_one(
                {
                    "type": "version",
                    "parent": subset_doc["_id"]
                },
                sort=[("name", -1)]
            )
            if version_doc:
                return int(version_doc["name"])
        return None

    def _get_all_task_types(self, project):
        """Get all task types."""
        tasks = {}
        proj_template = project['project_schema']
        temp_task_types = proj_template['_task_type_schema']['types']

        for type in temp_task_types:
            if type['name'] not in tasks:
                tasks[type['name']] = type

        return tasks

    def _get_all_task_statuses(self, project):
        """Get all statuses of tasks."""
        statuses = {}
        proj_template = project['project_schema']
        temp_task_statuses = proj_template.get_statuses("Task")

        for status in temp_task_statuses:
            if status['name'] not in statuses:
                statuses[status['name']] = status

        return statuses

    def _get_all_assetversion_statuses(self, project):
        """Get statuses of all asset versions."""
        statuses = {}
        proj_template = project['project_schema']
        temp_task_statuses = proj_template.get_statuses("AssetVersion")

        for status in temp_task_statuses:
            if status['name'] not in statuses:
                statuses[status['name']] = status

        return statuses

    def _create_task(self, name, task_type, parent, task_status):
        """Create task."""
        task_data = {
            'name': name,
            'parent': parent,
        }
        self.log.info(task_type)
        task_data['type'] = self.task_types[task_type]
        task_data['status'] = self.task_statuses[task_status]
        self.log.info(task_data)
        task = self.session.create('Task', task_data)
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            six.reraise(tp, value, tb)

        return task

83
pype/plugins/tvpaint/load/load_image.py
Normal file

@@ -0,0 +1,83 @@
from avalon import api
from avalon.vendor import qargparse
from avalon.tvpaint import CommunicatorWrapper


class ImportImage(api.Loader):
    """Load image or image sequence to TVPaint as new layer."""

    families = ["render", "image", "background", "plate"]
    representations = ["*"]

    label = "Import Image"
    order = 1
    icon = "image"
    color = "white"

    import_script = (
        "filepath = \"{}\"\n"
        "layer_name = \"{}\"\n"
        "tv_loadsequence filepath {}PARSE layer_id\n"
        "tv_layerrename layer_id layer_name"
    )

    defaults = {
        "stretch": True,
        "timestretch": True,
        "preload": True
    }

    options = [
        qargparse.Boolean(
            "stretch",
            label="Stretch to project size",
            default=True,
            help="Stretch loaded image/s to project resolution?"
        ),
        qargparse.Boolean(
            "timestretch",
            label="Stretch to timeline length",
            default=True,
            help="Clip loaded image/s to timeline length?"
        ),
        qargparse.Boolean(
            "preload",
            label="Preload loaded image/s",
            default=True,
            help="Preload image/s?"
        )
    ]

    def load(self, context, name, namespace, options):
        stretch = options.get("stretch", self.defaults["stretch"])
        timestretch = options.get("timestretch", self.defaults["timestretch"])
        preload = options.get("preload", self.defaults["preload"])

        load_options = []
        if stretch:
            load_options.append("\"STRETCH\"")
        if timestretch:
            load_options.append("\"TIMESTRETCH\"")
        if preload:
            load_options.append("\"PRELOAD\"")

        load_options_str = ""
        for load_option in load_options:
            load_options_str += (load_option + " ")

        # Prepare layer name
        asset_name = context["asset"]["name"]
        version_name = context["version"]["name"]
        layer_name = "{}_{}_v{:0>3}".format(
            asset_name,
            name,
            version_name
        )
        # Fill import script with filename and layer name
        # - filename must not contain backwards slashes
        george_script = self.import_script.format(
            self.fname.replace("\\", "/"),
            layer_name,
            load_options_str
        )
        return CommunicatorWrapper.execute_george_through_file(george_script)
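For illustration, if self.fname were "C:/plates/sh010.png", the layer name resolved to "sh010_imageMain_v003", and all three options were left enabled, the format call above would render a George script like this (values hypothetical):

    filepath = "C:/plates/sh010.png"
    layer_name = "sh010_imageMain_v003"
    tv_loadsequence filepath "STRETCH" "TIMESTRETCH" "PRELOAD" PARSE layer_id
    tv_layerrename layer_id layer_name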
BIN
pype/resources/app_icons/aftereffects.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 25 KiB

@@ -191,7 +191,7 @@ def switch(asset_name, filepath=None, new=True):
    representations = []
    for container in containers:
        try:
            representation = pype.switch_item(container,
            representation = fusion_lib.switch_item(container,
                                                    asset_name=asset_name)
            representations.append(representation)
        except Exception as e:

4
pype/tests/README.md
Normal file

@@ -0,0 +1,4 @@
Tests for Pype
--------------
Trigger by:
`pype test --pype`

39
pype/tests/test_lib_restructuralization.py
Normal file

@@ -0,0 +1,39 @@
# Test for backward compatibility of restructure of lib.py into lib library
# Contains simple imports that should still work


def test_backward_compatibility(printer):
    printer("Test if imports still work")
    try:
        from pype.lib import filter_pyblish_plugins
        from pype.lib import execute_hook
        from pype.lib import PypeHook

        from pype.lib import get_latest_version
        from pype.lib import ApplicationLaunchFailed
        from pype.lib import launch_application
        from pype.lib import ApplicationAction
        from pype.lib import get_avalon_database
        from pype.lib import set_io_database

        from pype.lib import get_ffmpeg_tool_path
        from pype.lib import get_last_version_from_path
        from pype.lib import get_paths_from_environ
        from pype.lib import get_version_from_path
        from pype.lib import version_up

        from pype.lib import is_latest
        from pype.lib import any_outdated
        from pype.lib import get_asset
        from pype.lib import get_hierarchy
        from pype.lib import get_linked_assets
        from pype.lib import get_latest_version
        from pype.lib import ffprobe_streams

        from pype.hosts.fusion.lib import switch_item

        from pype.lib import source_hash
        from pype.lib import _subprocess

    except ImportError as e:
        raise

@@ -20,12 +20,22 @@ class ActionDelegate(QtWidgets.QStyledItemDelegate):
        if not is_group:
            return

        extender_width = int(option.decorationSize.width() / 2)
        extender_height = int(option.decorationSize.height() / 2)
        grid_size = option.widget.gridSize()
        x_offset = int(
            (grid_size.width() / 2)
            - (option.rect.width() / 2)
        )
        item_x = option.rect.x() - x_offset

        tenth_width = int(grid_size.width() / 10)
        tenth_height = int(grid_size.height() / 10)

        extender_width = tenth_width * 2
        extender_height = tenth_height * 2

        exteder_rect = QtCore.QRectF(
            option.rect.x() + (option.rect.width() / 10),
            option.rect.y() + (option.rect.height() / 10),
            item_x + tenth_width,
            option.rect.y() + tenth_height,
            extender_width,
            extender_height
        )

@@ -38,13 +48,14 @@ class ActionDelegate(QtWidgets.QStyledItemDelegate):
        painter.drawPath(path)

        divider = (2 * self.extender_lines) + 1
        line_height = extender_height / divider
        line_width = extender_width - (extender_width / 5)
        pos_x = exteder_rect.x() + extender_width / 10
        extender_offset = int(extender_width / 6)
        line_height = round(extender_height / divider)
        line_width = extender_width - (extender_offset * 2) + 1
        pos_x = exteder_rect.x() + extender_offset
        pos_y = exteder_rect.y() + line_height
        for _ in range(self.extender_lines):
            line_rect = QtCore.QRectF(
                pos_x, pos_y, line_width, round(line_height)
                pos_x, pos_y, line_width, line_height
            )
            painter.fillRect(line_rect, self.extender_fg)
            pos_y += 2 * line_height

@@ -1,10 +1,12 @@
from __future__ import print_function

import contextlib
import os
import sys
import ctypes
import platform
import contextlib

from . import compat, control, settings, util, window
from . import control, settings, util, window
from Qt import QtCore, QtGui, QtWidgets

self = sys.modules[__name__]

@@ -79,7 +81,11 @@ def show(parent=None):
    css = css.replace("url(\"", "url(\"%s" % root)

    with application() as app:
        compat.init()

        if platform.system().lower() == "windows":
            ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(
                u"pyblish_pype"
            )

        install_fonts()
        install_translator(app)

@@ -1,14 +0,0 @@
import os


def __windows_taskbar_compat():
    """Enable icon and taskbar grouping for Windows 7+"""

    import ctypes
    ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(
        u"pyblish_pype")


def init():
    if os.name == "nt":
        __windows_taskbar_compat()

@@ -1,5 +1,10 @@
WindowTitle = "Pyblish"  # Customize the window of the pyblish-lite window.
UseLabel = True  # Customize whether to show label names for plugins.
from .util import env_variable_to_bool

# Customize the window of the pyblish-lite window.
WindowTitle = "Pyblish"

# Customize whether to show label names for plugins.
UseLabel = True

# Customize which tab to start on. Possible choices are: "artist", "overview"
# and "terminal".

@@ -17,3 +22,6 @@ TerminalFilters = {
    "log_critical": True,
    "traceback": True,
}

# Allow animations in GUI
Animated = env_variable_to_bool("PYPE_PYBLISH_ANIMATED", True)

@@ -1,5 +1,9 @@
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from __future__ import (
    absolute_import,
    division,
    print_function,
    unicode_literals
)

import os
import sys

@@ -311,10 +315,14 @@ class OrderGroups:
        return float(group_range)


def env_variable_to_bool(env_key):
def env_variable_to_bool(env_key, default=False):
    """Boolean based on environment variable value."""
    # TODO: move to pype lib
    value = os.environ.get(env_key)
    if value is not None:
        value = value.lower()
        if value in ("true", "1", "yes"):
        if value in ("true", "1", "yes", "on"):
            return True
        return False
        elif value in ("false", "0", "no", "off"):
            return False
    return default
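A quick behaviour sketch for the reworked helper and its new default argument (values illustrative):

    import os

    os.environ["PYPE_PYBLISH_ANIMATED"] = "off"
    env_variable_to_bool("PYPE_PYBLISH_ANIMATED", True)   # False: "off" is now recognized

    del os.environ["PYPE_PYBLISH_ANIMATED"]
    env_variable_to_bool("PYPE_PYBLISH_ANIMATED", True)   # True: unset falls back to the default

    os.environ["PYPE_PYBLISH_ANIMATED"] = "maybe"
    env_variable_to_bool("PYPE_PYBLISH_ANIMATED")         # False: unrecognized value, default False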
@@ -71,7 +71,7 @@ class OverviewView(QtWidgets.QTreeView):
    toggled = QtCore.Signal(QtCore.QModelIndex, object)
    show_perspective = QtCore.Signal(QtCore.QModelIndex)

    def __init__(self, parent=None):
    def __init__(self, animated, parent=None):
        super(OverviewView, self).__init__(parent)

        self.horizontalScrollBar().hide()

@@ -83,7 +83,8 @@ class OverviewView(QtWidgets.QTreeView):
        self.setHeaderHidden(True)
        self.setRootIsDecorated(False)
        self.setIndentation(0)
        self.setAnimated(True)
        if animated:
            self.setAnimated(True)

    def event(self, event):
        if not event.type() == QtCore.QEvent.KeyPress:

@@ -157,8 +158,8 @@ class PluginView(OverviewView):


class InstanceView(OverviewView):
    def __init__(self, parent=None):
        super(InstanceView, self).__init__(parent)
    def __init__(self, *args, **kwargs):
        super(InstanceView, self).__init__(*args, **kwargs)
        self.viewport().setMouseTracking(True)
        self._pressed_group_index = None
        self._pressed_expander = None

@@ -166,14 +166,18 @@ class Window(QtWidgets.QDialog):
        # TODO add parent
        overview_page = QtWidgets.QWidget()

        overview_instance_view = view.InstanceView(parent=overview_page)
        overview_instance_view = view.InstanceView(
            animated=settings.Animated, parent=overview_page
        )
        overview_instance_delegate = delegate.InstanceDelegate(
            parent=overview_instance_view
        )
        overview_instance_view.setItemDelegate(overview_instance_delegate)
        overview_instance_view.setModel(instance_model)

        overview_plugin_view = view.PluginView(parent=overview_page)
        overview_plugin_view = view.PluginView(
            animated=settings.Animated, parent=overview_page
        )
        overview_plugin_delegate = delegate.PluginDelegate(
            parent=overview_plugin_view
        )

@@ -669,6 +673,11 @@ class Window(QtWidgets.QDialog):
            target_page.show()
            return

        if not settings.Animated:
            previous_page.setVisible(False)
            target_page.setVisible(True)
            return

        width = previous_page.frameGeometry().width()
        offset = QtCore.QPoint(direction * width, 0)

36
pype/vendor/OpenHarmony/.gitattributes
vendored
Normal file

@@ -0,0 +1,36 @@
$.html merge=ours
$.oAttribute.html merge=ours
$.oBackdrop.html merge=ours
$.oBox.html merge=ours
$.oColor.html merge=ours
$.oColorValue.html merge=ours
$.oColumn.html merge=ours
$.oDialog.html merge=ours
$.oDialog.Progress.html merge=ours
$.oDrawing.html merge=ours
$.oDrawingColumn.html merge=ours
$.oDrawingNode.html merge=ours
$.oElement.html merge=ours
$.oFile.html merge=ours
$.oFolder.html merge=ours
$.oFrame.html merge=ours
$.oGroupNode.html merge=ours
$.oList.html merge=ours
$.oNetwork.html merge=ours
$.oNode.html merge=ours
$.oNodeLink.html merge=ours
$.oPalette.html merge=ours
$.oPathPoint.html merge=ours
$.oPegNode.html merge=ours
$.oPoint.html merge=ours
$.oScene.html merge=ours
$.oThread.html merge=ours
$.oTimeline.html merge=ours
$.oTimelineLayer.html merge=ours
$.oUtils.html merge=ours
$.index.html merge=ours
$.global.html merge=ours
$.oDatabase.html merge=ours
$.oProgressDialog.html merge=ours
$.oProcess.html merge=ours
NodeTypes.html merge=ours

1
pype/vendor/OpenHarmony/.gitignore
vendored
Normal file

@@ -0,0 +1 @@

40
pype/vendor/OpenHarmony/Install.bat
vendored
Normal file

@@ -0,0 +1,40 @@
@echo off
SETLOCAL ENABLEDELAYEDEXPANSION
SET dlPath=%~dp0
set harmonyPrefsDir=%appdata%\Toon Boom Animation

SETX LIB_OPENHARMONY_PATH %dlPath%

echo -------------------------------------------------------------------
echo -- Starting install of openHarmony open source scripting library --
echo -------------------------------------------------------------------
echo OpenHarmony will be installed to the folder :
echo %dlpath%
echo Do not delete the contents of this folder.

REM Check Harmony Versions and make a list
for /d %%D in ("%harmonyPrefsDir%\*Harmony*") do (
    set harmonyVersionDir=%%~fD
    for /d %%V in ("!harmonyVersionDir!\*-layouts*") do (
        set "folderName=%%~nD"
        set "versionName=%%~nV"
        set "harmonyFolder=!folderName:~-7!"
        set "harmonyVersions=!versionName:~0,2!"
        echo Found Toonboom Harmony !harmonyFolder! !harmonyVersions! - installing openHarmony for this version.
        set "installDir=!harmonyPrefsDir!\Toon Boom Harmony !harmonyFolder!\!harmonyVersions!00-scripts\"

        if not "!installDir!" == "!dlPath!" (
            REM creating a "openHarmony.js" file in script folders
            if not exist "!installDir!" mkdir "!installDir!"

            cd !installDir!

            set "script=include(System.getenv('LIB_OPENHARMONY_PATH')+'openHarmony.js');"
            echo !script!> openHarmony.js
        )
        echo ---- done. ----
    )
)

echo - Install Complete -
pause

373
pype/vendor/OpenHarmony/LICENSE
vendored
Normal file
@@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================

1. Definitions
--------------

1.1. "Contributor"
    means each individual or legal entity that creates, contributes to
    the creation of, or owns Covered Software.

1.2. "Contributor Version"
    means the combination of the Contributions of others (if any) used
    by a Contributor and that particular Contributor's Contribution.

1.3. "Contribution"
    means Covered Software of a particular Contributor.

1.4. "Covered Software"
    means Source Code Form to which the initial Contributor has attached
    the notice in Exhibit A, the Executable Form of such Source Code
    Form, and Modifications of such Source Code Form, in each case
    including portions thereof.

1.5. "Incompatible With Secondary Licenses"
    means

    (a) that the initial Contributor has attached the notice described
        in Exhibit B to the Covered Software; or

    (b) that the Covered Software was made available under the terms of
        version 1.1 or earlier of the License, but not also under the
        terms of a Secondary License.

1.6. "Executable Form"
    means any form of the work other than Source Code Form.

1.7. "Larger Work"
    means a work that combines Covered Software with other material, in
    a separate file or files, that is not Covered Software.

1.8. "License"
    means this document.

1.9. "Licensable"
    means having the right to grant, to the maximum extent possible,
    whether at the time of the initial grant or subsequently, any and
    all of the rights conveyed by this License.

1.10. "Modifications"
    means any of the following:

    (a) any file in Source Code Form that results from an addition to,
        deletion from, or modification of the contents of Covered
        Software; or

    (b) any new file in Source Code Form that contains any Covered
        Software.

1.11. "Patent Claims" of a Contributor
    means any patent claim(s), including without limitation, method,
    process, and apparatus claims, in any patent Licensable by such
    Contributor that would be infringed, but for the grant of the
    License, by the making, using, selling, offering for sale, having
    made, import, or transfer of either its Contributions or its
    Contributor Version.

1.12. "Secondary License"
    means either the GNU General Public License, Version 2.0, the GNU
    Lesser General Public License, Version 2.1, the GNU Affero General
    Public License, Version 3.0, or any later versions of those
    licenses.

1.13. "Source Code Form"
    means the form of the work preferred for making modifications.

1.14. "You" (or "Your")
    means an individual or a legal entity exercising rights under this
    License. For legal entities, "You" includes any entity that
    controls, is controlled by, or is under common control with You. For
    purposes of this definition, "control" means (a) the power, direct
    or indirect, to cause the direction or management of such entity,
    whether by contract or otherwise, or (b) ownership of more than
    fifty percent (50%) of the outstanding shares or beneficial
    ownership of such entity.

2. License Grants and Conditions
--------------------------------

2.1. Grants

Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:

(a) under intellectual property rights (other than patent or trademark)
    Licensable by such Contributor to use, reproduce, make available,
    modify, display, perform, distribute, and otherwise exploit its
    Contributions, either on an unmodified basis, with Modifications, or
    as part of a Larger Work; and

(b) under Patent Claims of such Contributor to make, use, sell, offer
    for sale, have made, import, and otherwise transfer either its
    Contributions or its Contributor Version.

2.2. Effective Date

The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.

2.3. Limitations on Grant Scope

The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:

(a) for any code that a Contributor has removed from Covered Software;
    or

(b) for infringements caused by: (i) Your and any other third party's
    modifications of Covered Software, or (ii) the combination of its
    Contributions with other software (except as part of its Contributor
    Version); or

(c) under Patent Claims infringed by Covered Software in the absence of
    its Contributions.

This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).

2.4. Subsequent Licenses

No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).

2.5. Representation

Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.

2.6. Fair Use

This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.

2.7. Conditions

Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.

3. Responsibilities
-------------------

3.1. Distribution of Source Form

All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.

3.2. Distribution of Executable Form

If You distribute Covered Software in Executable Form then:

(a) such Covered Software must also be made available in Source Code
    Form, as described in Section 3.1, and You must inform recipients of
    the Executable Form how they can obtain a copy of such Source Code
    Form by reasonable means in a timely manner, at a charge no more
    than the cost of distribution to the recipient; and

(b) You may distribute such Executable Form under the terms of this
    License, or sublicense it under different terms, provided that the
    license for the Executable Form does not attempt to limit or alter
    the recipients' rights in the Source Code Form under this License.

3.3. Distribution of a Larger Work

You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).

3.4. Notices

You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.

3.5. Application of Additional Terms

You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.

4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------

If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.

5. Termination
--------------

5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.

5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.

5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
|
||||
prior to termination shall survive termination.
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 6. Disclaimer of Warranty *
|
||||
* ------------------------- *
|
||||
* *
|
||||
* Covered Software is provided under this License on an "as is" *
|
||||
* basis, without warranty of any kind, either expressed, implied, or *
|
||||
* statutory, including, without limitation, warranties that the *
|
||||
* Covered Software is free of defects, merchantable, fit for a *
|
||||
* particular purpose or non-infringing. The entire risk as to the *
|
||||
* quality and performance of the Covered Software is with You. *
|
||||
* Should any Covered Software prove defective in any respect, You *
|
||||
* (not any Contributor) assume the cost of any necessary servicing, *
|
||||
* repair, or correction. This disclaimer of warranty constitutes an *
|
||||
* essential part of this License. No use of any Covered Software is *
|
||||
* authorized under this License except under this disclaimer. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 7. Limitation of Liability *
|
||||
* -------------------------- *
|
||||
* *
|
||||
* Under no circumstances and under no legal theory, whether tort *
|
||||
* (including negligence), contract, or otherwise, shall any *
|
||||
* Contributor, or anyone who distributes Covered Software as *
|
||||
* permitted above, be liable to You for any direct, indirect, *
|
||||
* special, incidental, or consequential damages of any character *
|
||||
* including, without limitation, damages for lost profits, loss of *
|
||||
* goodwill, work stoppage, computer failure or malfunction, or any *
|
||||
* and all other commercial damages or losses, even if such party *
|
||||
* shall have been informed of the possibility of such damages. This *
|
||||
* limitation of liability shall not apply to liability for death or *
|
||||
* personal injury resulting from such party's negligence to the *
|
||||
* extent applicable law prohibits such limitation. Some *
|
||||
* jurisdictions do not allow the exclusion or limitation of *
|
||||
* incidental or consequential damages, so this exclusion and *
|
||||
* limitation may not apply to You. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
8. Litigation
|
||||
-------------
|
||||
|
||||
Any litigation relating to this License may be brought only in the
|
||||
courts of a jurisdiction where the defendant maintains its principal
|
||||
place of business and such litigation shall be governed by laws of that
|
||||
jurisdiction, without reference to its conflict-of-law provisions.
|
||||
Nothing in this Section shall prevent a party's ability to bring
|
||||
cross-claims or counter-claims.
|
||||
|
||||
9. Miscellaneous
|
||||
----------------
|
||||
|
||||
This License represents the complete agreement concerning the subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. Any law or regulation which provides
|
||||
that the language of a contract shall be construed against the drafter
|
||||
shall not be used to construe this License against a Contributor.
|
||||
|
||||
10. Versions of the License
|
||||
---------------------------
|
||||
|
||||
10.1. New Versions
|
||||
|
||||
Mozilla Foundation is the license steward. Except as provided in Section
|
||||
10.3, no one other than the license steward has the right to modify or
|
||||
publish new versions of this License. Each version will be given a
|
||||
distinguishing version number.
|
||||
|
||||
10.2. Effect of New Versions
|
||||
|
||||
You may distribute the Covered Software under the terms of the version
|
||||
of the License under which You originally received the Covered Software,
|
||||
or under the terms of any subsequent version published by the license
|
||||
steward.
|
||||
|
||||
10.3. Modified Versions
|
||||
|
||||
If you create software not governed by this License, and you want to
|
||||
create a new license for such software, you may create and use a
|
||||
modified version of this License if you rename the license and remove
|
||||
any references to the name of the license steward (except to note that
|
||||
such modified license differs from this License).
|
||||
|
||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||
Licenses
|
||||
|
||||
If You choose to distribute Source Code Form that is Incompatible With
|
||||
Secondary Licenses under the terms of this version of the License, the
|
||||
notice described in Exhibit B of this License must be attached.
|
||||
|
||||
Exhibit A - Source Code Form License Notice
|
||||
-------------------------------------------
|
||||
|
||||
This Source Code Form is subject to the terms of the Mozilla Public
|
||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
If it is not possible or desirable to put the notice in a particular
|
||||
file, then You may include the notice in a location (such as a LICENSE
|
||||
file in a relevant directory) where a recipient would be likely to look
|
||||
for such a notice.
|
||||
|
||||
You may add additional accurate notices of copyright ownership.
|
||||
|
||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||
---------------------------------------------------------
|
||||
|
||||
This Source Code Form is "Incompatible With Secondary Licenses", as
|
||||
defined by the Mozilla Public License, v. 2.0.
|
||||
144
pype/vendor/OpenHarmony/README.md
vendored
Normal file
@@ -0,0 +1,144 @@
# OpenHarmony - The Toonboom Harmony Open Source DOM Library

## Why did we make this library?

Ever tried to write a simple script for Toonboom Harmony, then got stumped by the sheer number of steps required to execute the simplest action? Or bored of coding the same helper routines again and again for every studio you work for?

Toonboom Harmony is a very powerful piece of software, with hundreds of functions and tools, and it unlocks a great number of possibilities for animation studios around the globe. And... being the product of the hard work of a small team forced to prioritise, it can also be a bit rustic at times!

We are users at heart, animators and riggers, who just want to interact with the software as simply as possible. Simplicity is at the heart of the design of openHarmony. But we are also developers, and we made the library for people like us who can't resist tweaking the software and bending it in all possible ways, and who are looking for powerful functions to help them do it.

This library aims to create a more direct way to interact with Toonboom through scripts by providing a more intuitive way to access its elements, helping with cumbersome and repetitive tasks, and unlocking untapped potential in its many available systems. So we can go from having to do things like this:

```javascript
// adding a Drawing to the scene with the official API
var myNodeName = "Drawing";
var myColumnName = myNodeName;
var myNode = node.add("Top", myNodeName, "READ", 0, 0, 0);
var myColumn = column.add(myColumnName, "DRAWING", "BOTTOM");
var myElement = element.add(myNodeName, "COLOR", 12, "SCAN", "TVG");
column.setElementIdOfDrawing(myColumnName, myElement);
node.linkAttr(myNode, "DRAWING.ELEMENT", myColumnName);
drawing.create(myElement, "1", false, false);
column.setEntry(myColumnName, 0, 1, "1");
```

to simply writing:

```javascript
// with openHarmony
var myNode = $.scene.root.addDrawingNode("Drawing");
myNode.element.addDrawing(1);
```

Less time spent coding, more time spent having ideas!

-----
## Do I need any knowledge of Toonboom scripting to use openHarmony?

OpenHarmony aims to be self-contained and to reimplement all the basic functions of the Harmony API. So, while prior experience might help you understand what goes on under the hood, knowledge of the official API is not required.

However, should you reach the limits of what openHarmony can offer at this time, you can still access the official API at any moment, as the sketch below shows. Maybe submit a request and the missing parts will be added eventually, or even delve into the code and add the necessary functions yourself if you feel like it!
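
The two can live side by side in the same script. Below is a minimal sketch of that idea; `addDrawingNode` comes from the example above, `node.getEnable` and `MessageLog.trace` are part of the official API, and the `path` property on the node object is an assumed name used only for illustration:

```javascript
// A minimal sketch of mixing openHarmony with the official API.
// "path" is an assumed property name, used only for illustration;
// check the online documentation for the real one.
include("openHarmony.js");

var myNode = $.scene.root.addDrawingNode("Drawing"); // openHarmony
var enabled = node.getEnable(myNode.path);           // official API on the same node
MessageLog.trace("Drawing node enabled: " + enabled);
```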

You can access a list of all the functions, how to use them, and examples, in the online documentation:

https://cfourney.github.io/OpenHarmony/$.html

-----

## The OpenHarmony Document Object Model or DOM

OpenHarmony is based around the four principles of Object-Oriented Programming: *Abstraction*, *Encapsulation*, *Inheritance*, *Polymorphism*.

This means every element of the Harmony scene has a corresponding abstraction existing in the code as a class. We have oNode, oScene, oColumn, etc. Unlike the official API, each class is designed to create objects that are instances of it and that encapsulate the element and all its actions. No more storing node paths, column abstract names and element ids just to interact with them; if you can create or call it, you can access all of its functionality. Nodes are specialised into DrawingNodes and PegNodes, which inherit from the Node class, and so on.

The openHarmony library doesn't merely provide *access* to the elements of a Toonboom Harmony file, it *models* them and their relationships to each other.

<img src="https://raw.githubusercontent.com/cfourney/OpenHarmony/master/oH_DOM.jpg" alt="The Document Object Model" width="1600">

The *Document Object Model* is a way to organise the elements of the Toonboom scene by highlighting the way they interact with each other. The Scene object has a root group, which contains Nodes, which have Attributes, which can be linked to Columns, which contain Frames, etc. This way it's always easy to find and access the content you are looking for. The attribute system has also been streamlined, and you can now set the values of node properties with a simple assignment syntax, as the sketch below shows.

We implemented global access to all elements and functions through the standard **dot notation** for the hierarchy, for ease of use and clarity of code.

Functions and methods also make extensive use of **optional parameters**, so there is no more need to fill in every argument when calling a function if the default behaviour is all that's needed.
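
As a concrete (if hypothetical) illustration of that assignment syntax, here is a short sketch. The `addDrawingNode` call is taken from the example above; the `position` attribute name and the no-argument form of `addDrawing` are assumptions made for the sake of the example, not a reference for the real attribute list:

```javascript
// Hypothetical sketch of the DOM style described above, not actual
// library code. "position" is an assumed attribute name; see the
// online documentation for the real list of node attributes.
include("openHarmony.js");

var myNode = $.scene.root.addDrawingNode("Drawing"); // from the example above

// Dot-notation assignment instead of chains of setTextAttr() calls:
myNode.position.x = 5;
MessageLog.trace(myNode.position.x); // reading goes through the same path

// Optional parameters: if the default frame is all that's needed, the
// frame argument could be omitted entirely (assumed default behaviour).
myNode.element.addDrawing();
```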

On the other hand, the "o" naming scheme allows us to retain full access to the official API at all times, so you can keep using it alongside the library and reach for openHarmony only when it really makes your life better.

-----

## Adopting openHarmony for your project

This library is made available under the [Mozilla Public License 2.0](https://www.mozilla.org/en-US/MPL/2.0/).

OpenHarmony can be downloaded directly from [this repository](https://github.com/cfourney/OpenHarmony/releases/). In order to make use of its functions, it needs to be unzipped next to the scripts you will be writing.

All you have to do is call:

```javascript
include("openHarmony.js");
```

at the beginning of your script.

You can ask your users to download their own copy of the library and store it alongside your scripts, or bundle it as you wish, as long as you include the license file provided in this repository.

The entire library is documented at:

https://cfourney.github.io/OpenHarmony/$.html

This includes a list of all the available functions as well as examples and references (such as the list of all available node attributes).

As time goes by, more functions will be added and the documentation will get richer as more examples are created.

-----
## Installation

To install:
- download the zip from [the releases page](https://github.com/cfourney/OpenHarmony/releases/),
- unzip the contents where you want to store the library,
- run `install.bat`.

This last step tells Harmony where to look to load the library, by setting the environment variable `LIB_OPENHARMONY_PATH` to the current folder.

It then creates an `openHarmony.js` file in the user scripts folder which loads the library files from the folder pointed to by the `LIB_OPENHARMONY_PATH` variable, so that scripts can make direct use of the library without having to worry about where openHarmony is stored. A minimal loader along these lines is sketched below.
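
This is only a plausible sketch of such a loader, assuming `System.getenv` is available in Harmony's scripting environment; the actual file written by `install.bat` may differ:

```javascript
// Plausible sketch of the loader placed in the user scripts folder,
// not the actual file generated by install.bat. Assumes System.getenv
// is available in the Harmony scripting environment.
var libPath = System.getenv("LIB_OPENHARMONY_PATH");
include(libPath + "/openHarmony.js");
```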

If you don't need a remote location for the library, you can also unzip the entire download into your user scripts folder.

-----

## Let's get technical. I can code and want to contribute, where do I start?

Reading and understanding the existing code, or at least the structure of the lib, is a great start, but not a requirement. You can simply start adding your classes to the $ object that is the root of the openHarmony lib, and start implementing. However, try to follow these guidelines, as they are the underlying principles that make the library consistent:

* There is a $ global object which contains all the class declarations and can be passed from one context to another to access the functions.

* Each class is an abstract representation of a core concept of Harmony, so naming and consistency (within the lib) are essential. But we are not bound by the structure or naming of Harmony if we find a better way, for example to make nomenclatures more consistent between the scripting interface and the UI.

* Each class defines a set of class properties with getters/setters for the values that are directly related to an entity of the scene. If you're thinking of making a getter function that doesn't require arguments, use a getter/setter instead (see the sketch after this list)!

* Each class also defines methods which can be called on the class instances to affect its contents, or its children's contents. For example, you'd go to the scene class to add the things that live in the scene, such as elements, columns and palettes. You wouldn't go to the column class or palette class to add one, because then what are you adding it *to*?

* We use encapsulation over passing arguments to functions wherever we can. Instead of adding a node to the scene and having to pass a group as an argument, adding a node is done directly by calling a method of the parent group. This way the parent/child relationship is always clear and the argument list is kept to a minimum.

* The goal is to make the most useful set of functions we can. Instead of making a large function that does a lot, consider extracting the small useful subroutines you need in your function into the existing classes directly.

* Each method argument besides the core one (for example, for adding nodes, we have to specify the type of the new node we create) must have a default fallback to make the argument optional.

* Don't ever use globals; if you need an enum, for example, use a class property instead.

* Don't use the official API namespace; any function that exists in the official API must remain accessible, otherwise things will break. Prefix your class names with "o" to avoid this and to signify that a function is part of openHarmony.

* We use the official API as little as we can in the code, so that if the implementation changes, we can fix it in a minimal number of places. Wrap it, then use the wrapper (e.g. oScene.name).

* Users of the lib should almost never have to use "new" to create instances of its classes. Create accessors/factories that will do that for them. For example, $.scn creates and returns an oScene instance, and $.scn.nodes returns new oNode instances, but users don't have to create them themselves, so it's as if they were always there, contained within. It also lets you create different subclasses from one factory. For example, $.scn.$node("Top/myNode") will return an oNode, oDrawingNode, oPegNode or oGroupNode object depending on the type of the node represented by the object.
  Exceptions are small, useful value-containing objects that don't belong to the Harmony hierarchy, like oPoint, oBox, oColorValue, etc.

* It's a JS library, so use camelCase naming and try to follow the Google style guide for JS:
  https://google.github.io/styleguide/jsguide.html

* Document your new functions using the JSDoc syntax: https://devdocs.io/jsdoc/howto-es2015-classes

* Make a branch, create a merge request when you're done, and we'll add the new stuff you added to the lib :)
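
As a schematic illustration of several of these guidelines (the "o" prefix, getter/setter properties, default argument fallbacks, and factories instead of "new"), here is a sketch of what a contributed class could look like. `oThing` and `$.thing` are invented names, this is not actual library code, and it assumes the scripting engine supports `Object.defineProperty`:

```javascript
// Schematic example of the contribution guidelines, with invented names.
// Assumes the scripting engine supports Object.defineProperty.
function oThing(dom, name){
  this.$ = dom;        // the $ object, passed from context to context
  this._name = name;
}

// A getter/setter class property instead of getName()/setName() methods.
Object.defineProperty(oThing.prototype, "name", {
  get: function(){ return this._name; },
  set: function(newName){ this._name = newName; }
});

// A method whose non-core argument has a default fallback.
oThing.prototype.duplicate = function(newName){
  if (typeof newName === "undefined") newName = this.name + "_copy";
  return new oThing(this.$, newName);
};

// A factory on $ so users never call "new" themselves:
// $.thing("myThing") returns a ready-made oThing instance.
$.thing = function(name){ return new oThing($, name); };
```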

-----
## Credits

This library was created by Mathieu Chaptel and Chris Fourney.

If you're using openHarmony and notice things that you would like to see in the library, please feel free to contribute to the code directly, or send us feedback through GitHub. This project will only be as good as the people working together can make it; we need every piece of code and feedback we can get, and would love to hear from you!
2
pype/vendor/OpenHarmony/build_doc.bat
vendored
Normal file
@@ -0,0 +1,2 @@
jsdoc -c ./documentation.json -t ../node_modules/jaguarjs-jsdoc
pause
6240
pype/vendor/OpenHarmony/docs/$.html
vendored
Normal file
File diff suppressed because it is too large
4227
pype/vendor/OpenHarmony/docs/$.oApp.html
vendored
Normal file
File diff suppressed because it is too large
3465
pype/vendor/OpenHarmony/docs/$.oArtLayer.html
vendored
Normal file
File diff suppressed because it is too large
4633
pype/vendor/OpenHarmony/docs/$.oAttribute.html
vendored
Normal file
File diff suppressed because it is too large
3871
pype/vendor/OpenHarmony/docs/$.oBackdrop.html
vendored
Normal file
File diff suppressed because it is too large
3882
pype/vendor/OpenHarmony/docs/$.oBox.html
vendored
Normal file
File diff suppressed because it is too large
4298
pype/vendor/OpenHarmony/docs/$.oColor.html
vendored
Normal file
File diff suppressed because it is too large
4040
pype/vendor/OpenHarmony/docs/$.oColorValue.html
vendored
Normal file
File diff suppressed because it is too large
4724
pype/vendor/OpenHarmony/docs/$.oColumn.html
vendored
Normal file
File diff suppressed because it is too large
3681
pype/vendor/OpenHarmony/docs/$.oDatabase.html
vendored
Normal file
File diff suppressed because it is too large
2859
pype/vendor/OpenHarmony/docs/$.oDialog.Progress.html
vendored
Normal file
File diff suppressed because it is too large
4552
pype/vendor/OpenHarmony/docs/$.oDialog.html
vendored
Normal file
File diff suppressed because it is too large
4878
pype/vendor/OpenHarmony/docs/$.oDrawing.html
vendored
Normal file
File diff suppressed because it is too large
5020
pype/vendor/OpenHarmony/docs/$.oDrawingColumn.html
vendored
Normal file
File diff suppressed because it is too large
11677
pype/vendor/OpenHarmony/docs/$.oDrawingNode.html
vendored
Normal file
File diff suppressed because it is too large
4291
pype/vendor/OpenHarmony/docs/$.oDynList.html
vendored
Normal file
File diff suppressed because it is too large
4453
pype/vendor/OpenHarmony/docs/$.oElement.html
vendored
Normal file
File diff suppressed because it is too large
4924
pype/vendor/OpenHarmony/docs/$.oFile.html
vendored
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.