mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
Merge branch 'master' into develop
commit f9645b7ddb
81 changed files with 2795 additions and 3117 deletions
11 pype/api.py
@@ -6,6 +6,12 @@ from pypeapp import (
     execute
 )
 
+from pypeapp.lib.mongo import (
+    decompose_url,
+    compose_url,
+    get_default_components
+)
+
 from .plugin import (
     Extractor,
@@ -30,6 +36,7 @@ from .lib import (
     get_hierarchy,
     get_subsets,
     get_version_from_path,
+    get_last_version_from_path,
     modified_environ,
     add_tool_to_environment
 )
@@ -43,6 +50,9 @@ __all__ = [
     "project_overrides_dir_path",
     "config",
     "execute",
+    "decompose_url",
+    "compose_url",
+    "get_default_components",
 
     # plugin classes
     "Extractor",
@@ -67,6 +77,7 @@ __all__ = [
     "get_asset",
     "get_subsets",
     "get_version_from_path",
+    "get_last_version_from_path",
     "modified_environ",
     "add_tool_to_environment",
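For orientation: after this change `pype.api` re-exports pypeapp's Mongo URL helpers. A minimal sketch of the intended round trip (the URL is a placeholder and the component keys are an assumption based on how the diff uses them elsewhere):

from pype.api import decompose_url, compose_url

components = decompose_url("mongodb://localhost:27017/avalon")
components["database"] = "pype"     # adjust a single component
print(compose_url(components))      # rebuild the connection string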
208 pype/hooks/celaction/prelaunch.py Normal file
@@ -0,0 +1,208 @@
import logging
import os
import winreg
import shutil
from pype.lib import PypeHook
from pype.api import (
    Anatomy,
    Logger,
    get_last_version_from_path
)

from avalon import io, api, lib

log = logging.getLogger(__name__)


class CelactionPrelaunchHook(PypeHook):
    """
    This hook checks whether a workfile exists for the current CelAction
    context. If not, it creates one from a template scene, and finally it
    passes the workfile path to the CelAction launcher shell script
    through an environment variable.
    """
    workfile_ext = "scn"

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:
        if not env:
            env = os.environ

        # initialize
        self._S = api.Session

        # get publish version of celaction
        app = "celaction_publish"

        # get context variables
        project = self._S["AVALON_PROJECT"] = env["AVALON_PROJECT"]
        asset = self._S["AVALON_ASSET"] = env["AVALON_ASSET"]
        task = self._S["AVALON_TASK"] = env["AVALON_TASK"]
        workdir = self._S["AVALON_WORKDIR"] = env["AVALON_WORKDIR"]

        # get workfile path
        anatomy_filled = self.get_anatomy_filled()
        workfile = anatomy_filled["work"]["file"]
        version = anatomy_filled["version"]

        # create workdir if it doesn't exist
        os.makedirs(workdir, exist_ok=True)
        self.log.info(f"Work dir is: `{workdir}`")

        # get last version of workfile
        workfile_last = get_last_version_from_path(
            workdir, workfile.split(version))

        if workfile_last:
            workfile = workfile_last

        workfile_path = os.path.join(workdir, workfile)

        # copy workfile from template if none exists on the path yet
        if not os.path.isfile(workfile_path):
            # try to get path from environment or use default
            # from `pype.celaction` dir
            template_path = env.get("CELACTION_TEMPLATE") or os.path.join(
                env.get("PYPE_MODULE_ROOT"),
                "pype/hosts/celaction/celaction_template_scene.scn"
            )
            self.log.info(
                f"Creating workfile from template: `{template_path}`")
            shutil.copy2(
                os.path.normpath(template_path),
                os.path.normpath(workfile_path)
            )

        self.log.info(f"Workfile to open: `{workfile_path}`")

        # add compulsory environment variable for opening the file
        env["PYPE_CELACTION_PROJECT_FILE"] = workfile_path

        # setting output parameters
        path = r"Software\CelAction\CelAction2D\User Settings"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(
            winreg.HKEY_CURRENT_USER,
            "Software\\CelAction\\CelAction2D\\User Settings", 0,
            winreg.KEY_ALL_ACCESS)

        # TODO: change to root path and pyblish standalone to premiere way
        pype_root_path = os.getenv("PYPE_SETUP_PATH")
        path = os.path.join(pype_root_path,
                            "pype.bat")

        winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, path)

        parameters = [
            "launch",
            f"--app {app}",
            f"--project {project}",
            f"--asset {asset}",
            f"--task {task}",
            "--currentFile \"*SCENE*\"",
            "--chunk *CHUNK*",
            "--frameStart *START*",
            "--frameEnd *END*",
            "--resolutionWidth *X*",
            "--resolutionHeight *Y*",
            # "--programDir \"'*PROGPATH*'\""
        ]
        winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
                          " ".join(parameters))

        # setting resolution parameters
        path = r"Software\CelAction\CelAction2D\User Settings\Dialogs"
        path += r"\SubmitOutput"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
                              winreg.KEY_ALL_ACCESS)
        winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
        winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920)
        winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080)

        # making sure message dialogs don't appear when overwriting
        path = r"Software\CelAction\CelAction2D\User Settings\Messages"
        path += r"\OverwriteScene"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
                              winreg.KEY_ALL_ACCESS)
        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)

        path = r"Software\CelAction\CelAction2D\User Settings\Messages"
        path += r"\SceneSaved"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
                              winreg.KEY_ALL_ACCESS)
        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)

        return True

    def get_anatomy_filled(self):
        root_path = api.registered_root()
        project_name = self._S["AVALON_PROJECT"]
        asset_name = self._S["AVALON_ASSET"]

        io.install()
        project_entity = io.find_one({
            "type": "project",
            "name": project_name
        })
        assert project_entity, (
            "Project '{0}' was not found."
        ).format(project_name)
        log.debug("Collected Project \"{}\"".format(project_entity))

        asset_entity = io.find_one({
            "type": "asset",
            "name": asset_name,
            "parent": project_entity["_id"]
        })
        assert asset_entity, (
            "No asset found by the name '{0}' in project '{1}'"
        ).format(asset_name, project_name)

        project_name = project_entity["name"]

        log.info(
            "Anatomy object collected for project \"{}\".".format(project_name)
        )

        hierarchy_items = asset_entity["data"]["parents"]
        hierarchy = ""
        if hierarchy_items:
            hierarchy = os.path.join(*hierarchy_items)

        template_data = {
            "root": root_path,
            "project": {
                "name": project_name,
                "code": project_entity["data"].get("code")
            },
            "asset": asset_entity["name"],
            "hierarchy": hierarchy.replace("\\", "/"),
            "task": self._S["AVALON_TASK"],
            "ext": self.workfile_ext,
            "version": 1,
            "username": os.getenv("PYPE_USERNAME", "").strip()
        }

        avalon_app_name = os.environ.get("AVALON_APP_NAME")
        if avalon_app_name:
            application_def = lib.get_application(avalon_app_name)
            app_dir = application_def.get("application_dir")
            if app_dir:
                template_data["app"] = app_dir

        anatomy = Anatomy(project_name)
        anatomy_filled = anatomy.format_all(template_data).get_solved()

        return anatomy_filled
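A sketch of exercising the hook directly, mirroring `execute()`'s signature (running it outside the launcher like this is an assumption):

import os
from pype.hooks.celaction.prelaunch import CelactionPrelaunchHook

hook = CelactionPrelaunchHook()
env = dict(os.environ)    # the hook mutates the mapping it is given
if hook.execute(env=env):
    print(env["PYPE_CELACTION_PROJECT_FILE"])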
23 pype/hooks/photoshop/prelaunch.py Normal file
@@ -0,0 +1,23 @@
import pype.lib
from pype.api import Logger


class PhotoshopPrelaunch(pype.lib.PypeHook):
    """This hook checks for the existence of PyWin.

    PyWin is a requirement for the Photoshop integration.
    """
    project_code = None

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:
        output = pype.lib._subprocess(["pip", "install", "pywin32==227"])
        self.log.info(output)
        return True
1 pype/hosts/celaction/__init__.py Normal file
@@ -0,0 +1 @@
kwargs = None
BIN pype/hosts/celaction/celaction_template_scene.scn Normal file
Binary file not shown.
121 pype/hosts/celaction/cli.py Normal file
@@ -0,0 +1,121 @@
import os
import sys
import copy
import argparse

from avalon import io
from avalon.tools import publish

import pyblish.api
import pyblish.util

from pype.api import Logger
import pype
import pype.celaction

log = Logger().get_logger("Celaction_cli_publisher")

publish_host = "celaction"

PUBLISH_PATH = os.path.join(pype.PLUGINS_DIR, publish_host, "publish")

PUBLISH_PATHS = [
    PUBLISH_PATH,
    os.path.join(pype.PLUGINS_DIR, "ftrack", "publish")
]


def cli():
    parser = argparse.ArgumentParser(prog="celaction_publish")

    parser.add_argument("--currentFile",
                        help="Pass file to Context as `currentFile`")

    parser.add_argument("--chunk",
                        help=("Render chunks on farm"))

    parser.add_argument("--frameStart",
                        help=("Start of frame range"))

    parser.add_argument("--frameEnd",
                        help=("End of frame range"))

    parser.add_argument("--resolutionWidth",
                        help=("Width of resolution"))

    parser.add_argument("--resolutionHeight",
                        help=("Height of resolution"))

    # parser.add_argument("--programDir",
    #                     help=("Directory with celaction program installation"))

    pype.celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__


def _prepare_publish_environments():
    """Prepares environments based on request data."""
    env = copy.deepcopy(os.environ)

    project_name = os.getenv("AVALON_PROJECT")
    asset_name = os.getenv("AVALON_ASSET")

    io.install()
    project_doc = io.find_one({
        "type": "project"
    })
    av_asset = io.find_one({
        "type": "asset",
        "name": asset_name
    })
    parents = av_asset["data"]["parents"]
    hierarchy = ""
    if parents:
        hierarchy = "/".join(parents)

    env["AVALON_PROJECT"] = project_name
    env["AVALON_ASSET"] = asset_name
    env["AVALON_TASK"] = os.getenv("AVALON_TASK")
    env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR")
    env["AVALON_HIERARCHY"] = hierarchy
    env["AVALON_PROJECTCODE"] = project_doc["data"].get("code", "")
    env["AVALON_APP"] = publish_host
    env["AVALON_APP_NAME"] = "celaction_local"

    env["PYBLISH_HOSTS"] = publish_host

    os.environ.update(env)


def main():
    # prepare all environments
    _prepare_publish_environments()

    # Registers pype's Global pyblish plugins
    pype.install()

    for path in PUBLISH_PATHS:
        path = os.path.normpath(path)

        if not os.path.exists(path):
            continue

        log.info(f"Registering path: {path}")
        pyblish.api.register_plugin_path(path)

    pyblish.api.register_host(publish_host)

    # Register project specific plugins
    project_name = os.environ["AVALON_PROJECT"]
    project_plugins_paths = os.getenv("PYPE_PROJECT_PLUGINS", "")
    for path in project_plugins_paths.split(os.pathsep):
        plugin_path = os.path.join(path, project_name, "plugins")
        if os.path.exists(plugin_path):
            pyblish.api.register_plugin_path(plugin_path)

    return publish.show()


if __name__ == "__main__":
    cli()
    result = main()
    sys.exit(not bool(result))
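Given the `SubmitParametersTitle` template the prelaunch hook writes to the registry, CelAction ends up invoking this publisher roughly like so (all values are placeholders):

pype.bat launch --app celaction_publish --project MyProject --asset sh010 --task animation --currentFile "C:/work/sh010.scn" --chunk 10 --frameStart 1001 --frameEnd 1100 --resolutionWidth 1920 --resolutionHeight 1080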
@@ -1,14 +1,149 @@
import os
import sys

from avalon import api, harmony
from avalon.vendor import Qt
import pyblish.api
from pype import lib


def set_scene_settings(settings):
    func = """function func(args)
    {
        if (args[0]["fps"])
        {
            scene.setFrameRate(args[0]["fps"]);
        }
        if (args[0]["frameStart"] && args[0]["frameEnd"])
        {
            var duration = args[0]["frameEnd"] - args[0]["frameStart"] + 1
            if (frame.numberOf() > duration)
            {
                frame.remove(
                    duration, frame.numberOf() - duration
                );
            }
            if (frame.numberOf() < duration)
            {
                frame.insert(
                    duration, duration - frame.numberOf()
                );
            }

            scene.setStartFrame(1);
            scene.setStopFrame(duration);
        }
        if (args[0]["resolutionWidth"] && args[0]["resolutionHeight"])
        {
            scene.setDefaultResolution(
                args[0]["resolutionWidth"], args[0]["resolutionHeight"], 41.112
            )
        }
    }
    func
    """
    harmony.send({"function": func, "args": [settings]})


def get_asset_settings():
    asset_data = lib.get_asset()["data"]
    fps = asset_data.get("fps")
    frame_start = asset_data.get("frameStart")
    frame_end = asset_data.get("frameEnd")
    resolution_width = asset_data.get("resolutionWidth")
    resolution_height = asset_data.get("resolutionHeight")

    return {
        "fps": fps,
        "frameStart": frame_start,
        "frameEnd": frame_end,
        "resolutionWidth": resolution_width,
        "resolutionHeight": resolution_height
    }


def ensure_scene_settings():
    settings = get_asset_settings()

    invalid_settings = []
    valid_settings = {}
    for key, value in settings.items():
        if value is None:
            invalid_settings.append(key)
        else:
            valid_settings[key] = value

    # Warn about missing attributes.
    print("Starting new QApplication..")
    app = Qt.QtWidgets.QApplication(sys.argv)

    message_box = Qt.QtWidgets.QMessageBox()
    message_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
    msg = "Missing attributes:"
    if invalid_settings:
        for item in invalid_settings:
            msg += f"\n{item}"
        message_box.setText(msg)
        message_box.exec_()

    # Garbage collect QApplication.
    del app

    set_scene_settings(valid_settings)


def export_template(backdrops, nodes, filepath):
    func = """function func(args)
    {
        // Add an extra node just so a new group can be created.
        var temp_node = node.add("Top", "temp_note", "NOTE", 0, 0, 0);
        var template_group = node.createGroup(temp_node, "temp_group");
        node.deleteNode( template_group + "/temp_note" );

        // This will make Node View to focus on the new group.
        selection.clearSelection();
        selection.addNodeToSelection(template_group);
        Action.perform("onActionEnterGroup()", "Node View");

        // Recreate backdrops in group.
        for (var i = 0 ; i < args[0].length; i++)
        {
            Backdrop.addBackdrop(template_group, args[0][i]);
        };

        // Copy-paste the selected nodes into the new group.
        var drag_object = copyPaste.copy(args[1], 1, frame.numberOf, "");
        copyPaste.pasteNewNodes(drag_object, template_group, "");

        // Select all nodes within group and export as template.
        Action.perform( "selectAll()", "Node View" );
        copyPaste.createTemplateFromSelection(args[2], args[3]);

        // Unfocus the group in Node view, delete all nodes and backdrops
        // created during the process.
        Action.perform("onActionUpToParent()", "Node View");
        node.deleteNode(template_group, true, true);
    }
    func
    """
    harmony.send({
        "function": func,
        "args": [
            backdrops,
            nodes,
            os.path.basename(filepath),
            os.path.dirname(filepath)
        ]
    })


def install():
    print("Installing Pype config...")

    plugins_directory = os.path.join(
-        os.path.dirname(os.path.dirname(__file__)), "plugins", "harmony"
+        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
+        "plugins",
+        "harmony"
    )

    pyblish.api.register_plugin_path(
@@ -21,10 +156,13 @@ def install():
         api.Creator, os.path.join(plugins_directory, "create")
     )
 
     # Register callbacks.
     pyblish.api.register_callback(
         "instanceToggled", on_pyblish_instance_toggled
     )
 
+    api.on("application.launched", ensure_scene_settings)
 
 
 def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle node enabling on instance toggles."""
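These Harmony helpers ship QtScript source as strings and push it through avalon's `harmony.send()` socket bridge. A minimal sketch of driving the settings sync by hand (the import path is an assumption; the dict mirrors what `get_asset_settings()` returns):

from pype.hosts import harmony as pype_harmony   # hypothetical import path

pype_harmony.set_scene_settings({
    "fps": 25,
    "frameStart": 1,
    "frameEnd": 100,
    "resolutionWidth": 1920,
    "resolutionHeight": 1080,
})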
40 pype/lib.py
@@ -17,6 +17,7 @@ import six
 import avalon.api
+from .api import config
 
 
 log = logging.getLogger(__name__)
@@ -469,6 +470,43 @@ def get_version_from_path(file):
     )
 
 
+def get_last_version_from_path(path_dir, filter):
+    """
+    Find the last version among a directory's content.
+
+    Args:
+        path_dir (string): directory path
+        filter (list): list of strings used as file name filter
+
+    Returns:
+        string: file name with last version
+
+    Example:
+        last_version_file = get_last_version_from_path(
+            "/project/shots/shot01/work", ["shot01", "compositing", "nk"])
+    """
+
+    assert os.path.isdir(path_dir), "`path_dir` argument needs to be directory"
+    assert isinstance(filter, list) and (
+        len(filter) != 0), "`filter` argument needs to be list and not empty"
+
+    filtred_files = list()
+
+    # form regex for filtering
+    patern = r".*".join(filter)
+
+    for f in os.listdir(path_dir):
+        if not re.findall(patern, f):
+            continue
+        filtred_files.append(f)
+
+    if filtred_files:
+        # sort in place so the highest version ends up last
+        filtred_files.sort()
+        return filtred_files[-1]
+    else:
+        return None
+
+
 def get_avalon_database():
     if io._database is None:
         set_io_database()
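Behaviour sketch for the new helper: with `["shot01", "compositing", "nk"]` as the filter, a directory holding `shot01_compositing_v001.nk` and `shot01_compositing_v002.nk` returns the `v002` name, because matches are sorted alphabetically and the last one wins. This relies on zero-padded version numbers; an unpadded `v9` would sort after `v10`.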
@@ -610,7 +648,7 @@ def get_subsets(asset_name,
 
         if len(repres_out) > 0:
             output_dict[subset["name"]] = {"version": version_sel,
-                                           "representaions": repres_out}
+                                           "representations": repres_out}
 
     return output_dict
@@ -16,6 +16,7 @@ import contextlib
 
 from avalon import schema
 from avalon.vendor import requests
+from avalon.io import extract_port_from_url
 
 # Third-party dependencies
 import pymongo
@@ -72,8 +73,17 @@ class DbConnector(object):
         self.Session.update(self._from_environment())
 
         timeout = int(self.Session["AVALON_TIMEOUT"])
-        self._mongo_client = pymongo.MongoClient(
-            self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)
+        mongo_url = self.Session["AVALON_MONGO"]
+        kwargs = {
+            "host": mongo_url,
+            "serverSelectionTimeoutMS": timeout
+        }
+
+        port = extract_port_from_url(mongo_url)
+        if port is not None:
+            kwargs["port"] = int(port)
+
+        self._mongo_client = pymongo.MongoClient(**kwargs)
 
         for retry in range(3):
             try:
@@ -381,6 +391,10 @@ class DbConnector(object):
             if document is None:
                 break
 
+            if document.get("type") == "master_version":
+                _document = self.find_one({"_id": document["version_id"]})
+                document["data"] = _document["data"]
+
             parents.append(document)
 
         return parents
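Why the kwargs dance: pymongo accepts the port either inside the URL or as a separate keyword, and `AVALON_MONGO` may omit it. A condensed sketch of the pattern (URL is a placeholder; `extract_port_from_url()` returning `None` for port-less URLs is an assumption based on how the diff guards it):

import pymongo
from avalon.io import extract_port_from_url

mongo_url = "mongodb://localhost:27017"
kwargs = {"host": mongo_url, "serverSelectionTimeoutMS": 1000}
port = extract_port_from_url(mongo_url)   # None when the URL has no port
if port is not None:
    kwargs["port"] = int(port)
client = pymongo.MongoClient(**kwargs)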
@@ -4,17 +4,14 @@ import json
 import bson
 import bson.json_util
 from pype.modules.rest_api import RestApi, abort, CallbackResult
-from pype.modules.ftrack.lib.custom_db_connector import DbConnector
+from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
 
 
 class AvalonRestApi(RestApi):
-    dbcon = DbConnector(
-        os.environ["AVALON_MONGO"],
-        os.environ["AVALON_DB"]
-    )
-
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
+        self.dbcon = DbConnector()
+        self.dbcon.install()
 
     @RestApi.route("/projects/<project_name>", url_prefix="/avalon", methods="GET")
164 pype/modules/ftrack/actions/action_batch_task_creation.py Normal file
@@ -0,0 +1,164 @@
"""
Taken from https://github.com/tokejepsen/ftrack-hooks/tree/master/batch_tasks
"""

from pype.modules.ftrack.lib import BaseAction


class BatchTasksAction(BaseAction):
    '''Batch Tasks action

    `label` a descriptive string identifying your action.
    `variant` To group actions together, give them the same
    label and specify a unique variant per action.
    `identifier` a unique identifier for your action.
    `description` a verbose descriptive text for your action
    '''
    label = "Batch Tasks"
    variant = None
    identifier = "batch-tasks"
    description = None

    def discover(self, session, entities, event):
        '''Return true if we can handle the selected entities.

        *session* is a `ftrack_api.Session` instance

        *entities* is a list of tuples each containing the entity type and
        the entity id. If the entity is hierarchical you will always get the
        entity type TypedContext; once retrieved through a get operation you
        will have the "real" entity type, e.g. Shot, Sequence or Asset Build.

        *event* the unmodified original event
        '''
        # Only discover the action if any selection is made.
        if entities:
            return True

        return False

    def get_task_form_items(self, session, number_of_tasks):
        items = []

        task_type_options = [
            {'label': task_type["name"], 'value': task_type["id"]}
            for task_type in session.query("Type")
        ]

        for index in range(0, number_of_tasks):
            items.extend(
                [
                    {
                        'value': '##Template for Task{0}##'.format(
                            index
                        ),
                        'type': 'label'
                    },
                    {
                        'label': 'Type',
                        'type': 'enumerator',
                        'name': 'task_{0}_typeid'.format(index),
                        'data': task_type_options
                    },
                    {
                        'label': 'Name',
                        'type': 'text',
                        'name': 'task_{0}_name'.format(index)
                    }
                ]
            )

        return items

    def ensure_task(self, session, name, task_type, parent):

        # Query for existing task.
        query = (
            'Task where type.id is "{0}" and name is "{1}" '
            'and parent.id is "{2}"'
        )
        task = session.query(
            query.format(
                task_type["id"],
                name,
                parent["id"]
            )
        ).first()

        # Create task.
        if not task:
            session.create(
                "Task",
                {
                    "name": name,
                    "type": task_type,
                    "parent": parent
                }
            )

    def launch(self, session, entities, event):
        '''Callback method for the custom action.

        Return either a bool (True if successful, False if the action
        failed) or a dictionary with the keys `message` and `success`; the
        message should be a string and will be displayed as feedback to the
        user, and success should be a bool, True if successful or False if
        the action failed.

        *session* is a `ftrack_api.Session` instance

        *entities* is a list of tuples each containing the entity type and
        the entity id. If the entity is hierarchical you will always get the
        entity type TypedContext; once retrieved through a get operation you
        will have the "real" entity type, e.g. Shot, Sequence or Asset Build.

        *event* the unmodified original event
        '''
        if 'values' in event['data']:
            values = event['data']['values']
            if 'number_of_tasks' in values:
                return {
                    'success': True,
                    'message': '',
                    'items': self.get_task_form_items(
                        session, int(values['number_of_tasks'])
                    )
                }
            else:
                # Create tasks on each entity
                for entity in entities:
                    for count in range(0, int(len(values.keys()) / 2)):
                        task_type = session.query(
                            'Type where id is "{0}"'.format(
                                values["task_{0}_typeid".format(count)]
                            )
                        ).one()

                        # Get name, or assume task type in lower case as name.
                        name = values["task_{0}_name".format(count)]
                        if not name:
                            name = task_type["name"].lower()

                        self.ensure_task(session, name, task_type, entity)

                session.commit()

                return {
                    'success': True,
                    'message': 'Action completed successfully'
                }

        return {
            'success': True,
            'message': "",
            'items': [
                {
                    'label': 'Number of tasks',
                    'type': 'number',
                    'name': 'number_of_tasks',
                    'value': 2
                }
            ]
        }


def register(session, plugins_presets=None):
    '''Register action. Called when used as an event plugin.'''

    BatchTasksAction(session, plugins_presets).register()
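For `number_of_tasks = 1`, `get_task_form_items()` yields one label/enumerator/text triplet; roughly (the type id is a placeholder):

[
    {"value": "##Template for Task0##", "type": "label"},
    {"label": "Type", "type": "enumerator", "name": "task_0_typeid",
     "data": [{"label": "Animation", "value": "<type-id>"}]},
    {"label": "Name", "type": "text", "name": "task_0_name"}
]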
@@ -158,7 +158,7 @@ class FirstVersionStatus(BaseEvent):
         filtered_ents = []
         for entity in event["data"].get("entities", []):
             # Care only about add actions
-            if entity["action"] != "add":
+            if entity.get("action") != "add":
                 continue
 
             # Filter AssetVersions
@@ -45,6 +45,7 @@ class SyncToAvalonEvent(BaseEvent):
         " where project_id is \"{}\" and name in ({})"
     )
     created_entities = []
+    report_splitter = {"type": "label", "value": "---"}
 
     def __init__(self, session, plugins_presets={}):
         '''Expects a ftrack_api.Session instance'''
@@ -6,6 +6,9 @@ class ThumbnailEvents(BaseEvent):
         """Updates thumbnails of entities from new AssetVersion."""
 
         for entity in event["data"].get("entities", []):
+            action = entity.get("action")
+            if not action:
+                continue
             if (
                 entity["action"] == "remove"
                 or entity["entityType"].lower() != "assetversion"
@@ -13,10 +13,12 @@ import time
 import uuid
 
 import ftrack_api
+import pymongo
 from pype.modules.ftrack.lib import credentials
 from pype.modules.ftrack.ftrack_server.lib import (
-    ftrack_events_mongo_settings, check_ftrack_url
+    check_ftrack_url, get_ftrack_event_mongo_info
 )
 
 import socket_thread
@@ -30,22 +32,19 @@ class MongoPermissionsError(Exception):
 
 def check_mongo_url(host, port, log_error=False):
     """Checks if mongo server is responding"""
-    sock = None
     try:
-        sock = socket.create_connection(
-            (host, port),
-            timeout=1
-        )
-        return True
-    except socket.error as err:
+        client = pymongo.MongoClient(host=host, port=port)
+        # Force connection on a request as the connect=True parameter of
+        # MongoClient seems to be useless here
+        client.server_info()
+    except pymongo.errors.ServerSelectionTimeoutError as err:
         if log_error:
             print("Can't connect to MongoDB at {}:{} because: {}".format(
                 host, port, err
             ))
         return False
-    finally:
-        if sock is not None:
-            sock.close()
 
     return True
 
 
 def validate_credentials(url, user, api):
@@ -190,9 +189,10 @@ def main_loop(ftrack_url):
 
     os.environ["FTRACK_EVENT_SUB_ID"] = str(uuid.uuid1())
 
     # Get mongo hostname and port for testing mongo connection
-    mongo_list = ftrack_events_mongo_settings()
-    mongo_hostname = mongo_list[0]
-    mongo_port = mongo_list[1]
+    mongo_uri, mongo_port, database_name, collection_name = (
+        get_ftrack_event_mongo_info()
+    )
 
     # Current file
    file_path = os.path.dirname(os.path.realpath(__file__))
@@ -270,13 +270,12 @@ def main_loop(ftrack_url):
         ftrack_accessible = check_ftrack_url(ftrack_url)
 
         if not mongo_accessible:
-            mongo_accessible = check_mongo_url(mongo_hostname, mongo_port)
+            mongo_accessible = check_mongo_url(mongo_uri, mongo_port)
 
         # Run threads only if Ftrack is accessible
         if not ftrack_accessible or not mongo_accessible:
             if not mongo_accessible and not printed_mongo_error:
-                mongo_url = mongo_hostname + ":" + mongo_port
-                print("Can't access Mongo {}".format(mongo_url))
+                print("Can't access Mongo {}".format(mongo_uri))
 
             if not ftrack_accessible and not printed_ftrack_error:
                 print("Can't access Ftrack {}".format(ftrack_url))
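Usage of the rewritten check stays the same; only the failure mode moves from a raw socket error to pymongo's server-selection timeout:

if not check_mongo_url("localhost", 27017, log_error=True):
    print("Mongo is down, retrying later")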
@@ -18,12 +18,13 @@ import ftrack_api.operation
 import ftrack_api._centralized_storage_scenario
 import ftrack_api.event
 from ftrack_api.logging import LazyLogMessage as L
 try:
     from urllib.parse import urlparse, parse_qs
 except ImportError:
     from urlparse import urlparse, parse_qs
 
-from pype.api import Logger
+from pype.api import (
+    Logger,
+    get_default_components,
+    decompose_url,
+    compose_url
+)
 
 from pype.modules.ftrack.lib.custom_db_connector import DbConnector
@@ -32,69 +33,29 @@ TOPIC_STATUS_SERVER = "pype.event.server.status"
 TOPIC_STATUS_SERVER_RESULT = "pype.event.server.status.result"
 
 
-def ftrack_events_mongo_settings():
-    host = None
-    port = None
-    username = None
-    password = None
-    collection = None
-    database = None
-    auth_db = ""
-
-    if os.environ.get('FTRACK_EVENTS_MONGO_URL'):
-        result = urlparse(os.environ['FTRACK_EVENTS_MONGO_URL'])
-
-        host = result.hostname
-        try:
-            port = result.port
-        except ValueError:
-            raise RuntimeError("invalid port specified")
-        username = result.username
-        password = result.password
-        try:
-            database = result.path.lstrip("/").split("/")[0]
-            collection = result.path.lstrip("/").split("/")[1]
-        except IndexError:
-            if not database:
-                raise RuntimeError("missing database name for logging")
-        try:
-            auth_db = parse_qs(result.query)['authSource'][0]
-        except KeyError:
-            # no auth db provided, mongo will use the one we are connecting to
-            pass
-    else:
-        host = os.environ.get('FTRACK_EVENTS_MONGO_HOST')
-        port = int(os.environ.get('FTRACK_EVENTS_MONGO_PORT', "0"))
-        database = os.environ.get('FTRACK_EVENTS_MONGO_DB')
-        username = os.environ.get('FTRACK_EVENTS_MONGO_USER')
-        password = os.environ.get('FTRACK_EVENTS_MONGO_PASSWORD')
-        collection = os.environ.get('FTRACK_EVENTS_MONGO_COL')
-        auth_db = os.environ.get('FTRACK_EVENTS_MONGO_AUTH_DB', 'avalon')
-
-    return host, port, database, username, password, collection, auth_db
-
-
 def get_ftrack_event_mongo_info():
-    host, port, database, username, password, collection, auth_db = (
-        ftrack_events_mongo_settings()
+    database_name = (
+        os.environ.get("FTRACK_EVENTS_MONGO_DB") or "pype"
     )
-    user_pass = ""
-    if username and password:
-        user_pass = "{}:{}@".format(username, password)
+    collection_name = (
+        os.environ.get("FTRACK_EVENTS_MONGO_COL") or "ftrack_events"
+    )
 
-    socket_path = "{}:{}".format(host, port)
+    mongo_url = os.environ.get("FTRACK_EVENTS_MONGO_URL")
+    if mongo_url is not None:
+        components = decompose_url(mongo_url)
+        _used_ftrack_url = True
+    else:
+        components = get_default_components()
+        _used_ftrack_url = False
 
-    dab = ""
-    if database:
-        dab = "/{}".format(database)
+    if not _used_ftrack_url or components["database"] is None:
+        components["database"] = database_name
+    components["collection"] = collection_name
 
-    auth = ""
-    if auth_db:
-        auth = "?authSource={}".format(auth_db)
+    uri = compose_url(components)
 
-    url = "mongodb://{}{}{}{}".format(user_pass, socket_path, dab, auth)
-
-    return url, database, collection
+    return uri, components["port"], database_name, collection_name
 
 
 def check_ftrack_url(url, log_errors=True):
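The new return shape is a four-tuple; a sketch of consuming it (the concrete values depend on the FTRACK_EVENTS_MONGO_* environment and are illustrative only):

uri, port, database_name, collection_name = get_ftrack_event_mongo_info()
# e.g. ("mongodb://localhost:27017", 27017, "pype", "ftrack_events")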
@@ -198,16 +159,17 @@ class StorerEventHub(SocketBaseEventHub):
 class ProcessEventHub(SocketBaseEventHub):
 
     hearbeat_msg = b"processor"
-    url, database, table_name = get_ftrack_event_mongo_info()
+    uri, port, database, table_name = get_ftrack_event_mongo_info()
 
     is_table_created = False
     pypelog = Logger().get_logger("Session Processor")
 
     def __init__(self, *args, **kwargs):
         self.dbcon = DbConnector(
-            mongo_url=self.url,
-            database_name=self.database,
-            table_name=self.table_name
+            self.uri,
+            self.port,
+            self.database,
+            self.table_name
         )
         super(ProcessEventHub, self).__init__(*args, **kwargs)
@@ -269,7 +231,7 @@ class ProcessEventHub(SocketBaseEventHub):
     def load_events(self):
         """Load not processed events sorted by stored date"""
         ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
-        result = self.dbcon.delete_many({
+        self.dbcon.delete_many({
             "pype_data.stored": {"$lte": ago_date},
             "pype_data.is_processed": True
         })
@@ -23,12 +23,8 @@ class SessionFactory:
     session = None
 
 
-    url, database, table_name = get_ftrack_event_mongo_info()
-    dbcon = DbConnector(
-        mongo_url=url,
-        database_name=database,
-        table_name=table_name
-    )
+    uri, port, database, table_name = get_ftrack_event_mongo_info()
+    dbcon = DbConnector(uri, port, database, table_name)
 
 # ignore_topics = ["ftrack.meta.connected"]
 ignore_topics = []
@@ -12,6 +12,7 @@ import atexit
 
 # Third-party dependencies
 import pymongo
+from pype.api import decompose_url
 
 
 class NotActiveTable(Exception):
@@ -63,13 +64,29 @@ class DbConnector:
     log = logging.getLogger(__name__)
     timeout = 1000
 
-    def __init__(self, mongo_url, database_name, table_name=None):
+    def __init__(
+        self, uri, port=None, database_name=None, table_name=None
+    ):
         self._mongo_client = None
         self._sentry_client = None
         self._sentry_logging_handler = None
         self._database = None
         self._is_installed = False
-        self._mongo_url = mongo_url
+
+        self._uri = uri
+        components = decompose_url(uri)
+        if port is None:
+            port = components.get("port")
+
+        if database_name is None:
+            database_name = components.get("database")
+
+        if database_name is None:
+            raise ValueError(
+                "Database is not defined for connection. {}".format(uri)
+            )
+
+        self._port = port
+        self._database_name = database_name
 
         self.active_table = table_name
@@ -95,10 +112,16 @@ class DbConnector:
         atexit.register(self.uninstall)
         logging.basicConfig()
 
-        self._mongo_client = pymongo.MongoClient(
-            self._mongo_url,
-            serverSelectionTimeoutMS=self.timeout
-        )
+        kwargs = {
+            "host": self._uri,
+            "serverSelectionTimeoutMS": self.timeout
+        }
+        if self._port is not None:
+            kwargs["port"] = self._port
+
+        self._mongo_client = pymongo.MongoClient(**kwargs)
+        if self._port is None:
+            self._port = self._mongo_client.PORT
 
         for retry in range(3):
             try:
@@ -113,11 +136,11 @@ class DbConnector:
         else:
             raise IOError(
                 "ERROR: Couldn't connect to %s in "
-                "less than %.3f ms" % (self._mongo_url, self.timeout)
+                "less than %.3f ms" % (self._uri, self.timeout)
             )
 
         self.log.info("Connected to %s, delay %.3f s" % (
-            self._mongo_url, time.time() - t1
+            self._uri, time.time() - t1
         ))
 
         self._database = self._mongo_client[self._database_name]
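Constructing the reworked connector now takes a URI first, with port and database optionally parsed out of it. A sketch (the URI is a placeholder):

dbcon = DbConnector(
    "mongodb://localhost:27017/pype",   # port and database taken from the URI
    table_name="ftrack_events"
)
dbcon.install()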
@@ -36,7 +36,7 @@ class BaseHandler(object):
     ignore_me = False
     preactions = []
 
-    def __init__(self, session, plugins_presets={}):
+    def __init__(self, session, plugins_presets=None):
         '''Expects a ftrack_api.Session instance'''
         self.log = Logger().get_logger(self.__class__.__name__)
         if not(
@@ -57,6 +57,8 @@ class BaseHandler(object):
         # Using decorator
         self.register = self.register_decorator(self.register)
         self.launch = self.launch_log(self.launch)
+        if plugins_presets is None:
+            plugins_presets = {}
         self.plugins_presets = plugins_presets
 
     # Decorator
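The default-argument change sidesteps Python's shared-mutable-default pitfall; a minimal illustration:

def bad(presets={}):          # one dict is shared across every call
    presets["x"] = 1
    return presets

def good(presets=None):       # a fresh dict per call
    if presets is None:
        presets = {}
    presets["x"] = 1
    return presets

assert bad() is bad()         # same object every time
assert good() is not good()   # independent objects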
@@ -16,6 +16,7 @@ import contextlib
 
 from avalon import schema
 from avalon.vendor import requests
+from avalon.io import extract_port_from_url
 
 # Third-party dependencies
 import pymongo
@@ -72,8 +73,17 @@ class DbConnector(object):
         self.Session.update(self._from_environment())
 
         timeout = int(self.Session["AVALON_TIMEOUT"])
-        self._mongo_client = pymongo.MongoClient(
-            self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)
+        mongo_url = self.Session["AVALON_MONGO"]
+        kwargs = {
+            "host": mongo_url,
+            "serverSelectionTimeoutMS": timeout
+        }
+
+        port = extract_port_from_url(mongo_url)
+        if port is not None:
+            kwargs["port"] = int(port)
+
+        self._mongo_client = pymongo.MongoClient(**kwargs)
 
         for retry in range(3):
             try:
@@ -381,6 +391,10 @@ class DbConnector(object):
             if document is None:
                 break
 
+            if document.get("type") == "master_version":
+                _document = self.find_one({"_id": document["version_id"]})
+                document["data"] = _document["data"]
+
             parents.append(document)
 
         return parents
@@ -1,8 +1,7 @@
-import os
 import collections
 from Qt import QtCore
 from pype.api import Logger
-from pypeapp.lib.log import _bootstrap_mongo_log
+from pypeapp.lib.log import _bootstrap_mongo_log, LOG_COLLECTION_NAME
 
 log = Logger().get_logger("LogModel", "LoggingModule")
 
@@ -41,11 +40,11 @@ class LogModel(QtCore.QAbstractItemModel):
         super(LogModel, self).__init__(parent)
         self._root_node = Node()
 
-        collection = os.environ.get('PYPE_LOG_MONGO_COL')
-        database = _bootstrap_mongo_log()
         self.dbcon = None
-        if collection in database.list_collection_names():
-            self.dbcon = database[collection]
+        # Crash if connection is not possible to skip this module
+        database = _bootstrap_mongo_log()
+        if LOG_COLLECTION_NAME in database.list_collection_names():
+            self.dbcon = database[LOG_COLLECTION_NAME]
 
     def add_log(self, log):
         node = Node(log)
@@ -1,20 +1,23 @@
 from Qt import QtWidgets
 
 from pype.api import Logger
 
 from ..gui.app import LogsWindow
 
-log = Logger().get_logger("LoggingModule", "logging")
-
 
 class LoggingModule:
     def __init__(self, main_parent=None, parent=None):
         self.parent = parent
+        self.log = Logger().get_logger(self.__class__.__name__, "logging")
 
-        self.window = LogsWindow()
+        try:
+            self.window = LogsWindow()
+            self.tray_menu = self._tray_menu
+        except Exception:
+            self.log.warning(
+                "Couldn't set Logging GUI due to error.", exc_info=True
+            )
 
     # Definition of Tray menu
-    def tray_menu(self, parent_menu):
+    def _tray_menu(self, parent_menu):
         # Menu for Tray App
         menu = QtWidgets.QMenu('Logging', parent_menu)
         # menu.setProperty('submenu', 'on')
@@ -147,7 +147,7 @@ def cli_publish(data, gui=True):
     envcopy["PYBLISH_HOSTS"] = "standalonepublisher"
     envcopy["SAPUBLISH_INPATH"] = json_data_path
     envcopy["SAPUBLISH_OUTPATH"] = return_data_path
-    envcopy["PYBLISH_GUI"] = "pyblish_lite"
+    envcopy["PYBLISH_GUI"] = "pyblish_pype"
 
     returncode = execute([
         sys.executable, "-u", "-m", "pyblish"
41 pype/plugins/celaction/publish/collect_audio.py Normal file
@@ -0,0 +1,41 @@
import pyblish.api
import os

import pype.api as pype
from pprint import pformat


class AppendCelactionAudio(pyblish.api.ContextPlugin):

    label = "Collect Audio for publishing"
    order = pyblish.api.CollectorOrder + 0.1

    def process(self, context):
        self.log.info('Collecting Audio Data')
        asset_entity = context.data["assetEntity"]

        # get all available representations
        subsets = pype.get_subsets(asset_entity["name"],
                                   representations=["audio"]
                                   )
        self.log.info(f"subsets is: {pformat(subsets)}")

        if not subsets.get("audioMain"):
            raise AttributeError("`audioMain` subset does not exist")

        reprs = subsets.get("audioMain", {}).get("representations", [])
        self.log.info(f"reprs is: {pformat(reprs)}")

        repr = next((r for r in reprs), None)
        if not repr:
            # raising a bare string is invalid; raise an exception instead
            raise AttributeError("Missing `audioMain` representation")
        self.log.info(f"representation is: {repr}")

        audio_file = repr.get('data', {}).get('path', "")

        if os.path.exists(audio_file):
            context.data["audioFile"] = audio_file
            self.log.info(
                'audio_file: {}, has been added to context'.format(audio_file))
        else:
            self.log.warning("Couldn't find any audio file on Ftrack.")
@@ -0,0 +1,23 @@
import pyblish.api
import pype.celaction


class CollectCelactionCliKwargs(pyblish.api.Collector):
    """ Collects all keyword arguments passed from the terminal """

    label = "Collect Celaction Cli Kwargs"
    order = pyblish.api.Collector.order - 0.1

    def process(self, context):
        kwargs = pype.celaction.kwargs.copy()

        self.log.info("Storing kwargs: %s" % kwargs)
        context.set_data("kwargs", kwargs)

        # get kwargs onto context data as keys with values
        for k, v in kwargs.items():
            self.log.info(f"Setting `{k}` to instance.data with value: `{v}`")
            if k in ["frameStart", "frameEnd"]:
                context.data[k] = kwargs[k] = int(v)
            else:
                context.data[k] = v
@@ -0,0 +1,91 @@
import os
from avalon import api
import pyblish.api


class CollectCelactionInstances(pyblish.api.ContextPlugin):
    """ Adds the celaction render instances """

    label = "Collect Celaction Instances"
    order = pyblish.api.CollectorOrder + 0.1

    def process(self, context):
        task = api.Session["AVALON_TASK"]
        current_file = context.data["currentFile"]
        staging_dir = os.path.dirname(current_file)
        scene_file = os.path.basename(current_file)
        version = context.data["version"]
        asset_entity = context.data["assetEntity"]

        shared_instance_data = {
            "asset": asset_entity["name"],
            "frameStart": asset_entity["data"]["frameStart"],
            "frameEnd": asset_entity["data"]["frameEnd"],
            "handleStart": asset_entity["data"]["handleStart"],
            "handleEnd": asset_entity["data"]["handleEnd"],
            "fps": asset_entity["data"]["fps"],
            "resolutionWidth": asset_entity["data"]["resolutionWidth"],
            "resolutionHeight": asset_entity["data"]["resolutionHeight"],
            "pixelAspect": 1,
            "step": 1,
            "version": version
        }

        celaction_kwargs = context.data.get("kwargs", {})

        if celaction_kwargs:
            shared_instance_data.update(celaction_kwargs)

        # workfile instance
        family = "workfile"
        subset = family + task.capitalize()
        # Create instance
        instance = context.create_instance(subset)

        # creating instance data
        instance.data.update({
            "subset": subset,
            "label": scene_file,
            "family": family,
            "families": [family],
            "representations": list()
        })

        # adding basic script data
        instance.data.update(shared_instance_data)

        # creating representation
        representation = {
            'name': 'scn',
            'ext': 'scn',
            'files': scene_file,
            "stagingDir": staging_dir,
        }

        instance.data["representations"].append(representation)

        self.log.info('Publishing Celaction workfile')

        # render instance
        family = "render.farm"
        subset = f"render{task}Main"
        instance = context.create_instance(name=subset)
        # getting instance state
        instance.data["publish"] = True

        # add assetEntity data into instance
        instance.data.update({
            "label": "{} - farm".format(subset),
            "family": family,
            "families": [family],
            "subset": subset
        })

        # adding basic script data
        instance.data.update(shared_instance_data)

        self.log.info('Publishing Celaction render instance')
        self.log.debug(f"Instance data: `{instance.data}`")

        for i in context:
            self.log.debug(f"{i.data['families']}")
29 pype/plugins/celaction/publish/collect_render_path.py Normal file
@@ -0,0 +1,29 @@
import os
import pyblish.api


class CollectRenderPath(pyblish.api.InstancePlugin):
    """Generate file and directory path where rendered images will be"""

    label = "Collect Render Path"
    order = pyblish.api.CollectorOrder + 0.495

    def process(self, instance):
        anatomy = instance.context.data["anatomy"]
        current_file = instance.context.data["currentFile"]
        work_dir = os.path.dirname(current_file)
        padding = anatomy.templates.get("frame_padding", 4)
        render_dir = os.path.join(
            work_dir, "render", "celaction"
        )
        render_path = os.path.join(
            render_dir,
            ".".join([instance.data["subset"], f"%0{padding}d", "png"])
        )

        # create dir if it doesn't exist
        os.makedirs(render_dir, exist_ok=True)

        instance.data["path"] = render_path

        self.log.info(f"Render output path set to: `{render_path}`")
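For a subset `renderAnimationMain` and the default padding of 4, the collector produces a path like `<workdir>/render/celaction/renderAnimationMain.%04d.png`.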
68 pype/plugins/celaction/publish/integrate_version_up.py Normal file
@@ -0,0 +1,68 @@
import shutil
import re
import pyblish.api


class VersionUpScene(pyblish.api.ContextPlugin):
    order = pyblish.api.IntegratorOrder
    label = 'Version Up Scene'
    families = ['scene']
    optional = True
    active = True

    def process(self, context):
        current_file = context.data.get('currentFile')
        v_up = get_version_up(current_file)
        self.log.debug('Current file is: {}'.format(current_file))
        self.log.debug('Version up: {}'.format(v_up))

        shutil.copy2(current_file, v_up)
        self.log.info('Scene saved into new version: {}'.format(v_up))


def version_get(string, prefix, suffix=None):
    """Extract version information from filenames used by DD (and Weta,
    apparently). These are _v# or /v# or .v# where v is a prefix string;
    in our case we use "v" for render version and "c" for camera track
    version. See the version.py and camera.py plugins for usage."""

    if string is None:
        raise ValueError("Empty version string - no match")

    regex = r"[/_.]{}\d+".format(prefix)
    matches = re.findall(regex, string, re.IGNORECASE)
    if not len(matches):
        msg = f"No `_{prefix}#` found in `{string}`"
        raise ValueError(msg)
    return (matches[-1:][0][1], re.search(r"\d+", matches[-1:][0]).group())


def version_set(string, prefix, oldintval, newintval):
    """Change version information in filenames used by DD (and Weta,
    apparently). These are _v# or /v# or .v# where v is a prefix string;
    in our case we use "v" for render version and "c" for camera track
    version. See the version.py and camera.py plugins for usage."""

    regex = r"[/_.]{}\d+".format(prefix)
    matches = re.findall(regex, string, re.IGNORECASE)
    if not len(matches):
        return ""

    # Filter to retain only version strings with matching numbers
    matches = filter(lambda s: int(s[2:]) == oldintval, matches)

    # Replace all version strings with matching numbers
    for match in matches:
        # use expression instead of expr so 0 prefix does not make octal
        fmt = "%%(#)0%dd" % (len(match) - 2)
        newfullvalue = match[0] + prefix + str(fmt % {"#": newintval})
        string = re.sub(match, newfullvalue, string)
    return string


def get_version_up(path):
    """ Returns the next version of the path """

    (prefix, v) = version_get(path, 'v')
    v = int(v)
    return version_set(path, prefix, v, v + 1)
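A quick check of what the version helpers do to a typical workfile name:

path = "C:/work/sh010_compositing_v012.scn"
print(version_get(path, "v"))   # ('v', '012')
print(get_version_up(path))     # C:/work/sh010_compositing_v013.scn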
234
pype/plugins/celaction/publish/submit_celaction_deadline.py
Normal file
234
pype/plugins/celaction/publish/submit_celaction_deadline.py
Normal file
|
|
@ -0,0 +1,234 @@
|
|||
import os
|
||||
import json
|
||||
import getpass
|
||||
|
||||
from avalon.vendor import requests
|
||||
import re
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
|
||||
"""Submit CelAction2D scene to Deadline
|
||||
|
||||
Renders are submitted to a Deadline Web Service as
|
||||
supplied via the environment variable DEADLINE_REST_URL
|
||||
|
||||
"""
|
||||
|
||||
label = "Submit CelAction to Deadline"
|
||||
order = pyblish.api.IntegratorOrder + 0.1
|
||||
hosts = ["celaction"]
|
||||
families = ["render.farm"]
|
||||
|
||||
deadline_department = ""
|
||||
deadline_priority = 50
|
||||
deadline_pool = ""
|
||||
deadline_pool_secondary = ""
|
||||
deadline_group = ""
|
||||
deadline_chunk_size = 1
|
||||
|
||||
def process(self, instance):
|
||||
context = instance.context
|
||||
|
||||
DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL")
|
||||
assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
|
||||
|
||||
self.deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL)
|
||||
self._comment = context.data.get("comment", "")
|
||||
self._deadline_user = context.data.get(
|
||||
"deadlineUser", getpass.getuser())
|
||||
self._frame_start = int(instance.data["frameStart"])
|
||||
self._frame_end = int(instance.data["frameEnd"])
|
||||
|
||||
# get output path
|
||||
render_path = instance.data['path']
|
||||
script_path = context.data["currentFile"]
|
||||
|
||||
response = self.payload_submit(instance,
|
||||
script_path,
|
||||
render_path
|
||||
)
|
||||
# Store output dir for unified publisher (filesequence)
|
||||
instance.data["deadlineSubmissionJob"] = response.json()
|
||||
|
||||
instance.data["outputDir"] = os.path.dirname(
|
||||
render_path).replace("\\", "/")
|
||||
|
||||
instance.data["publishJobState"] = "Suspended"
|
||||
instance.context.data['ftrackStatus'] = "Render"
|
||||
|
||||
# adding 2d render specific family for version identification in Loader
|
||||
instance.data["families"] = ["render2d"]
|
||||
|
||||
def payload_submit(self, instance, script_path, render_path):
    resolution_width = instance.data["resolutionWidth"]
    resolution_height = instance.data["resolutionHeight"]
    render_dir = os.path.normpath(os.path.dirname(render_path))
    script_name = os.path.basename(script_path)
    jobname = "%s - %s" % (script_name, instance.name)

    output_filename_0 = self.preview_fname(render_path)

    try:
        # Ensure render folder exists
        os.makedirs(render_dir)
    except OSError:
        pass

    # define chunk and priority
    chunk_size = instance.context.data.get("chunk")
    if chunk_size == 0:
        chunk_size = self.deadline_chunk_size

    # search for %02d-style pattern in the name, and the padding number
    search_results = re.search(r"(.%0)(\d)(d)[._]", render_path).groups()
    split_pattern = "".join(search_results)
    padding_number = int(search_results[1])

    args = [
        f"<QUOTE>{script_path}<QUOTE>",
        "-a",
        "-s <STARTFRAME>",
        "-e <ENDFRAME>",
        f"-d <QUOTE>{render_dir}<QUOTE>",
        f"-x {resolution_width}",
        f"-y {resolution_height}",
        f"-r <QUOTE>{render_path.replace(split_pattern, '')}<QUOTE>",
        f"-= AbsoluteFrameNumber=on -= PadDigits={padding_number}",
        "-= ClearAttachment=on",
    ]

    payload = {
        "JobInfo": {
            # Job name, as seen in Monitor
            "Name": jobname,

            # plugin definition
            "Plugin": "CelAction",

            # Top-level group name
            "BatchName": script_name,

            # Arbitrary username, for visualisation in Monitor
            "UserName": self._deadline_user,

            "Department": self.deadline_department,
            "Priority": self.deadline_priority,

            "Group": self.deadline_group,
            "Pool": self.deadline_pool,
            "SecondaryPool": self.deadline_pool_secondary,
            "ChunkSize": chunk_size,

            "Frames": f"{self._frame_start}-{self._frame_end}",
            "Comment": self._comment,

            # Optional, enable double-click to preview rendered
            # frames from Deadline Monitor
            "OutputFilename0": output_filename_0.replace("\\", "/")
        },
        "PluginInfo": {
            # Input
            "SceneFile": script_path,

            # Output directory
            "OutputFilePath": render_dir.replace("\\", "/"),

            # Plugin attributes
            "StartupDirectory": "",
            "Arguments": " ".join(args),

            # Resolve relative references
            "ProjectPath": script_path,
            "AWSAssetFile0": render_path,
        },

        # Mandatory for Deadline, may be empty
        "AuxFiles": []
    }

    plugin = payload["JobInfo"]["Plugin"]
    self.log.info("using render plugin: {}".format(plugin))

    self.log.info("Submitting...")
    self.log.info(json.dumps(payload, indent=4, sort_keys=True))

    # adding expected files to instance.data
    self.expected_files(instance, render_path)
    self.log.debug("__ expectedFiles: `{}`".format(
        instance.data["expectedFiles"]))
    response = requests.post(self.deadline_url, json=payload)

    if not response.ok:
        raise Exception(response.text)

    return response

def preflight_check(self, instance):
    """Ensure frameStart and frameEnd are integers."""

    for key in ("frameStart", "frameEnd"):
        value = instance.data[key]

        if int(value) == value:
            continue

        self.log.warning(
            "%f=%d was rounded off to nearest integer"
            % (value, int(value))
        )

def preview_fname(self, path):
    """Return output file path with #### for padding.

    Deadline requires the path to be formatted with # in place of numbers.
    For example `/path/to/render.####.png`

    Args:
        path (str): path to rendered images

    Returns:
        str

    """
    self.log.debug("_ path: `{}`".format(path))
    if "%" in path:
        search_results = re.search(r"[._](%0)(\d)(d)[._]", path).groups()
        split_pattern = "".join(search_results)
        split_path = path.split(split_pattern)
        hashes = "#" * int(search_results[1])
        return "".join([split_path[0], hashes, split_path[-1]])
    if "#" in path:
        self.log.debug("_ path: `{}`".format(path))
    return path

def expected_files(self, instance, path):
    """Create expected files in instance data."""
    if not instance.data.get("expectedFiles"):
        instance.data["expectedFiles"] = list()

    dir = os.path.dirname(path)
    file = os.path.basename(path)

    if "#" in file:
        pparts = file.split("#")
        padding = "%0{}d".format(len(pparts) - 1)
        file = pparts[0] + padding + pparts[-1]

    if "%" not in file:
        instance.data["expectedFiles"].append(path)
        return

    for i in range(self._frame_start, (self._frame_end + 1)):
        instance.data["expectedFiles"].append(
            os.path.join(dir, (file % i)).replace("\\", "/"))
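For context, `preview_fname` and `expected_files` together translate between the three padding notations in play here: printf-style (`%04d`) in the render path, hash padding (`####`) for Deadline's `OutputFilename0`, and explicit frame numbers for the expected-file list. A minimal sketch of that round trip, using an invented path rather than anything from the plugin:

import os
import re

render_path = "/mnt/renders/shot010.%04d.png"  # illustrative path

# %04d -> #### (what preview_fname produces for Deadline)
search = re.search(r"[._](%0)(\d)(d)[._]", render_path).groups()
pattern = "".join(search)                       # "%04d"
head, tail = render_path.split(pattern)
preview = head + "#" * int(search[1]) + tail    # ".../shot010.####.png"

# %04d -> explicit frames (what expected_files appends, one per frame)
expected = [
    os.path.join(os.path.dirname(render_path),
                 os.path.basename(render_path) % frame)
    for frame in range(1, 4)
]
# ['/mnt/renders/shot010.0001.png', ..., '/mnt/renders/shot010.0003.png']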
@@ -44,10 +44,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):

        family = instance.data['family'].lower()

        asset_type = ''
        asset_type = instance.data.get(
            "ftrackFamily", self.family_mapping[family]
        )
        asset_type = instance.data.get("ftrackFamily")
        if not asset_type and family in self.family_mapping:
            asset_type = self.family_mapping[family]

        # Ignore this instance if neither "ftrackFamily" nor a family
        # mapping is found.
        if not asset_type:
            return

        componentList = []
        ft_session = instance.context.data["ftrackSession"]

@@ -48,8 +48,18 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):

        data = asset_entity['data']

        context.data["frameStart"] = data.get("frameStart")
        context.data["frameEnd"] = data.get("frameEnd")
        frame_start = data.get("frameStart")
        if frame_start is None:
            frame_start = 1
            self.log.warning("Missing frame start. Defaulting to 1.")

        frame_end = data.get("frameEnd")
        if frame_end is None:
            frame_end = 2
            self.log.warning("Missing frame end. Defaulting to 2.")

        context.data["frameStart"] = frame_start
        context.data["frameEnd"] = frame_end

        handles = data.get("handles") or 0
        handle_start = data.get("handleStart")

@@ -72,7 +82,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
        context.data["handleStart"] = int(handle_start)
        context.data["handleEnd"] = int(handle_end)

        frame_start_h = data.get("frameStart") - context.data["handleStart"]
        frame_end_h = data.get("frameEnd") + context.data["handleEnd"]
        frame_start_h = frame_start - context.data["handleStart"]
        frame_end_h = frame_end + context.data["handleEnd"]
        context.data["frameStartHandle"] = frame_start_h
        context.data["frameEndHandle"] = frame_end_h

@@ -99,6 +99,17 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):

        instance.data["representations"] = representations

        # add audio if present in the metadata
        if data.get("audio"):
            instance.data.update({
                "audio": [{
                    "filename": data.get("audio"),
                    "offset": 0
                }]
            })
            self.log.info(
                f"Adding audio to instance: {instance.data['audio']}")

    def process(self, context):
        self._context = context

@@ -7,7 +7,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):

    order = pyblish.api.ExtractorOrder - 0.01
    label = "Extract Hierarchy To Avalon"
    families = ["clip", "shot"]
    families = ["clip", "shot", "editorial"]

    def process(self, context):
        if "hierarchyContext" not in context.data:

@@ -610,8 +610,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
        # NOTE Skipped using instance's resolution
        full_input_path_single_file = temp_data["full_input_path_single_file"]
        input_data = pype.lib.ffprobe_streams(full_input_path_single_file)[0]
        input_width = input_data["width"]
        input_height = input_data["height"]
        input_width = int(input_data["width"])
        input_height = int(input_data["height"])

        self.log.debug("pixel_aspect: `{}`".format(pixel_aspect))
        self.log.debug("input_width: `{}`".format(input_width))

@@ -631,6 +631,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
        output_width = input_width
        output_height = input_height

        output_width = int(output_width)
        output_height = int(output_height)

        self.log.debug(
            "Output resolution is {}x{}".format(output_width, output_height)
        )

@@ -44,7 +44,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        "frameStart"
        "frameEnd"
        'fps'
        "data": additional metadata for each representation.
    """

    label = "Integrate Asset New"

@@ -83,7 +82,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        "fbx",
        "textures",
        "action",
        "harmony.template"
        "harmony.template",
        "editorial"
    ]
    exclude_families = ["clip"]
    db_representation_context_keys = [

@@ -379,8 +379,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        dst = "{0}{1}{2}".format(
            dst_head,
            dst_padding,
            dst_tail
        ).replace("..", ".")
            dst_tail).replace("..", ".")

        self.log.debug("destination: `{}`".format(dst))
        src = os.path.join(stagingdir, src_file_name)

@@ -453,15 +452,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        if repre_id is None:
            repre_id = io.ObjectId()

        data = repre.get("data") or {}
        data.update({'path': dst, 'template': template})
        representation = {
            "_id": repre_id,
            "schema": "pype:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": repre['name'],
            "data": data,
            "data": {'path': dst, 'template': template},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context

@@ -561,10 +558,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        while True:
            try:
                copyfile(src, dst)
            except OSError as e:
                self.log.critical("Cannot copy {} to {}".format(src, dst))
                self.log.critical(e)
                six.reraise(*sys.exc_info())
            except (OSError, AttributeError) as e:
                self.log.warning(e)
                # try it again with shutil
                import shutil
                try:
                    self.log.debug("Copying files with shutil...")
                    shutil.copyfile(src, dst)
                except OSError as e:
                    self.log.critical("Cannot copy {} to {}".format(src, dst))
                    self.log.critical(e)
                    six.reraise(*sys.exc_info())
            if getsize(src) == getsize(dst):
                break

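The retry loop above falls back from `copyfile` to `shutil` and then verifies the copy by comparing file sizes. The same idea in a compact standalone form (paths are illustrative):

import shutil
from os.path import getsize

def copy_verified(src, dst):
    # Copy src to dst and fail loudly if the destination size differs.
    shutil.copyfile(src, dst)
    if getsize(src) != getsize(dst):
        raise IOError("Size mismatch copying {} -> {}".format(src, dst))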
@@ -14,6 +14,11 @@ import pyblish.api


def _get_script():
    """Get path to the image sequence script."""
    try:
        from pathlib import Path
    except ImportError:
        from pathlib2 import Path

    try:
        from pype.scripts import publish_filesequence
    except Exception:

@@ -23,7 +28,10 @@ def _get_script():
    if module_path.endswith(".pyc"):
        module_path = module_path[: -len(".pyc")] + ".py"

    return os.path.normpath(module_path)
    path = Path(os.path.normpath(module_path)).resolve(strict=True)
    assert path is not None, "Cannot determine path"

    return str(path)


def get_latest_version(asset_name, subset_name, family):

@@ -145,7 +153,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
    order = pyblish.api.IntegratorOrder + 0.2
    icon = "tractor"

    hosts = ["fusion", "maya", "nuke"]
    hosts = ["fusion", "maya", "nuke", "celaction"]

    families = ["render.farm", "prerender",
                "renderlayer", "imagesequence", "vrayscene"]

@@ -158,11 +166,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        "FTRACK_SERVER",
        "PYPE_METADATA_FILE",
        "AVALON_PROJECT",
        "PYPE_LOG_NO_COLORS"
        "PYPE_LOG_NO_COLORS",
        "PYPE_PYTHON_EXE"
    ]

    # pool used to do the publishing job
    # custom deadline attributes
    deadline_department = ""
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1

    # regex for finding frame number in string
    R_FRAME_NUMBER = re.compile(r'.+\.(?P<frame>[0-9]+)\..+')

@@ -215,8 +228,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
            "JobDependency0": job["_id"],
            "UserName": job["Props"]["User"],
            "Comment": instance.context.data.get("comment", ""),

            "Department": self.deadline_department,
            "ChunkSize": self.deadline_chunk_size,
            "Priority": job["Props"]["Pri"],

            "Group": self.deadline_group,
            "Pool": self.deadline_pool,
            "SecondaryPool": self.deadline_pool_secondary,

            "OutputDirectory0": output_dir
        },
        "PluginInfo": {

@@ -470,6 +490,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        if bake_render_path:
            preview = False

        if "celaction" in self.hosts:
            preview = True

        staging = os.path.dirname(list(collection)[0])
        success, rootless_staging_dir = (
            self.anatomy.find_root_template_from_path(staging)

@@ -819,6 +842,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
            "instances": instances
        }

        # add audio to metadata file if available
        audio_file = context.data.get("audioFile")
        if audio_file and os.path.isfile(audio_file):
            publish_job.update({"audio": audio_file})

        # pass Ftrack credentials in case of Muster
        if submission_type == "muster":
            ftrack = {


@@ -37,6 +37,21 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
    order = pype.api.ValidateContentsOrder
    families = ["ftrack"]
    optional = True
    # Ignore the standalone host, because it does not have an Ftrack entity
    # associated.
    hosts = [
        "blender",
        "fusion",
        "harmony",
        "houdini",
        "maya",
        "nuke",
        "nukestudio",
        "photoshop",
        "premiere",
        "resolve",
        "unreal"
    ]

    def process(self, instance):
        context = instance.context


@@ -8,12 +8,11 @@ except ImportError:
    import errno


class ValidateFFmpegInstalled(pyblish.api.Validator):
class ValidateFFmpegInstalled(pyblish.api.ContextPlugin):
    """Validate availability of the ffmpeg tool in PATH."""

    order = pyblish.api.ValidatorOrder
    label = 'Validate ffmpeg installation'
    families = ['review']
    optional = True

    def is_tool(self, name):

@@ -27,7 +26,7 @@ class ValidateFFmpegInstalled(pyblish.api.Validator):
            return False
        return True

    def process(self, instance):
    def process(self, context):
        ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
        self.log.info("ffmpeg path: `{}`".format(ffmpeg_path))
        if self.is_tool(ffmpeg_path) is False:

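The `R_FRAME_NUMBER` pattern defined on `ProcessSubmittedJobOnFarm` above extracts the frame number from a dot-delimited file name. A quick illustration with an invented name:

import re

R_FRAME_NUMBER = re.compile(r'.+\.(?P<frame>[0-9]+)\..+')

match = R_FRAME_NUMBER.match("shot010_beauty.0042.exr")  # illustrative name
assert match.group("frame") == "0042"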
42
pype/plugins/harmony/load/load_audio.py
Normal file

@@ -0,0 +1,42 @@
from avalon import api, harmony


func = """
function getUniqueColumnName( column_prefix )
{
    var suffix = 0;
    // find a unique name for the column
    var column_name = column_prefix;
    while(suffix < 2000)
    {
        if(!column.type(column_name))
            break;

        suffix = suffix + 1;
        column_name = column_prefix + "_" + suffix;
    }
    return column_name;
}

function func(args)
{
    var uniqueColumnName = getUniqueColumnName(args[0]);
    column.add(uniqueColumnName, "SOUND");
    column.importSound(uniqueColumnName, 1, args[1]);
}
func
"""


class ImportAudioLoader(api.Loader):
    """Import audio."""

    families = ["shot"]
    representations = ["wav"]
    label = "Import Audio"

    def load(self, context, name=None, namespace=None, data=None):
        wav_file = api.get_representation_path(context["representation"])
        harmony.send(
            {"function": func, "args": [context["subset"]["name"], wav_file]}
        )
228
pype/plugins/harmony/load/load_imagesequence.py
Normal file

@@ -0,0 +1,228 @@
import os

import clique

from avalon import api, harmony

copy_files = """function copyFile(srcFilename, dstFilename)
{
    var srcFile = new PermanentFile(srcFilename);
    var dstFile = new PermanentFile(dstFilename);
    srcFile.copy(dstFile);
}
"""

import_files = """var PNGTransparencyMode = 0; // Premultiplied with Black
var TGATransparencyMode = 0; // Premultiplied with Black
var SGITransparencyMode = 0; // Premultiplied with Black
var LayeredPSDTransparencyMode = 1; // Straight
var FlatPSDTransparencyMode = 2; // Premultiplied with White

function getUniqueColumnName( column_prefix )
{
    var suffix = 0;
    // find a unique name for the column
    var column_name = column_prefix;
    while(suffix < 2000)
    {
        if(!column.type(column_name))
            break;

        suffix = suffix + 1;
        column_name = column_prefix + "_" + suffix;
    }
    return column_name;
}

function import_files(args)
{
    var root = args[0];
    var files = args[1];
    var name = args[2];
    var start_frame = args[3];

    var vectorFormat = null;
    var extension = null;
    var filename = files[0];

    var pos = filename.lastIndexOf(".");
    if( pos < 0 )
        return null;

    extension = filename.substr(pos+1).toLowerCase();

    if(extension == "jpeg")
        extension = "jpg";
    if(extension == "tvg")
    {
        vectorFormat = "TVG";
        extension = "SCAN"; // element.add() will use this.
    }

    var elemId = element.add(
        name,
        "BW",
        scene.numberOfUnitsZ(),
        extension.toUpperCase(),
        vectorFormat
    );
    if (elemId == -1)
    {
        // Unknown file type most likely -- skip it.
        return null; // nothing to add.
    }

    var uniqueColumnName = getUniqueColumnName(name);
    column.add(uniqueColumnName, "DRAWING");
    column.setElementIdOfDrawing(uniqueColumnName, elemId);

    var read = node.add(root, name, "READ", 0, 0, 0);
    var transparencyAttr = node.getAttr(
        read, frame.current(), "READ_TRANSPARENCY"
    );
    var opacityAttr = node.getAttr(read, frame.current(), "OPACITY");
    transparencyAttr.setValue(true);
    opacityAttr.setValue(true);

    var alignmentAttr = node.getAttr(read, frame.current(), "ALIGNMENT_RULE");
    alignmentAttr.setValue("ASIS");

    var transparencyModeAttr = node.getAttr(
        read, frame.current(), "applyMatteToColor"
    );
    if (extension == "png")
        transparencyModeAttr.setValue(PNGTransparencyMode);
    if (extension == "tga")
        transparencyModeAttr.setValue(TGATransparencyMode);
    if (extension == "sgi")
        transparencyModeAttr.setValue(SGITransparencyMode);
    if (extension == "psd")
        transparencyModeAttr.setValue(FlatPSDTransparencyMode);

    node.linkAttr(read, "DRAWING.ELEMENT", uniqueColumnName);

    // Create a drawing for each file.
    for( var i = 0; i <= files.length - 1; ++i)
    {
        timing = start_frame + i;
        // Create a drawing; 'true' indicates that the file exists.
        Drawing.create(elemId, timing, true);
        // Get the actual path, in tmp folder.
        var drawingFilePath = Drawing.filename(elemId, timing.toString());
        copyFile( files[i], drawingFilePath );

        column.setEntry(uniqueColumnName, 1, timing, timing.toString());
    }
    return read;
}
import_files
"""

replace_files = """function replace_files(args)
{
    var files = args[0];
    var _node = args[1];
    var start_frame = args[2];

    var _column = node.linkedColumn(_node, "DRAWING.ELEMENT");

    // Delete existing drawings.
    var timings = column.getDrawingTimings(_column);
    for( var i = 0; i <= timings.length - 1; ++i)
    {
        column.deleteDrawingAt(_column, parseInt(timings[i]));
    }

    // Create new drawings.
    for( var i = 0; i <= files.length - 1; ++i)
    {
        timing = start_frame + i;
        // Create a drawing; 'true' indicates that the file exists.
        Drawing.create(node.getElementId(_node), timing, true);
        // Get the actual path, in tmp folder.
        var drawingFilePath = Drawing.filename(
            node.getElementId(_node), timing.toString()
        );
        copyFile( files[i], drawingFilePath );

        column.setEntry(_column, 1, timing, timing.toString());
    }
}
replace_files
"""


class ImageSequenceLoader(api.Loader):
    """Load image sequences.

    Stores the imported asset in a container named after the asset.
    """

    families = ["shot", "render"]
    representations = ["jpeg", "png"]

    def load(self, context, name=None, namespace=None, data=None):
        collections, remainder = clique.assemble(
            os.listdir(os.path.dirname(self.fname))
        )
        files = []
        for f in list(collections[0]):
            files.append(
                os.path.join(os.path.dirname(self.fname), f).replace("\\", "/")
            )

        read_node = harmony.send(
            {
                "function": copy_files + import_files,
                "args": ["Top", files, context["subset"]["name"], 1]
            }
        )["result"]

        self[:] = [read_node]

        return harmony.containerise(
            name,
            namespace,
            read_node,
            context,
            self.__class__.__name__
        )

    def update(self, container, representation):
        node = container.pop("node")

        path = os.path.dirname(api.get_representation_path(representation))
        collections, remainder = clique.assemble(os.listdir(path))
        files = []
        for f in list(collections[0]):
            files.append(os.path.join(path, f).replace("\\", "/"))

        harmony.send(
            {
                "function": copy_files + replace_files,
                "args": [files, node, 1]
            }
        )

        harmony.imprint(
            node, {"representation": str(representation["_id"])}
        )

    def remove(self, container):
        node = container.pop("node")
        func = """function deleteNode(_node)
        {
            node.deleteNode(_node, true, true);
        }
        deleteNode
        """
        harmony.send(
            {"function": func, "args": [node]}
        )

    def switch(self, container, representation):
        self.update(container, representation)

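Both `load` and `update` in the loader above lean on `clique.assemble` to group a directory listing into frame sequences. A small sketch of that call (file names invented):

import clique

listing = [
    "shot010.0001.png", "shot010.0002.png", "shot010.0003.png",
    "notes.txt",
]
collections, remainder = clique.assemble(listing)

collection = collections[0]
print(collection.head, collection.tail)  # "shot010." ".png"
print(list(collection))                  # the three frame file names
print(remainder)                         # ["notes.txt"]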
@@ -14,18 +14,6 @@ class ImportTemplateLoader(api.Loader):
    label = "Import Template"

    def load(self, context, name=None, namespace=None, data=None):
        # Make backdrops from metadata.
        backdrops = context["representation"]["data"].get("backdrops", [])

        func = """function func(args)
        {
            Backdrop.addBackdrop("Top", args[0]);
        }
        func
        """
        for backdrop in backdrops:
            harmony.send({"function": func, "args": [backdrop]})

        # Import template.
        temp_dir = tempfile.mkdtemp()
        zip_file = api.get_representation_path(context["representation"])

@@ -33,19 +21,6 @@ class ImportTemplateLoader(api.Loader):
        with zipfile.ZipFile(zip_file, "r") as zip_ref:
            zip_ref.extractall(template_path)

        func = """function func(args)
        {
            var template_path = args[0];
            var drag_object = copyPaste.copyFromTemplate(
                template_path, 0, 0, copyPaste.getCurrentCreateOptions()
            );
            copyPaste.pasteNewNodes(
                drag_object, "", copyPaste.getCurrentPasteOptions()
            );
        }
        func
        """

        func = """function func(args)
        {
            var template_path = args[0];

@@ -59,3 +34,11 @@ class ImportTemplateLoader(api.Loader):
        harmony.send({"function": func, "args": [template_path]})

        shutil.rmtree(temp_dir)


class ImportWorkfileLoader(ImportTemplateLoader):
    """Import workfiles."""

    families = ["workfile"]
    representations = ["*"]
    label = "Import Workfile"

28
pype/plugins/harmony/publish/collect_workfile.py
Normal file

@@ -0,0 +1,28 @@
import os

import pyblish.api


class CollectWorkfile(pyblish.api.ContextPlugin):
    """Collect current script for publish."""

    order = pyblish.api.CollectorOrder + 0.1
    label = "Collect Workfile"
    hosts = ["harmony"]

    def process(self, context):
        family = "workfile"
        task = os.getenv("AVALON_TASK", None)
        subset = family + task.capitalize()
        basename = os.path.basename(context.data["currentFile"])

        # Create instance
        instance = context.create_instance(subset)
        instance.data.update({
            "subset": subset,
            "label": basename,
            "name": basename,
            "family": family,
            "families": [],
            "representations": [],
            "asset": os.environ["AVALON_ASSET"]
        })

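The subset name built above is simply the family with the capitalized task appended; for example, with an illustrative task value:

family = "workfile"
task = "animation"                    # illustrative AVALON_TASK value
subset = family + task.capitalize()   # -> "workfileAnimation"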
@@ -1,9 +1,9 @@
import os
import tempfile
import subprocess

import pyblish.api
from avalon import harmony
import pype.lib

import clique


@@ -28,7 +28,8 @@ class ExtractRender(pyblish.api.InstancePlugin):
        scene.currentScene(),
        scene.getFrameRate(),
        scene.getStartFrame(),
        scene.getStopFrame()
        scene.getStopFrame(),
        sound.getSoundtrackAll().path()
    ]
}
func

@@ -37,11 +38,11 @@ class ExtractRender(pyblish.api.InstancePlugin):
            {"function": func, "args": [instance[0]]}
        )["result"]
        application_path = result[0]
        project_path = result[1]
        scene_path = os.path.join(result[1], result[2] + ".xstage")
        frame_rate = result[3]
        frame_start = result[4]
        frame_end = result[5]
        audio_path = result[6]

        # Set output path to temp folder.
        path = tempfile.mkdtemp()

@@ -59,9 +60,16 @@ class ExtractRender(pyblish.api.InstancePlugin):
        )
        harmony.save_scene()

        # Execute rendering.
        output = pype.lib._subprocess([application_path, "-batch", scene_path])
        self.log.info(output)
        # Execute rendering. Ignore the return code, because Harmony always
        # exits with an error code.
        proc = subprocess.Popen(
            [application_path, "-batch", scene_path],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE
        )
        output, error = proc.communicate()
        self.log.info(output.decode("utf-8"))

        # Collect rendered files.
        files = os.listdir(path)

@@ -76,25 +84,85 @@ class ExtractRender(pyblish.api.InstancePlugin):
                path, len(collections)
            )
        )
        collection = collections[0]

        extension = os.path.splitext(list(collections[0])[0])[-1][1:]
        # Generate thumbnail.
        thumbnail_path = os.path.join(path, "thumbnail.png")
        args = [
            "ffmpeg", "-y",
            "-i", os.path.join(path, list(collections[0])[0]),
            "-vf", "scale=300:-1",
            "-vframes", "1",
            thumbnail_path
        ]
        process = subprocess.Popen(
            args,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE
        )

        output = process.communicate()[0]

        if process.returncode != 0:
            raise ValueError(output.decode("utf-8"))

        self.log.debug(output.decode("utf-8"))

        # Generate mov.
        mov_path = os.path.join(path, instance.data["name"] + ".mov")
        args = [
            "ffmpeg", "-y",
            "-i", audio_path,
            "-i",
            os.path.join(path, collection.head + "%04d" + collection.tail),
            mov_path
        ]
        process = subprocess.Popen(
            args,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE
        )

        output = process.communicate()[0]

        if process.returncode != 0:
            raise ValueError(output.decode("utf-8"))

        self.log.debug(output.decode("utf-8"))

        # Generate representations.
        extension = collection.tail[1:]
        representation = {
            "name": extension,
            "ext": extension,
            "files": list(collections[0]),
            "files": list(collection),
            "stagingDir": path
        }
        movie = {
            "name": "mov",
            "ext": "mov",
            "files": os.path.basename(mov_path),
            "stagingDir": path,
            "frameStart": frame_start,
            "frameEnd": frame_end,
            "fps": frame_rate,
            "preview": True,
            "tags": ["review"]
            "tags": ["review", "ftrackreview"]
        }
        instance.data["representations"] = [representation]
        self.log.info(frame_rate)
        thumbnail = {
            "name": "thumbnail",
            "ext": "png",
            "files": os.path.basename(thumbnail_path),
            "stagingDir": path,
            "tags": ["thumbnail"]
        }
        instance.data["representations"] = [representation, movie, thumbnail]

        # Required for the extract_review plugin (L222 onwards).
        instance.data["frameStart"] = frame_start
        instance.data["frameEnd"] = frame_end
        instance.data["fps"] = frame_rate

        self.log.info("Extracted {instance} to {path}".format(**locals()))
        self.log.info(f"Extracted {instance} to {path}")

13
pype/plugins/harmony/publish/extract_save_scene.py
Normal file

@@ -0,0 +1,13 @@
import pyblish.api
from avalon import harmony


class ExtractSaveScene(pyblish.api.ContextPlugin):
    """Save scene for extraction."""

    label = "Extract Save Scene"
    order = pyblish.api.ExtractorOrder - 0.49
    hosts = ["harmony"]

    def process(self, context):
        harmony.save_scene()

@@ -2,7 +2,8 @@ import os
import shutil

import pype.api
from avalon import harmony
import avalon.harmony
import pype.hosts.harmony


class ExtractTemplate(pype.api.Extractor):

@@ -14,6 +15,7 @@ class ExtractTemplate(pype.api.Extractor):

    def process(self, instance):
        staging_dir = self.staging_dir(instance)
        filepath = os.path.join(staging_dir, "{}.tpl".format(instance.name))

        self.log.info("Outputting template to {}".format(staging_dir))


@@ -28,7 +30,7 @@ class ExtractTemplate(pype.api.Extractor):
        unique_backdrops = [backdrops[x] for x in set(backdrops.keys())]

        # Get non-connected nodes within backdrops.
        all_nodes = harmony.send(
        all_nodes = avalon.harmony.send(
            {"function": "node.subNodes", "args": ["Top"]}
        )["result"]
        for node in [x for x in all_nodes if x not in dependencies]:

@@ -43,48 +45,9 @@ class ExtractTemplate(pype.api.Extractor):
            dependencies.remove(instance[0])

        # Export template.
        func = """function func(args)
        {
            // Add an extra node just so a new group can be created.
            var temp_node = node.add("Top", "temp_note", "NOTE", 0, 0, 0);
            var template_group = node.createGroup(temp_node, "temp_group");
            node.deleteNode( template_group + "/temp_note" );

            // This will make the Node View focus on the new group.
            selection.clearSelection();
            selection.addNodeToSelection(template_group);
            Action.perform("onActionEnterGroup()", "Node View");

            // Recreate backdrops in group.
            for (var i = 0 ; i < args[0].length; i++)
            {
                Backdrop.addBackdrop(template_group, args[0][i]);
            };

            // Copy-paste the selected nodes into the new group.
            var drag_object = copyPaste.copy(args[1], 1, frame.numberOf, "");
            copyPaste.pasteNewNodes(drag_object, template_group, "");

            // Select all nodes within group and export as template.
            Action.perform( "selectAll()", "Node View" );
            copyPaste.createTemplateFromSelection(args[2], args[3]);

            // Unfocus the group in Node view, delete all nodes and backdrops
            // created during the process.
            Action.perform("onActionUpToParent()", "Node View");
            node.deleteNode(template_group, true, true);
        }
        func
        """
        harmony.send({
            "function": func,
            "args": [
                unique_backdrops,
                dependencies,
                "{}.tpl".format(instance.name),
                staging_dir
            ]
        })
        pype.hosts.harmony.export_template(
            unique_backdrops, dependencies, filepath
        )

        # Prep representation.
        os.chdir(staging_dir)

@@ -131,7 +94,7 @@ class ExtractTemplate(pype.api.Extractor):
        }
        func
        """
        return harmony.send(
        return avalon.harmony.send(
            {"function": func, "args": [node]}
        )["result"]


@@ -150,7 +113,7 @@ class ExtractTemplate(pype.api.Extractor):
        func
        """

        current_dependencies = harmony.send(
        current_dependencies = avalon.harmony.send(
            {"function": func, "args": [node]}
        )["result"]

43
pype/plugins/harmony/publish/extract_workfile.py
Normal file

@@ -0,0 +1,43 @@
import os
import shutil

import pype.api
import avalon.harmony
import pype.hosts.harmony


class ExtractWorkfile(pype.api.Extractor):
    """Extract the connected nodes to the composite instance."""

    label = "Extract Workfile"
    hosts = ["harmony"]
    families = ["workfile"]

    def process(self, instance):
        # Export template.
        backdrops = avalon.harmony.send(
            {"function": "Backdrop.backdrops", "args": ["Top"]}
        )["result"]
        nodes = avalon.harmony.send(
            {"function": "node.subNodes", "args": ["Top"]}
        )["result"]
        staging_dir = self.staging_dir(instance)
        filepath = os.path.join(staging_dir, "{}.tpl".format(instance.name))

        pype.hosts.harmony.export_template(backdrops, nodes, filepath)

        # Prep representation.
        os.chdir(staging_dir)
        shutil.make_archive(
            "{}".format(instance.name),
            "zip",
            os.path.join(staging_dir, "{}.tpl".format(instance.name))
        )

        representation = {
            "name": "tpl",
            "ext": "zip",
            "files": "{}.zip".format(instance.name),
            "stagingDir": staging_dir
        }
        instance.data["representations"] = [representation]

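`shutil.make_archive` resolves a relative archive name against the current working directory, which is why the extractor chdirs into the staging dir before archiving. In isolation (directories invented):

import os
import shutil
import tempfile

staging_dir = tempfile.mkdtemp()            # illustrative staging dir
tpl_dir = os.path.join(staging_dir, "instance.tpl")
os.makedirs(tpl_dir)

os.chdir(staging_dir)
shutil.make_archive("instance", "zip", tpl_dir)
assert os.path.isfile(os.path.join(staging_dir, "instance.zip"))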
37
pype/plugins/harmony/publish/increment_workfile.py
Normal file

@@ -0,0 +1,37 @@
import os

import pyblish.api
from pype.action import get_errored_plugins_from_data
from pype.lib import version_up
from avalon import harmony


class IncrementWorkfile(pyblish.api.InstancePlugin):
    """Increment the current workfile.

    Saves the current scene with an increased version number.
    """

    label = "Increment Workfile"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["harmony"]
    families = ["workfile"]
    optional = True

    def process(self, instance):
        errored_plugins = get_errored_plugins_from_data(instance.context)
        if errored_plugins:
            raise RuntimeError(
                "Skipping incrementing current file because publishing failed."
            )

        scene_dir = version_up(
            os.path.dirname(instance.context.data["currentFile"])
        )
        scene_path = os.path.join(
            scene_dir, os.path.basename(scene_dir) + ".xstage"
        )

        harmony.save_scene_as(scene_path)

        self.log.info("Incremented workfile to: {}".format(scene_path))

48
pype/plugins/harmony/publish/validate_instances.py
Normal file

@@ -0,0 +1,48 @@
import os

import pyblish.api
import pype.api
from avalon import harmony


class ValidateInstanceRepair(pyblish.api.Action):
    """Repair the instance."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):

        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        for instance in instances:
            data = harmony.read(instance[0])
            data["asset"] = os.environ["AVALON_ASSET"]
            harmony.imprint(instance[0], data)


class ValidateInstance(pyblish.api.InstancePlugin):
    """Validate the instance asset is the current asset."""

    label = "Validate Instance"
    hosts = ["harmony"]
    actions = [ValidateInstanceRepair]
    order = pype.api.ValidateContentsOrder

    def process(self, instance):
        instance_asset = instance.data["asset"]
        current_asset = os.environ["AVALON_ASSET"]
        msg = (
            "Instance asset is not the same as the current asset:"
            f"\nInstance: {instance_asset}\nCurrent: {current_asset}"
        )
        assert instance_asset == current_asset, msg

66
pype/plugins/harmony/publish/validate_scene_settings.py
Normal file

@@ -0,0 +1,66 @@
import json

import pyblish.api

import avalon.harmony
import pype.hosts.harmony


class ValidateSceneSettingsRepair(pyblish.api.Action):
    """Repair the scene settings."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        pype.hosts.harmony.set_scene_settings(
            pype.hosts.harmony.get_asset_settings()
        )


class ValidateSceneSettings(pyblish.api.InstancePlugin):
    """Ensure the scene settings are in sync with the database."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Scene Settings"
    families = ["workfile"]
    hosts = ["harmony"]
    actions = [ValidateSceneSettingsRepair]

    def process(self, instance):
        expected_settings = pype.hosts.harmony.get_asset_settings()

        # Harmony is expected to start at frame 1, so shift the expected
        # range accordingly.
        frame_start = expected_settings["frameStart"]
        frame_end = expected_settings["frameEnd"]
        expected_settings["frameEnd"] = frame_end - frame_start + 1
        expected_settings["frameStart"] = 1

        func = """function func()
        {
            return {
                "fps": scene.getFrameRate(),
                "frameStart": scene.getStartFrame(),
                "frameEnd": scene.getStopFrame(),
                "resolutionWidth": scene.defaultResolutionX(),
                "resolutionHeight": scene.defaultResolutionY()
            };
        }
        func
        """
        current_settings = avalon.harmony.send({"function": func})["result"]

        invalid_settings = []
        for key, value in expected_settings.items():
            if value != current_settings[key]:
                invalid_settings.append({
                    "name": key,
                    "expected": value,
                    "current": current_settings[key]
                })

        msg = "Found invalid settings:\n{}".format(
            json.dumps(invalid_settings, sort_keys=True, indent=4)
        )
        assert not invalid_settings, msg

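The shift applied in `ValidateSceneSettings.process` maps the database frame range onto Harmony's 1-based timeline, so only the range length is compared. A worked example with invented numbers:

# Database range, e.g. frameStart=1001, frameEnd=1024 (illustrative values).
frame_start, frame_end = 1001, 1024

expected = {
    "frameStart": 1,
    "frameEnd": frame_end - frame_start + 1,  # 24 frames -> frameEnd == 24
}
assert expected["frameEnd"] == 24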
@@ -68,7 +68,7 @@ class CreateRender(avalon.maya.Creator):

    _image_prefixes = {
        'mentalray': 'maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>',
        'vray': '"maya/<scene>/<Layer>/<Layer>',
        'vray': 'maya/<scene>/<Layer>/<Layer>',
        'arnold': 'maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>',
        'renderman': 'maya/<Scene>/<layer>/<layer>_<aov>',
        'redshift': 'maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>'

@@ -179,7 +179,7 @@ class CreateRender(avalon.maya.Creator):
        self.data["framesPerTask"] = 1
        self.data["whitelist"] = False
        self.data["machineList"] = ""
        self.data["useMayaBatch"] = True
        self.data["useMayaBatch"] = False
        self.data["vrayScene"] = False
        # Disabled for now as this feature is not working yet
        # self.data["assScene"] = False


@@ -157,7 +157,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
        # in expectedFiles. If so, raise an error as we cannot attach an AOV
        # (considered to be a subset on its own) to another subset
        if attach_to:
            assert len(exp_files[0].keys()) == 1, (
            assert isinstance(exp_files, list), (
                "attaching multiple AOVs or renderable cameras to "
                "subset is not supported"
            )

@@ -332,9 +332,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
        options["extendFrames"] = extend_frames
        options["overrideExistingFrame"] = override_frames

        maya_render_plugin = "MayaBatch"
        if not attributes.get("useMayaBatch", True):
            maya_render_plugin = "MayaCmd"
        maya_render_plugin = "MayaPype"
        if attributes.get("useMayaBatch", True):
            maya_render_plugin = "MayaBatch"

        options["mayaRenderPlugin"] = maya_render_plugin

@@ -37,7 +37,7 @@ class ExtractAnimation(pype.api.Extractor):
        # Collect the start and end including handles
        start = instance.data["frameStart"]
        end = instance.data["frameEnd"]
        handles = instance.data.get("handles", 0)
        handles = instance.data.get("handles", 0) or 0
        if handles:
            start -= handles
            end += handles

@@ -50,7 +50,7 @@ class ExtractAnimation(pype.api.Extractor):
        path = os.path.join(parent_dir, filename)

        options = {
            "step": instance.data.get("step", 1.0),
            "step": instance.data.get("step", 1.0) or 1.0,
            "attr": ["cbId"],
            "writeVisibility": True,
            "writeCreases": True,

@@ -74,8 +74,8 @@ class ExtractAnimation(pype.api.Extractor):
        with avalon.maya.maintained_selection():
            cmds.select(nodes, noExpand=True)
            extract_alembic(file=path,
                            startFrame=start,
                            endFrame=end,
                            startFrame=float(start),
                            endFrame=float(end),
                            **options)

        if "representations" not in instance.data:

@@ -41,7 +41,7 @@ payload_skeleton = {
        "BatchName": None,  # Top-level group name
        "Name": None,  # Job name, as seen in Monitor
        "UserName": None,
        "Plugin": "MayaBatch",
        "Plugin": "MayaPype",
        "Frames": "{start}-{end}x{step}",
        "Comment": None,
    },

@@ -274,7 +274,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
            step=int(self._instance.data["byFrameStep"]))

        payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get(
            "mayaRenderPlugin", "MayaBatch")
            "mayaRenderPlugin", "MayaPype")

        payload_skeleton["JobInfo"]["BatchName"] = filename
        # Job name, as seen in Monitor

@@ -311,12 +311,14 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
            "AVALON_TASK",
            "PYPE_USERNAME",
            "PYPE_DEV",
            "PYPE_LOG_NO_COLORS"
            "PYPE_LOG_NO_COLORS",
            "PYPE_SETUP_PATH"
        ]

        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)
        environment["PYPE_LOG_NO_COLORS"] = "1"
        environment["PYPE_MAYA_VERSION"] = cmds.about(v=True)
        payload_skeleton["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,

@@ -428,7 +430,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
                int(self._instance.data["frameStartHandle"]),
                int(self._instance.data["frameEndHandle"])),

            "Plugin": "MayaBatch",
            "Plugin": self._instance.data.get(
                "mayaRenderPlugin", "MayaPype"),
            "FramesPerTask": self._instance.data.get("framesPerTask", 1)
        }

@@ -1,5 +1,5 @@
import pymel.core as pc

from maya import cmds
import pyblish.api
import pype.api
import pype.hosts.maya.action

@@ -23,6 +23,11 @@ class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin):
        pype.api.RepairAction
    ]
    optional = True
    if cmds.getAttr(
            "defaultRenderGlobals.currentRenderer").lower() == "arnold":
        active = True
    else:
        active = False

    @classmethod
    def get_invalid_attributes(cls, instance, compute=False):


@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Plugin for validating naming conventions."""
from maya import cmds

import pyblish.api

@@ -42,7 +44,8 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
    ALLOW_IF_NOT_IN_SUFFIX_TABLE = True

    @staticmethod
    def is_valid_name(node_name, shape_type, SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
    def is_valid_name(node_name, shape_type,
                      SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
        """Return whether the node's name is correct.

        The correctness of a transform's suffix depends on what

@@ -52,6 +55,12 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
        When `shape_type` is None the transform doesn't have any direct
        children shapes.

        Args:
            node_name (str): Node name.
            shape_type (str): Type of node.
            SUFFIX_NAMING_TABLE (dict): Mapping dict for suffixes.
            ALLOW_IF_NOT_IN_SUFFIX_TABLE (dict): Filter dict.

        """
        if shape_type not in SUFFIX_NAMING_TABLE:
            return ALLOW_IF_NOT_IN_SUFFIX_TABLE

@@ -63,7 +72,13 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
        return False

    @classmethod
    def get_invalid(cls, instance, SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
    def get_invalid(cls, instance):
        """Get invalid nodes in the instance.

        Args:
            instance (:class:`pyblish.api.Instance`): published instance.

        """
        transforms = cmds.ls(instance, type='transform', long=True)

        invalid = []

@@ -74,16 +89,23 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
                noIntermediate=True)

            shape_type = cmds.nodeType(shapes[0]) if shapes else None
            if not cls.is_valid_name(transform, shape_type, SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
            if not cls.is_valid_name(transform, shape_type,
                                     cls.SUFFIX_NAMING_TABLE,
                                     cls.ALLOW_IF_NOT_IN_SUFFIX_TABLE):
                invalid.append(transform)

        return invalid

    def process(self, instance):
        """Process all the nodes in the instance"""
        """Process all the nodes in the instance.

        Args:
            instance (:class:`pyblish.api.Instance`): published instance.

        invalid = self.get_invalid(instance, self.SUFFIX_NAMING_TABLE, self.ALLOW_IF_NOT_IN_SUFFIX_TABLE)
        """
        invalid = self.get_invalid(instance,
                                   self.SUFFIX_NAMING_TABLE,
                                   self.ALLOW_IF_NOT_IN_SUFFIX_TABLE)
        if invalid:
            raise ValueError("Incorrectly named geometry "
                             "transforms: {0}".format(invalid))

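For reference, the suffix validator compares each transform's name against the class-level `SUFFIX_NAMING_TABLE`. The table's real contents are configured on the plugin, so the mapping and the endswith check below are purely hypothetical stand-ins for the shape of the logic:

# Hypothetical contents -- the real table is configured on the plugin.
SUFFIX_NAMING_TABLE = {
    "mesh": ["_GEO"],
    "nurbsCurve": ["_CRV"],
    "locator": ["_LOC"],
}
ALLOW_IF_NOT_IN_SUFFIX_TABLE = True

def is_valid_name(node_name, shape_type):
    # Unknown shape types fall back to the allow flag, as in the plugin.
    if shape_type not in SUFFIX_NAMING_TABLE:
        return ALLOW_IF_NOT_IN_SUFFIX_TABLE
    return any(node_name.endswith(s) for s in SUFFIX_NAMING_TABLE[shape_type])

assert is_valid_name("body_GEO", "mesh")
assert not is_valid_name("body", "mesh")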
@@ -1,12 +1,77 @@
from avalon import photoshop
from avalon import api, photoshop
from avalon.vendor import Qt


class CreateImage(photoshop.Creator):
class CreateImage(api.Creator):
    """Image folder for publish."""

    name = "imageDefault"
    label = "Image"
    family = "image"

    def __init__(self, *args, **kwargs):
        super(CreateImage, self).__init__(*args, **kwargs)
    def process(self):
        groups = []
        layers = []
        create_group = False
        group_constant = photoshop.get_com_objects().constants().psLayerSet
        if (self.options or {}).get("useSelection"):
            multiple_instances = False
            selection = photoshop.get_selected_layers()

            if len(selection) > 1:
                # Ask the user whether to create one image or an image per
                # selected item.
                msg_box = Qt.QtWidgets.QMessageBox()
                msg_box.setIcon(Qt.QtWidgets.QMessageBox.Warning)
                msg_box.setText(
                    "Multiple layers selected."
                    "\nDo you want to make one image per layer?"
                )
                msg_box.setStandardButtons(
                    Qt.QtWidgets.QMessageBox.Yes |
                    Qt.QtWidgets.QMessageBox.No |
                    Qt.QtWidgets.QMessageBox.Cancel
                )
                ret = msg_box.exec_()
                if ret == Qt.QtWidgets.QMessageBox.Yes:
                    multiple_instances = True
                elif ret == Qt.QtWidgets.QMessageBox.Cancel:
                    return

                if multiple_instances:
                    for item in selection:
                        if item.LayerType == group_constant:
                            groups.append(item)
                        else:
                            layers.append(item)
                else:
                    group = photoshop.group_selected_layers()
                    group.Name = self.name
                    groups.append(group)

            elif len(selection) == 1:
                # One selected item. Use the group if it is a LayerSet
                # (group), else create a new group.
                if selection[0].LayerType == group_constant:
                    groups.append(selection[0])
                else:
                    layers.append(selection[0])
            elif len(selection) == 0:
                # No selection creates an empty group.
                create_group = True
        else:
            create_group = True

        if create_group:
            group = photoshop.app().ActiveDocument.LayerSets.Add()
            group.Name = self.name
            groups.append(group)

        for layer in layers:
            photoshop.select_layers([layer])
            group = photoshop.group_selected_layers()
            group.Name = layer.Name
            groups.append(group)

        for group in groups:
            photoshop.imprint(group, self.data)

29
pype/plugins/photoshop/publish/increment_workfile.py
Normal file

@@ -0,0 +1,29 @@
import pyblish.api
from pype.action import get_errored_plugins_from_data
from pype.lib import version_up
from avalon import photoshop


class IncrementWorkfile(pyblish.api.InstancePlugin):
    """Increment the current workfile.

    Saves the current scene with an increased version number.
    """

    label = "Increment Workfile"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["photoshop"]
    families = ["workfile"]
    optional = True

    def process(self, instance):
        errored_plugins = get_errored_plugins_from_data(instance.context)
        if errored_plugins:
            raise RuntimeError(
                "Skipping incrementing current file because publishing failed."
            )

        scene_path = version_up(instance.context.data["currentFile"])
        photoshop.app().ActiveDocument.SaveAs(scene_path)

        self.log.info("Incremented workfile to: {}".format(scene_path))

44
pype/plugins/photoshop/publish/validate_naming.py
Normal file

@@ -0,0 +1,44 @@
import pyblish.api
import pype.api


class ValidateNamingRepair(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):

        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        for instance in instances:
            instance[0].Name = instance.data["name"].replace(" ", "_")

        return True


class ValidateNaming(pyblish.api.InstancePlugin):
    """Validate the instance name.

    Spaces in names are not allowed. They will be replaced with underscores.
    """

    label = "Validate Naming"
    hosts = ["photoshop"]
    order = pype.api.ValidateContentsOrder
    families = ["image"]
    actions = [ValidateNamingRepair]

    def process(self, instance):
        msg = "Name \"{}\" is not allowed.".format(instance.data["name"])
        assert " " not in instance.data["name"], msg

136
pype/plugins/standalonepublisher/publish/collect_shots.py
Normal file

@@ -0,0 +1,136 @@
import os

import opentimelineio as otio
from bson import json_util

import pyblish.api
from pype import lib
from avalon import io


class OTIO_View(pyblish.api.Action):
    """Currently disabled because OTIO requires PySide2. Issue on Qt.py:
    https://github.com/PixarAnimationStudios/OpenTimelineIO/issues/289
    """

    label = "OTIO View"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        instance = context[0]
        representation = instance.data["representations"][0]
        file_path = os.path.join(
            representation["stagingDir"], representation["files"]
        )
        lib._subprocess(["otioview", file_path])


class CollectShots(pyblish.api.InstancePlugin):
    """Collect shot instances from an editorial file."""

    order = pyblish.api.CollectorOrder
    label = "Collect Shots"
    hosts = ["standalonepublisher"]
    families = ["editorial"]
    actions = []

    def process(self, instance):
        representation = instance.data["representations"][0]
        file_path = os.path.join(
            representation["stagingDir"], representation["files"]
        )
        instance.context.data["editorialPath"] = file_path

        extension = os.path.splitext(file_path)[1][1:]
        kwargs = {}
        if extension == "edl":
            # EDL has no frame rate embedded, so it needs an explicit frame
            # rate, else 24 is assumed.
            kwargs["rate"] = lib.get_asset()["data"]["fps"]

        timeline = otio.adapters.read_from_file(file_path, **kwargs)
        tracks = timeline.each_child(
            descended_from_type=otio.schema.track.Track
        )
        asset_entity = instance.context.data["assetEntity"]
        asset_name = asset_entity["name"]

        # Project specific prefix naming. This needs to be replaced with some
        # options to be more flexible.
        asset_name = asset_name.split("_")[0]

        instances = []
        for track in tracks:
            for child in track.each_child():

                # Transitions are ignored, because Clips have the full frame
                # range.
                if isinstance(child, otio.schema.transition.Transition):
                    continue

                # Hardcoded to expect a shot name of "[name].[extension]"
                child_name = os.path.splitext(child.name)[0].lower()
                name = f"{asset_name}_{child_name}"

                frame_start = child.range_in_parent().start_time.value
                frame_end = child.range_in_parent().end_time_inclusive().value

                label = f"{name} (framerange: {frame_start}-{frame_end})"
                instances.append(
                    instance.context.create_instance(**{
                        "name": name,
                        "label": label,
                        "frameStart": frame_start,
                        "frameEnd": frame_end,
                        "family": "shot",
                        "families": ["review", "ftrack"],
                        "ftrackFamily": "review",
                        "asset": name,
                        "subset": "shotMain",
                        "representations": [],
                        "source": file_path
                    })
                )

        visual_hierarchy = [asset_entity]
        while True:
            visual_parent = io.find_one(
                {"_id": visual_hierarchy[-1]["data"]["visualParent"]}
            )
            if visual_parent:
                visual_hierarchy.append(visual_parent)
            else:
                visual_hierarchy.append(instance.context.data["projectEntity"])
                break

        context_hierarchy = None
        for entity in visual_hierarchy:
            childs = {}
            if context_hierarchy:
                name = context_hierarchy.pop("name")
                childs = {name: context_hierarchy}
            else:
                for instance in instances:
                    childs[instance.data["name"]] = {
                        "childs": {},
                        "entity_type": "Shot",
                        "custom_attributes": {
                            "frameStart": instance.data["frameStart"],
                            "frameEnd": instance.data["frameEnd"]
                        }
                    }

            context_hierarchy = {
                "entity_type": entity["data"]["entityType"],
                "childs": childs,
                "name": entity["name"]
            }

        name = context_hierarchy.pop("name")
        context_hierarchy = {name: context_hierarchy}
        instance.context.data["hierarchyContext"] = context_hierarchy
        self.log.info(
            "Hierarchy:\n" +
            json_util.dumps(context_hierarchy, sort_keys=True, indent=4)
        )

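The loop above folds the visual hierarchy into a nested dict, outermost entity first, with the collected shots as the innermost `childs`. For a project with one sequence and one shot the resulting `hierarchyContext` would look roughly like this (all names and values illustrative):

hierarchy_context = {
    "MyProject": {
        "entity_type": "Project",
        "childs": {
            "sq01": {
                "entity_type": "Sequence",
                "childs": {
                    "sq01_sh010": {
                        "childs": {},
                        "entity_type": "Shot",
                        "custom_attributes": {
                            "frameStart": 0,
                            "frameEnd": 96
                        }
                    }
                }
            }
        }
    }
}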
@@ -42,7 +42,7 @@ class ExtractReviewSP(pyblish.api.InstancePlugin):
        self.log.debug("Families In: `{}`".format(instance.data["families"]))

        # get specific profile if one was defined
        specific_profiles = instance.data.get("repreProfiles")
        specific_profiles = instance.data.get("repreProfiles", [])

        new_repres = []
        # filter out mov and img sequences

96
pype/plugins/standalonepublisher/publish/extract_shot.py
Normal file

@ -0,0 +1,96 @@
import os

import clique

import pype.api
import pype.lib


class ExtractShot(pype.api.Extractor):
    """Extract shot "mov" and "wav" files."""

    label = "Extract Shot"
    hosts = ["standalonepublisher"]
    families = ["shot"]

    def process(self, instance):
        staging_dir = self.staging_dir(instance)
        self.log.info("Outputting shot to {}".format(staging_dir))

        editorial_path = instance.context.data["editorialPath"]
        basename = os.path.splitext(os.path.basename(editorial_path))[0]

        # Generate mov file.
        fps = pype.lib.get_asset()["data"]["fps"]
        input_path = os.path.join(
            os.path.dirname(editorial_path), basename + ".mov"
        )
        shot_mov = os.path.join(staging_dir, instance.data["name"] + ".mov")
        ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
        args = [
            ffmpeg_path,
            "-ss", str(instance.data["frameStart"] / fps),
            "-i", input_path,
            "-t", str(
                (instance.data["frameEnd"] - instance.data["frameStart"] + 1) /
                fps
            ),
            "-crf", "18",
            "-pix_fmt", "yuv420p",
            shot_mov
        ]
        self.log.info(f"Processing: {args}")
        output = pype.lib._subprocess(args)
        self.log.info(output)

        instance.data["representations"].append({
            "name": "mov",
            "ext": "mov",
            "files": os.path.basename(shot_mov),
            "stagingDir": staging_dir,
            "frameStart": instance.data["frameStart"],
            "frameEnd": instance.data["frameEnd"],
            "fps": fps,
            "thumbnail": True,
            "tags": ["review", "ftrackreview"]
        })

        # Generate jpegs.
        shot_jpegs = os.path.join(
            staging_dir, instance.data["name"] + ".%04d.jpeg"
        )
        args = [ffmpeg_path, "-i", shot_mov, shot_jpegs]
        self.log.info(f"Processing: {args}")
        output = pype.lib._subprocess(args)
        self.log.info(output)

        collection = clique.Collection(
            head=instance.data["name"] + ".", tail='.jpeg', padding=4
        )
        for f in os.listdir(staging_dir):
            if collection.match(f):
                collection.add(f)

        instance.data["representations"].append({
            "name": "jpeg",
            "ext": "jpeg",
            "files": list(collection),
            "stagingDir": staging_dir
        })

        # Generate wav file.
        shot_wav = os.path.join(staging_dir, instance.data["name"] + ".wav")
        args = [ffmpeg_path, "-i", shot_mov, shot_wav]
        self.log.info(f"Processing: {args}")
        output = pype.lib._subprocess(args)
        self.log.info(output)

        instance.data["representations"].append({
            "name": "wav",
            "ext": "wav",
            "files": os.path.basename(shot_wav),
            "stagingDir": staging_dir
        })

        # Required for extract_review plugin (L222 onwards).
        instance.data["fps"] = fps
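
The -ss/-t arguments above convert an inclusive frame range into seconds. The same arithmetic in isolation (frame numbers and fps are made up):

fps = 25.0
frame_start = 100
frame_end = 149                                  # inclusive range
seek = frame_start / fps                         # -ss -> 4.0 s into the source
duration = (frame_end - frame_start + 1) / fps   # -t  -> 50 frames -> 2.0 s
# ffmpeg -ss 4.0 -i input.mov -t 2.0 -crf 18 -pix_fmt yuv420p shot.mov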
@ -0,0 +1,28 @@
import os

import pyblish.api
import pype.api


class ValidateEditorialResources(pyblish.api.InstancePlugin):
    """Validate there is a "mov" next to the editorial file."""

    label = "Validate Editorial Resources"
    hosts = ["standalonepublisher"]
    families = ["editorial"]
    order = pype.api.ValidateContentsOrder

    def process(self, instance):
        representation = instance.data["representations"][0]
        staging_dir = representation["stagingDir"]
        basename = os.path.splitext(
            os.path.basename(representation["files"])
        )[0]

        files = [x for x in os.listdir(staging_dir)]

        # Check for "mov" file.
        filename = basename + ".mov"
        filepath = os.path.join(staging_dir, filename)
        msg = f"Missing \"{filepath}\"."
        assert filename in files, msg
23
pype/plugins/standalonepublisher/publish/validate_shots.py
Normal file

@ -0,0 +1,23 @@
import pyblish.api
import pype.api


class ValidateShots(pyblish.api.ContextPlugin):
    """Validate there are no duplicate shot names in the context."""

    label = "Validate Shots"
    hosts = ["standalonepublisher"]
    order = pype.api.ValidateContentsOrder

    def process(self, context):
        shot_names = []
        duplicate_names = []
        for instance in context:
            name = instance.data["name"]
            if name in shot_names:
                duplicate_names.append(name)
            else:
                shot_names.append(name)

        msg = "There are duplicate shot names:\n{}".format(duplicate_names)
        assert not duplicate_names, msg
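
The duplicate scan above could equally be written with collections.Counter; a sketch with the same behaviour:

from collections import Counter

names = [instance.data["name"] for instance in context]
duplicate_names = [name for name, count in Counter(names).items() if count > 1]
assert not duplicate_names, \
    "There are duplicate shot names:\n{}".format(duplicate_names)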
@ -491,3 +491,24 @@ QToolButton {

#TerminalFilerBtn[type="log_critical"]:checked {color: rgb(255, 79, 117);}
#TerminalFilerBtn[type="log_critical"] {color: rgba(255, 79, 117, 63);}

#SuspendLogsBtn {
    background: #444;
    border: none;
    border-top-right-radius: 7px;
    border-bottom-right-radius: 7px;
    border-top-left-radius: 0px;
    border-bottom-left-radius: 0px;
    font-family: "FontAwesome";
    font-size: 11pt;
    color: white;
    padding: 0px;
}

#SuspendLogsBtn:hover {
    background: #333;
}

#SuspendLogsBtn:disabled {
    background: #4c4c4c;
}
@ -5,7 +5,7 @@ import os
import sys

from . import compat, control, settings, util, window
from .vendor.Qt import QtCore, QtGui, QtWidgets
from Qt import QtCore, QtGui, QtWidgets

self = sys.modules[__name__]
@ -1,4 +1,4 @@
from .vendor.Qt import QtCore
from Qt import QtCore


def flags(*args, **kwargs):
@ -8,8 +8,9 @@ an active window manager; such as via Travis-CI.
import os
import sys
import traceback
import inspect

from .vendor.Qt import QtCore
from Qt import QtCore

import pyblish.api
import pyblish.util

@ -19,10 +20,8 @@ import pyblish.version

from . import util
from .constants import InstanceStates
try:
    from pypeapp.config import get_presets
except Exception:
    get_presets = dict

from pype.api import config


class IterationBreak(Exception):
@ -62,11 +61,15 @@ class Controller(QtCore.QObject):
    # store OrderGroups - now it is a singleton
    order_groups = util.OrderGroups

    # When instance is toggled
    instance_toggled = QtCore.Signal(object, object, object)

    def __init__(self, parent=None):
        super(Controller, self).__init__(parent)
        self.context = None
        self.plugins = {}
        self.optional_default = {}
        self.instance_toggled.connect(self._on_instance_toggled)

    def reset_variables(self):
        # Data internal to the GUI itself

@ -83,7 +86,6 @@ class Controller(QtCore.QObject):
        # - passing collectors order disables plugin/instance toggle
        self.collectors_order = None
        self.collect_state = 0
        self.collected = False

        # - passing validators order disables validate button and gives ability
        # to know when to stop on validate button press
@ -114,7 +116,7 @@ class Controller(QtCore.QObject):

    def presets_by_hosts(self):
        # Get global filters as base
        presets = get_presets().get("plugins", {})
        presets = config.get_presets().get("plugins", {})
        if not presets:
            return {}
@ -304,6 +306,11 @@ class Controller(QtCore.QObject):
                        "%s was inactive, skipping.." % instance
                    )
                    continue
                # Stop if was stopped
                if self.stopped:
                    self.stopped = False
                    yield IterationBreak("Stopped")

                yield (plugin, instance)
        else:
            families = util.collect_families_from_instances(
@ -412,3 +419,19 @@ class Controller(QtCore.QObject):

        for plugin in self.plugins:
            del(plugin)

    def _on_instance_toggled(self, instance, old_value, new_value):
        callbacks = pyblish.api.registered_callbacks().get("instanceToggled")
        if not callbacks:
            return

        for callback in callbacks:
            try:
                callback(instance, old_value, new_value)
            except Exception:
                print(
                    "Callback for `instanceToggled` crashed. {}".format(
                        os.path.abspath(inspect.getfile(callback))
                    )
                )
                traceback.print_exception(*sys.exc_info())
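
The new _on_instance_toggled handler fans the Qt signal out to callbacks registered under the custom "instanceToggled" topic. A consumer would hook in through the standard pyblish callback registry; a minimal sketch:

import pyblish.api

def on_instance_toggled(instance, old_value, new_value):
    # React to the user (un)checking an instance in the publish UI.
    print("{} toggled: {} -> {}".format(instance, old_value, new_value))

pyblish.api.register_callback("instanceToggled", on_instance_toggled)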
@ -1,6 +1,6 @@
import platform

from .vendor.Qt import QtWidgets, QtGui, QtCore
from Qt import QtWidgets, QtGui, QtCore

from . import model
from .awesome import tags as awesome
@ -29,18 +29,14 @@ import pyblish

from . import settings, util
from .awesome import tags as awesome
from .vendor import Qt
from .vendor.Qt import QtCore, QtGui
from .vendor.six import text_type
from .vendor.six.moves import queue
import Qt
from Qt import QtCore, QtGui
from six import text_type
from .vendor import qtawesome
from .constants import PluginStates, InstanceStates, GroupStates, Roles

try:
    from pypeapp import config
    get_presets = config.get_presets
except Exception:
    get_presets = dict
from pype.api import config


# ItemTypes
InstanceType = QtGui.QStandardItem.UserType
@ -52,6 +48,7 @@ TerminalDetailType = QtGui.QStandardItem.UserType + 4

class QAwesomeTextIconFactory:
    icons = {}

    @classmethod
    def icon(cls, icon_name):
        if icon_name not in cls.icons:

@ -61,6 +58,7 @@ class QAwesomeTextIconFactory:

class QAwesomeIconFactory:
    icons = {}

    @classmethod
    def icon(cls, icon_name, icon_color):
        if icon_name not in cls.icons:
@ -106,7 +104,7 @@ class IntentModel(QtGui.QStandardItemModel):
        self.default_index = 0

        intents_preset = (
            get_presets()
            config.get_presets()
            .get("tools", {})
            .get("pyblish", {})
            .get("ui", {})
@ -492,12 +490,8 @@ class PluginModel(QtGui.QStandardItemModel):
        new_records = result.get("records") or []
        if not has_warning:
            for record in new_records:
                if not hasattr(record, "levelname"):
                    continue

                if str(record.levelname).lower() in [
                    "warning", "critical", "error"
                ]:
                level_no = record.get("levelno")
                if level_no and level_no >= 30:
                    new_flag_states[PluginStates.HasWarning] = True
                    break
@ -791,12 +785,8 @@ class InstanceModel(QtGui.QStandardItemModel):
        new_records = result.get("records") or []
        if not has_warning:
            for record in new_records:
                if not hasattr(record, "levelname"):
                    continue

                if str(record.levelname).lower() in [
                    "warning", "critical", "error"
                ]:
                level_no = record.get("levelno")
                if level_no and level_no >= 30:
                    new_flag_states[InstanceStates.HasWarning] = True
                    break
@ -1012,7 +1002,7 @@ class ArtistProxy(QtCore.QAbstractProxyModel):
            return QtCore.QModelIndex()


class TerminalModel(QtGui.QStandardItemModel):
class TerminalDetailItem(QtGui.QStandardItem):
    key_label_record_map = (
        ("instance", "Instance"),
        ("msg", "Message"),
@ -1025,6 +1015,57 @@ class TerminalModel(QtGui.QStandardItemModel):
        ("msecs", "Millis")
    )

    def __init__(self, record_item):
        self.record_item = record_item
        self.msg = None
        msg = record_item.get("msg")
        if msg is None:
            msg = record_item["label"].split("\n")[0]

        super(TerminalDetailItem, self).__init__(msg)

    def data(self, role=QtCore.Qt.DisplayRole):
        if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
            if self.msg is None:
                self.msg = self.compute_detail_text(self.record_item)
            return self.msg
        return super(TerminalDetailItem, self).data(role)

    def compute_detail_text(self, item_data):
        if item_data["type"] == "info":
            return item_data["label"]

        html_text = ""
        for key, title in self.key_label_record_map:
            if key not in item_data:
                continue
            value = item_data[key]
            text = (
                str(value)
                .replace("<", "&lt;")
                .replace(">", "&gt;")
                .replace('\n', '<br/>')
                .replace(' ', '&nbsp;')
            )

            title_tag = (
                '<span style=\" font-size:8pt; font-weight:600;'
                # ' background-color:#bbb; color:#333;\" >{}:</span> '
                ' color:#fff;\" >{}:</span> '
            ).format(title)

            html_text += (
                '<tr><td width="100%" align=left>{}</td></tr>'
                '<tr><td width="100%">{}</td></tr>'
            ).format(title_tag, text)

        html_text = '<table width="100%" cellspacing="3">{}</table>'.format(
            html_text
        )
        return html_text


class TerminalModel(QtGui.QStandardItemModel):
    item_icon_name = {
        "info": "fa.info",
        "record": "fa.circle",
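
The point of overriding data() in TerminalDetailItem is that the HTML detail text is only built the first time a row is actually displayed, which keeps resets cheap when thousands of log records arrive. The pattern in isolation (a sketch; expensive_format is a hypothetical stand-in for compute_detail_text):

from Qt import QtCore, QtGui

class LazyDetailItem(QtGui.QStandardItem):
    def __init__(self, payload):
        self._payload = payload
        self._text = None
        super(LazyDetailItem, self).__init__()

    def data(self, role=QtCore.Qt.DisplayRole):
        if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
            if self._text is None:
                # Deferred: computed on first display, then cached.
                self._text = expensive_format(self._payload)  # hypothetical
            return self._text
        return super(LazyDetailItem, self).data(role)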
@ -1056,38 +1097,38 @@ class TerminalModel(QtGui.QStandardItemModel):
        self.reset()

    def reset(self):
        self.items_to_set_widget = queue.Queue()
        self.clear()

    def prepare_records(self, result):
    def prepare_records(self, result, suspend_logs):
        prepared_records = []
        instance_name = None
        instance = result["instance"]
        if instance is not None:
            instance_name = instance.data["name"]

        for record in result.get("records") or []:
            if isinstance(record, dict):
                record_item = record
            else:
                record_item = {
                    "label": text_type(record.msg),
                    "type": "record",
                    "levelno": record.levelno,
                    "threadName": record.threadName,
                    "name": record.name,
                    "filename": record.filename,
                    "pathname": record.pathname,
                    "lineno": record.lineno,
                    "msg": text_type(record.msg),
                    "msecs": record.msecs,
                    "levelname": record.levelname
                }
        if not suspend_logs:
            for record in result.get("records") or []:
                if isinstance(record, dict):
                    record_item = record
                else:
                    record_item = {
                        "label": text_type(record.msg),
                        "type": "record",
                        "levelno": record.levelno,
                        "threadName": record.threadName,
                        "name": record.name,
                        "filename": record.filename,
                        "pathname": record.pathname,
                        "lineno": record.lineno,
                        "msg": text_type(record.msg),
                        "msecs": record.msecs,
                        "levelname": record.levelname
                    }

        if instance_name is not None:
            record_item["instance"] = instance_name
                if instance_name is not None:
                    record_item["instance"] = instance_name

        prepared_records.append(record_item)
                prepared_records.append(record_item)

        error = result.get("error")
        if error:
@ -1143,49 +1184,14 @@ class TerminalModel(QtGui.QStandardItemModel):

        self.appendRow(top_item)

        detail_text = self.prepare_detail_text(record_item)
        detail_item = QtGui.QStandardItem(detail_text)
        detail_item = TerminalDetailItem(record_item)
        detail_item.setData(TerminalDetailType, Roles.TypeRole)
        top_item.appendRow(detail_item)
        self.items_to_set_widget.put(detail_item)

    def update_with_result(self, result):
        for record in result["records"]:
            self.append(record)

    def prepare_detail_text(self, item_data):
        if item_data["type"] == "info":
            return item_data["label"]

        html_text = ""
        for key, title in self.key_label_record_map:
            if key not in item_data:
                continue
            value = item_data[key]
            text = (
                str(value)
                .replace("<", "&lt;")
                .replace(">", "&gt;")
                .replace('\n', '<br/>')
                .replace(' ', '&nbsp;')
            )

            title_tag = (
                '<span style=\" font-size:8pt; font-weight:600;'
                # ' background-color:#bbb; color:#333;\" >{}:</span> '
                ' color:#fff;\" >{}:</span> '
            ).format(title)

            html_text += (
                '<tr><td width="100%" align=left>{}</td></tr>'
                '<tr><td width="100%">{}</td></tr>'
            ).format(title_tag, text)

        html_text = '<table width="100%" cellspacing="3">{}</table>'.format(
            html_text
        )
        return html_text


class TerminalProxy(QtCore.QSortFilterProxyModel):
    filter_buttons_checks = {
|
|||
|
|
@ -7,8 +7,8 @@ import numbers
|
|||
import copy
|
||||
import collections
|
||||
|
||||
from .vendor.Qt import QtCore
|
||||
from .vendor.six import text_type
|
||||
from Qt import QtCore
|
||||
from six import text_type
|
||||
import pyblish.api
|
||||
|
||||
root = os.path.dirname(__file__)
|
||||
|
|
|
|||
1827
pype/tools/pyblish_pype/vendor/Qt.py
vendored
File diff suppressed because it is too large
@ -1,4 +1,4 @@
from ..Qt import QtCore
from Qt import QtCore


class Spin:
|
|
@ -5,8 +5,8 @@ from __future__ import print_function
|
|||
import json
|
||||
import os
|
||||
|
||||
from .. import six
|
||||
from ..Qt import QtCore, QtGui
|
||||
import six
|
||||
from Qt import QtCore, QtGui
|
||||
|
||||
|
||||
_default_options = {
|
||||
|
|
|
|||
868
pype/tools/pyblish_pype/vendor/six.py
vendored

@ -1,868 +0,0 @@
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
# Copyright (c) 2010-2015 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.10.0"
|
||||
|
||||
|
||||
# Useful for very coarse version differentiation.
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
PY34 = sys.version_info[0:2] >= (3, 4)
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
try:
|
||||
# This is a bit ugly, but it avoids running this again by
|
||||
# removing this descriptor.
|
||||
delattr(obj.__class__, self.name)
|
||||
except AttributeError:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
_module = self._resolve()
|
||||
value = getattr(_module, attr)
|
||||
setattr(self, attr, value)
|
||||
return value
|
||||
|
||||
|
||||
class _LazyModule(types.ModuleType):
|
||||
|
||||
def __init__(self, name):
|
||||
super(_LazyModule, self).__init__(name)
|
||||
self.__doc__ = self.__class__.__doc__
|
||||
|
||||
def __dir__(self):
|
||||
attrs = ["__doc__", "__name__"]
|
||||
attrs += [attr.name for attr in self._moved_attributes]
|
||||
return attrs
|
||||
|
||||
# Subclasses should override this
|
||||
_moved_attributes = []
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):
|
||||
|
||||
"""
|
||||
A meta path importer to import six.moves and its submodules.
|
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible
|
||||
with Python 2.5 and all existing versions of Python3
|
||||
"""
|
||||
|
||||
def __init__(self, six_module_name):
|
||||
self.name = six_module_name
|
||||
self.known_modules = {}
|
||||
|
||||
def _add_module(self, mod, *fullnames):
|
||||
for fullname in fullnames:
|
||||
self.known_modules[self.name + "." + fullname] = mod
|
||||
|
||||
def _get_module(self, fullname):
|
||||
return self.known_modules[self.name + "." + fullname]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname in self.known_modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def __get_module(self, fullname):
|
||||
try:
|
||||
return self.known_modules[fullname]
|
||||
except KeyError:
|
||||
raise ImportError("This loader does not know module " + fullname)
|
||||
|
||||
def load_module(self, fullname):
|
||||
try:
|
||||
# in case of a reload
|
||||
return sys.modules[fullname]
|
||||
except KeyError:
|
||||
pass
|
||||
mod = self.__get_module(fullname)
|
||||
if isinstance(mod, MovedModule):
|
||||
mod = mod._resolve()
|
||||
else:
|
||||
mod.__loader__ = self
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
|
||||
def is_package(self, fullname):
|
||||
"""
|
||||
Return true, if the named module is a package.
|
||||
|
||||
We need this method to get correct spec objects with
|
||||
Python 3.4 (see PEP451)
|
||||
"""
|
||||
return hasattr(self.__get_module(fullname), "__path__")
|
||||
|
||||
def get_code(self, fullname):
|
||||
"""Return None
|
||||
|
||||
Required, if is_package is implemented"""
|
||||
self.__get_module(fullname) # eventually raises ImportError
|
||||
return None
|
||||
get_source = get_code # same as get_code
|
||||
|
||||
_importer = _SixMetaPathImporter(__name__)
|
||||
|
||||
|
||||
class _MovedItems(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects"""
|
||||
__path__ = [] # mark as package
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("intern", "__builtin__", "sys"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
|
||||
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
|
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||
MovedAttribute("UserList", "UserList", "collections"),
|
||||
MovedAttribute("UserString", "UserString", "collections"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("_thread", "thread", "_thread"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
|
||||
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
|
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||
]
|
||||
# Add windows specific modules.
|
||||
if sys.platform == "win32":
|
||||
_moved_attributes += [
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
if isinstance(attr, MovedModule):
|
||||
_importer._add_module(attr, "moves." + attr.name)
|
||||
del attr
|
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes
|
||||
|
||||
moves = _MovedItems(__name__ + ".moves")
|
||||
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||
|
||||
|
||||
_urllib_parse_moved_attributes = [
|
||||
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("quote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splittag", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splituser", "urllib", "urllib.parse"),
|
||||
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
|
||||
]
|
||||
for attr in _urllib_parse_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||
"moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||
|
||||
|
||||
_urllib_error_moved_attributes = [
|
||||
MovedAttribute("URLError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
|
||||
]
|
||||
for attr in _urllib_error_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_error, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||
"moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||
|
||||
|
||||
_urllib_request_moved_attributes = [
|
||||
MovedAttribute("urlopen", "urllib2", "urllib.request"),
|
||||
MovedAttribute("install_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("build_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("pathname2url", "urllib", "urllib.request"),
|
||||
MovedAttribute("url2pathname", "urllib", "urllib.request"),
|
||||
MovedAttribute("getproxies", "urllib", "urllib.request"),
|
||||
MovedAttribute("Request", "urllib2", "urllib.request"),
|
||||
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
|
||||
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
|
||||
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||
]
|
||||
for attr in _urllib_request_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||
"moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||
|
||||
|
||||
_urllib_response_moved_attributes = [
|
||||
MovedAttribute("addbase", "urllib", "urllib.response"),
|
||||
MovedAttribute("addclosehook", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfo", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfourl", "urllib", "urllib.response"),
|
||||
]
|
||||
for attr in _urllib_response_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_response, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||
"moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||
|
||||
|
||||
_urllib_robotparser_moved_attributes = [
|
||||
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
|
||||
]
|
||||
for attr in _urllib_robotparser_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):
|
||||
|
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||
__path__ = [] # mark as package
|
||||
parse = _importer._get_module("moves.urllib_parse")
|
||||
error = _importer._get_module("moves.urllib_error")
|
||||
request = _importer._get_module("moves.urllib_request")
|
||||
response = _importer._get_module("moves.urllib_response")
|
||||
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||
|
||||
def __dir__(self):
|
||||
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||
"moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_closure = "__closure__"
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
_func_globals = "__globals__"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_closure = "func_closure"
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
_func_globals = "func_globals"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
try:
|
||||
callable = callable
|
||||
except NameError:
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
create_bound_method = types.MethodType
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return func
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
def create_bound_method(func, obj):
|
||||
return types.MethodType(func, obj, obj.__class__)
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return types.MethodType(func, None, cls)
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_closure = operator.attrgetter(_func_closure)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
|
||||
viewkeys = operator.methodcaller("keys")
|
||||
|
||||
viewvalues = operator.methodcaller("values")
|
||||
|
||||
viewitems = operator.methodcaller("items")
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return d.iterkeys(**kw)
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return d.itervalues(**kw)
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return d.iteritems(**kw)
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return d.iterlists(**kw)
|
||||
|
||||
viewkeys = operator.methodcaller("viewkeys")
|
||||
|
||||
viewvalues = operator.methodcaller("viewvalues")
|
||||
|
||||
viewitems = operator.methodcaller("viewitems")
|
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||
_add_doc(iteritems,
|
||||
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||
_add_doc(iterlists,
|
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
|
||||
def u(s):
|
||||
return s
|
||||
unichr = chr
|
||||
import struct
|
||||
int2byte = struct.Struct(">B").pack
|
||||
del struct
|
||||
byte2int = operator.itemgetter(0)
|
||||
indexbytes = operator.getitem
|
||||
iterbytes = iter
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
_assertCountEqual = "assertCountEqual"
|
||||
if sys.version_info[1] <= 1:
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
else:
|
||||
_assertRaisesRegex = "assertRaisesRegex"
|
||||
_assertRegex = "assertRegex"
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
# Workaround for standalone backslash
|
||||
|
||||
def u(s):
|
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||
unichr = unichr
|
||||
int2byte = chr
|
||||
|
||||
def byte2int(bs):
|
||||
return ord(bs[0])
|
||||
|
||||
def indexbytes(buf, i):
|
||||
return ord(buf[i])
|
||||
iterbytes = functools.partial(itertools.imap, ord)
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_assertCountEqual = "assertItemsEqual"
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
def assertCountEqual(self, *args, **kwargs):
|
||||
return getattr(self, _assertCountEqual)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRaisesRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
if PY3:
|
||||
exec_ = getattr(moves.builtins, "exec")
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
"""Execute code in a namespace."""
|
||||
if _globs_ is None:
|
||||
frame = sys._getframe(1)
|
||||
_globs_ = frame.f_globals
|
||||
if _locs_ is None:
|
||||
_locs_ = frame.f_locals
|
||||
del frame
|
||||
elif _locs_ is None:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
""")
|
||||
|
||||
|
||||
if sys.version_info[:2] == (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
if from_value is None:
|
||||
raise value
|
||||
raise value from from_value
|
||||
""")
|
||||
elif sys.version_info[:2] > (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
raise value from from_value
|
||||
""")
|
||||
else:
|
||||
def raise_from(value, from_value):
|
||||
raise value
|
||||
|
||||
|
||||
print_ = getattr(moves.builtins, "print", None)
|
||||
if print_ is None:
|
||||
def print_(*args, **kwargs):
|
||||
"""The new-style print function for Python 2.4 and 2.5."""
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
# If the file has an encoding, encode unicode with it.
|
||||
if (isinstance(fp, file) and
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
errors = getattr(fp, "errors", None)
|
||||
if errors is None:
|
||||
errors = "strict"
|
||||
data = data.encode(fp.encoding, errors)
|
||||
fp.write(data)
|
||||
want_unicode = False
|
||||
sep = kwargs.pop("sep", None)
|
||||
if sep is not None:
|
||||
if isinstance(sep, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(sep, str):
|
||||
raise TypeError("sep must be None or a string")
|
||||
end = kwargs.pop("end", None)
|
||||
if end is not None:
|
||||
if isinstance(end, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(end, str):
|
||||
raise TypeError("end must be None or a string")
|
||||
if kwargs:
|
||||
raise TypeError("invalid keyword arguments to print()")
|
||||
if not want_unicode:
|
||||
for arg in args:
|
||||
if isinstance(arg, unicode):
|
||||
want_unicode = True
|
||||
break
|
||||
if want_unicode:
|
||||
newline = unicode("\n")
|
||||
space = unicode(" ")
|
||||
else:
|
||||
newline = "\n"
|
||||
space = " "
|
||||
if sep is None:
|
||||
sep = space
|
||||
if end is None:
|
||||
end = newline
|
||||
for i, arg in enumerate(args):
|
||||
if i:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
if sys.version_info[:2] < (3, 3):
|
||||
_print = print_
|
||||
|
||||
def print_(*args, **kwargs):
|
||||
fp = kwargs.get("file", sys.stdout)
|
||||
flush = kwargs.pop("flush", False)
|
||||
_print(*args, **kwargs)
|
||||
if flush and fp is not None:
|
||||
fp.flush()
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
if sys.version_info[0:2] < (3, 4):
|
||||
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
|
||||
updated=functools.WRAPPER_UPDATES):
|
||||
def wrapper(f):
|
||||
f = functools.wraps(wrapped, assigned, updated)(f)
|
||||
f.__wrapped__ = wrapped
|
||||
return f
|
||||
return wrapper
|
||||
else:
|
||||
wraps = functools.wraps
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||
# metaclass for one level of class instantiation that replaces itself with
|
||||
# the actual metaclass.
|
||||
class metaclass(meta):
|
||||
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
def add_metaclass(metaclass):
|
||||
"""Class decorator for creating a class with a metaclass."""
|
||||
def wrapper(cls):
|
||||
orig_vars = cls.__dict__.copy()
|
||||
slots = orig_vars.get('__slots__')
|
||||
if slots is not None:
|
||||
if isinstance(slots, str):
|
||||
slots = [slots]
|
||||
for slots_var in slots:
|
||||
orig_vars.pop(slots_var)
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
return metaclass(cls.__name__, cls.__bases__, orig_vars)
|
||||
return wrapper
|
||||
|
||||
|
||||
def python_2_unicode_compatible(klass):
|
||||
"""
|
||||
A decorator that defines __unicode__ and __str__ methods under Python 2.
|
||||
Under Python 3 it does nothing.
|
||||
|
||||
To support Python 2 and 3 with a single code base, define a __str__ method
|
||||
returning text and apply this decorator to the class.
|
||||
"""
|
||||
if PY2:
|
||||
if '__str__' not in klass.__dict__:
|
||||
raise ValueError("@python_2_unicode_compatible cannot be applied "
|
||||
"to %s because it doesn't define __str__()." %
|
||||
klass.__name__)
|
||||
klass.__unicode__ = klass.__str__
|
||||
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
|
||||
return klass
|
||||
|
||||
|
||||
# Complete the moves implementation.
|
||||
# This code is at the end of this module to speed up module loading.
|
||||
# Turn this module into a package.
|
||||
__path__ = [] # required for PEP 302 and PEP 451
|
||||
__package__ = __name__ # see PEP 366 @ReservedAssignment
|
||||
if globals().get("__spec__") is not None:
|
||||
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
|
||||
# Remove other six meta path importers, since they cause problems. This can
|
||||
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
|
||||
# this for some reason.)
|
||||
if sys.meta_path:
|
||||
for i, importer in enumerate(sys.meta_path):
|
||||
# Here's some real nastiness: Another "instance" of the six module might
|
||||
# be floating around. Therefore, we can't use isinstance() to check for
|
||||
# the six meta path importer, since the other six instance will have
|
||||
# inserted an importer with different class.
|
||||
if (type(importer).__name__ == "_SixMetaPathImporter" and
|
||||
importer.name == __name__):
|
||||
del sys.meta_path[i]
|
||||
break
|
||||
del i, importer
|
||||
# Finally, add the importer to the meta path import hook.
|
||||
sys.meta_path.append(_importer)
|
||||
|
|
@ -1,6 +1,14 @@
from .vendor.Qt import QtCore, QtWidgets
from Qt import QtCore, QtWidgets
from . import model
from .constants import Roles

# Imported when used
widgets = None


def _import_widgets():
    global widgets
    if widgets is None:
        from . import widgets


class ArtistView(QtWidgets.QListView):
@ -151,6 +159,8 @@ class TerminalView(QtWidgets.QTreeView):

        self.clicked.connect(self.item_expand)

        _import_widgets()

    def event(self, event):
        if not event.type() == QtCore.QEvent.KeyPress:
            return super(TerminalView, self).event(event)
@ -190,6 +200,23 @@ class TerminalView(QtWidgets.QTreeView):
        self.updateGeometry()
        self.scrollToBottom()

    def expand(self, index):
        """Wrapper to set widget for expanded index."""
        model = index.model()
        row_count = model.rowCount(index)
        is_new = False
        for child_idx in range(row_count):
            child_index = model.index(child_idx, index.column(), index)
            widget = self.indexWidget(child_index)
            if widget is None:
                is_new = True
                msg = child_index.data(QtCore.Qt.DisplayRole)
                widget = widgets.TerminalDetail(msg)
                self.setIndexWidget(child_index, widget)
        super(TerminalView, self).expand(index)
        if is_new:
            self.updateGeometries()

    def resizeEvent(self, event):
        super(self.__class__, self).resizeEvent(event)
        self.model().layoutChanged.emit()
@ -1,5 +1,5 @@
import sys
from .vendor.Qt import QtCore, QtWidgets, QtGui
from Qt import QtCore, QtWidgets, QtGui
from . import model, delegate, view, awesome
from .constants import PluginStates, InstanceStates, Roles
@ -321,11 +321,6 @@ class PerspectiveWidget(QtWidgets.QWidget):
            data = {"records": records}
            self.terminal_model.reset()
            self.terminal_model.update_with_result(data)
            while not self.terminal_model.items_to_set_widget.empty():
                item = self.terminal_model.items_to_set_widget.get()
                widget = TerminalDetail(item.data(QtCore.Qt.DisplayRole))
                index = self.terminal_proxy.mapFromSource(item.index())
                self.terminal_view.setIndexWidget(index, widget)

            self.records.button_toggle_text.setText(
                "{} ({})".format(self.l_rec, len_records)
@ -44,7 +44,7 @@ from functools import partial
from . import delegate, model, settings, util, view, widgets
from .awesome import tags as awesome

from .vendor.Qt import QtCore, QtGui, QtWidgets
from Qt import QtCore, QtGui, QtWidgets
from .constants import (
    PluginStates, PluginActionStates, InstanceStates, GroupStates, Roles
)
@ -54,6 +54,7 @@ class Window(QtWidgets.QDialog):
    def __init__(self, controller, parent=None):
        super(Window, self).__init__(parent=parent)

        self._suspend_logs = False
        # Use plastique style for specific occasions
        # TODO set style name via environment variable
        low_keys = {
@ -95,6 +96,18 @@ class Window(QtWidgets.QDialog):
        header_tab_terminal = QtWidgets.QRadioButton(header_tab_widget)
        header_spacer = QtWidgets.QWidget(header_tab_widget)

        button_suspend_logs_widget = QtWidgets.QWidget()
        button_suspend_logs_widget_layout = QtWidgets.QHBoxLayout(
            button_suspend_logs_widget
        )
        button_suspend_logs_widget_layout.setContentsMargins(0, 10, 0, 10)
        button_suspend_logs = QtWidgets.QPushButton(header_widget)
        button_suspend_logs.setFixedWidth(7)
        button_suspend_logs.setSizePolicy(
            QtWidgets.QSizePolicy.Preferred,
            QtWidgets.QSizePolicy.Expanding
        )
        button_suspend_logs_widget_layout.addWidget(button_suspend_logs)
        header_aditional_btns = QtWidgets.QWidget(header_tab_widget)

        aditional_btns_layout = QtWidgets.QHBoxLayout(header_aditional_btns)
@ -109,9 +122,11 @@ class Window(QtWidgets.QDialog):
        layout_tab.addWidget(header_tab_artist, 0)
        layout_tab.addWidget(header_tab_overview, 0)
        layout_tab.addWidget(header_tab_terminal, 0)
        layout_tab.addWidget(button_suspend_logs_widget, 0)

        # Compress items to the left
        layout_tab.addWidget(header_spacer, 1)
        layout_tab.addWidget(header_aditional_btns, 1)
        layout_tab.addWidget(header_aditional_btns, 0)

        layout = QtWidgets.QHBoxLayout(header_widget)
        layout.setContentsMargins(0, 0, 0, 0)
@ -226,6 +241,10 @@ class Window(QtWidgets.QDialog):

        footer_info = QtWidgets.QLabel(footer_widget)
        footer_spacer = QtWidgets.QWidget(footer_widget)

        footer_button_stop = QtWidgets.QPushButton(
            awesome["stop"], footer_widget
        )
        footer_button_reset = QtWidgets.QPushButton(
            awesome["refresh"], footer_widget
        )
@ -235,14 +254,12 @@ class Window(QtWidgets.QDialog):
        footer_button_play = QtWidgets.QPushButton(
            awesome["play"], footer_widget
        )
        footer_button_stop = QtWidgets.QPushButton(
            awesome["stop"], footer_widget
        )

        layout = QtWidgets.QHBoxLayout()
        layout.setContentsMargins(5, 5, 5, 5)
        layout.addWidget(footer_info, 0)
        layout.addWidget(footer_spacer, 1)

        layout.addWidget(footer_button_stop, 0)
        layout.addWidget(footer_button_reset, 0)
        layout.addWidget(footer_button_validate, 0)
@ -342,10 +359,11 @@ class Window(QtWidgets.QDialog):
            "TerminalView": terminal_view,

            # Buttons
            "Play": footer_button_play,
            "Validate": footer_button_validate,
            "Reset": footer_button_reset,
            "SuspendLogsBtn": button_suspend_logs,
            "Stop": footer_button_stop,
            "Reset": footer_button_reset,
            "Validate": footer_button_validate,
            "Play": footer_button_play,

            # Misc
            "HeaderSpacer": header_spacer,
@ -370,10 +388,11 @@ class Window(QtWidgets.QDialog):
            overview_page,
            terminal_page,
            footer_widget,
            footer_button_play,
            footer_button_validate,
            button_suspend_logs,
            footer_button_stop,
            footer_button_reset,
            footer_button_validate,
            footer_button_play,
            footer_spacer,
            closing_placeholder
        ):
@ -415,10 +434,11 @@ class Window(QtWidgets.QDialog):
            QtCore.Qt.DirectConnection
        )

        artist_view.toggled.connect(self.on_item_toggled)
        overview_instance_view.toggled.connect(self.on_item_toggled)
        overview_plugin_view.toggled.connect(self.on_item_toggled)
        artist_view.toggled.connect(self.on_instance_toggle)
        overview_instance_view.toggled.connect(self.on_instance_toggle)
        overview_plugin_view.toggled.connect(self.on_plugin_toggle)

        button_suspend_logs.clicked.connect(self.on_suspend_clicked)
        footer_button_stop.clicked.connect(self.on_stop_clicked)
        footer_button_reset.clicked.connect(self.on_reset_clicked)
        footer_button_validate.clicked.connect(self.on_validate_clicked)

@ -442,10 +462,11 @@ class Window(QtWidgets.QDialog):
        self.terminal_filters_widget = terminal_filters_widget

        self.footer_widget = footer_widget
        self.button_suspend_logs = button_suspend_logs
        self.footer_button_stop = footer_button_stop
        self.footer_button_reset = footer_button_reset
        self.footer_button_validate = footer_button_validate
        self.footer_button_play = footer_button_play
        self.footer_button_stop = footer_button_stop

        self.overview_instance_view = overview_instance_view
        self.overview_plugin_view = overview_plugin_view

@ -537,7 +558,29 @@ class Window(QtWidgets.QDialog):
        ):
            instance_item.setData(enable_value, Roles.IsEnabledRole)

    def on_item_toggled(self, index, state=None):
    def on_instance_toggle(self, index, state=None):
        """An item is requesting to be toggled"""
        if not index.data(Roles.IsOptionalRole):
            return self.info("This item is mandatory")

        if self.controller.collect_state != 1:
            return self.info("Cannot toggle")

        current_state = index.data(QtCore.Qt.CheckStateRole)
        if state is None:
            state = not current_state

        instance_id = index.data(Roles.ObjectIdRole)
        instance_item = self.instance_model.instance_items[instance_id]
        instance_item.setData(state, QtCore.Qt.CheckStateRole)

        self.controller.instance_toggled.emit(
            instance_item.instance, current_state, state
        )

        self.update_compatibility()

    def on_plugin_toggle(self, index, state=None):
        """An item is requesting to be toggled"""
        if not index.data(Roles.IsOptionalRole):
            return self.info("This item is mandatory")

@ -548,7 +591,10 @@ class Window(QtWidgets.QDialog):
        if state is None:
            state = not index.data(QtCore.Qt.CheckStateRole)

        index.model().setData(index, state, QtCore.Qt.CheckStateRole)
        plugin_id = index.data(Roles.ObjectIdRole)
        plugin_item = self.plugin_model.plugin_items[plugin_id]
        plugin_item.setData(state, QtCore.Qt.CheckStateRole)

        self.update_compatibility()

    def on_tab_changed(self, target):

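The two hunks above split the single on_item_toggled handler into per-model handlers; the instance handler re-broadcasts the change through the controller. A minimal sketch of that signal contract, assuming only what the emit call shows (a controller-side Qt signal carrying the instance, its previous state, and its new state; the stub class is illustrative, not the project's actual controller):

    from Qt import QtCore

    class ControllerStub(QtCore.QObject):
        # Illustrative stand-in: the real controller declares an
        # equivalent signal that on_instance_toggle emits with
        # (instance, current_state, new_state).
        instance_toggled = QtCore.Signal(object, object, object)

    controller = ControllerStub()
    controller.instance_toggled.connect(
        lambda instance, was, now: print(instance, was, now)
    )
    controller.instance_toggled.emit("SomeInstance", True, False)
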
@ -587,6 +633,13 @@ class Window(QtWidgets.QDialog):
        self.footer_button_play.setEnabled(False)
        self.footer_button_stop.setEnabled(False)

    def on_suspend_clicked(self):
        self._suspend_logs = not self._suspend_logs
        if self.state["current_page"] == "terminal":
            self.on_tab_changed("overview")

        self.tabs["terminal"].setVisible(not self._suspend_logs)

    def on_comment_entered(self):
        """The user has typed a comment."""
        self.controller.context.data["comment"] = self.comment_box.text()

@ -701,14 +754,14 @@ class Window(QtWidgets.QDialog):
        self.on_tab_changed(self.state["current_page"])
        self.update_compatibility()

        self.footer_button_validate.setEnabled(True)
        self.footer_button_reset.setEnabled(True)
        self.footer_button_stop.setEnabled(False)
        self.footer_button_play.setEnabled(True)
        self.footer_button_play.setFocus()
        self.button_suspend_logs.setEnabled(False)

        self.footer_button_validate.setEnabled(False)
        self.footer_button_reset.setEnabled(False)
        self.footer_button_stop.setEnabled(True)
        self.footer_button_play.setEnabled(False)

    def on_passed_group(self, order):

        for group_item in self.instance_model.group_items.values():
            if self.overview_instance_view.isExpanded(group_item.index()):
                continue

@ -740,16 +793,28 @@ class Window(QtWidgets.QDialog):

    def on_was_stopped(self):
        errored = self.controller.errored
        self.footer_button_play.setEnabled(not errored)
        self.footer_button_validate.setEnabled(
            not errored and not self.controller.validated
        )
        if self.controller.collect_state == 0:
            self.footer_button_play.setEnabled(False)
            self.footer_button_validate.setEnabled(False)
        else:
            self.footer_button_play.setEnabled(not errored)
            self.footer_button_validate.setEnabled(
                not errored and not self.controller.validated
            )
            self.footer_button_play.setFocus()

        self.footer_button_reset.setEnabled(True)
        self.footer_button_stop.setEnabled(False)
        if errored:
            self.footer_widget.setProperty("success", 0)
            self.footer_widget.style().polish(self.footer_widget)

        suspend_log_bool = (
            self.controller.collect_state == 1
            and not self.controller.stopped
        )
        self.button_suspend_logs.setEnabled(suspend_log_bool)

    def on_was_skipped(self, plugin):
        plugin_item = self.plugin_model.plugin_items[plugin.id]
        plugin_item.setData(

@ -809,17 +874,15 @@ class Window(QtWidgets.QDialog):
        if self.tabs["artist"].isChecked():
            self.tabs["overview"].toggle()

        result["records"] = self.terminal_model.prepare_records(result)
        result["records"] = self.terminal_model.prepare_records(
            result,
            self._suspend_logs
        )

        plugin_item = self.plugin_model.update_with_result(result)
        instance_item = self.instance_model.update_with_result(result)

        self.terminal_model.update_with_result(result)
        while not self.terminal_model.items_to_set_widget.empty():
            item = self.terminal_model.items_to_set_widget.get()
            widget = widgets.TerminalDetail(item.data(QtCore.Qt.DisplayRole))
            index = self.terminal_proxy.mapFromSource(item.index())
            self.terminal_view.setIndexWidget(index, widget)

        self.update_compatibility()

@ -872,16 +935,19 @@ class Window(QtWidgets.QDialog):
        self.footer_button_validate.setEnabled(False)
        self.footer_button_play.setEnabled(False)

        self.button_suspend_logs.setEnabled(False)

        util.defer(5, self.controller.validate)

    def publish(self):
        self.info(self.tr("Preparing publish.."))

        self.footer_button_stop.setEnabled(True)
        self.footer_button_reset.setEnabled(False)
        self.footer_button_validate.setEnabled(False)
        self.footer_button_play.setEnabled(False)

        self.button_suspend_logs.setEnabled(False)

        util.defer(5, self.controller.publish)

    def act(self, plugin_item, action):

@ -913,29 +979,24 @@ class Window(QtWidgets.QDialog):
        plugin_item = self.plugin_model.plugin_items[result["plugin"].id]
        action_state = plugin_item.data(Roles.PluginActionProgressRole)
        action_state |= PluginActionStates.HasFinished
        result["records"] = self.terminal_model.prepare_records(
            result,
            self._suspend_logs
        )

        error = result.get("error")
        if error:
        records = result.get("records") or []
        if result.get("error"):
            action_state |= PluginActionStates.HasFailed
            fname, line_no, func, exc = error.traceback

            records.append({
                "label": str(error),
                "type": "error",
                "filename": str(fname),
                "lineno": str(line_no),
                "func": str(func),
                "traceback": error.formatted_traceback
            })

            result["records"] = records

        plugin_item.setData(action_state, Roles.PluginActionProgressRole)

        self.plugin_model.update_with_result(result)
        self.instance_model.update_with_result(result)
        self.terminal_model.update_with_result(result)
        plugin_item = self.plugin_model.update_with_result(result)
        instance_item = self.instance_model.update_with_result(result)

        if self.perspective_widget.isVisible():
            self.perspective_widget.update_context(
                plugin_item, instance_item
            )

    def closeEvent(self, event):
        """Perform post-flight checks before closing

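The action-result hunk above folds a failed action's traceback into the same record shape the terminal model consumes. A small sketch of that conversion in isolation (error_record and the sample tuple are illustrative; the diff only shows that error.traceback unpacks to filename, line number, function, and exception text):

    def error_record(error, tb_tuple, formatted_traceback):
        fname, line_no, func, _exc = tb_tuple
        return {
            "label": str(error),
            "type": "error",
            "filename": str(fname),
            "lineno": str(line_no),
            "func": str(func),
            "traceback": formatted_traceback
        }

    record = error_record(
        ValueError("boom"), ("window.py", 42, "act", "boom"), "Traceback ..."
    )
    print(record["label"], record["lineno"])  # boom 42
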
@ -1 +1 @@
__version__ = "2.9.1"
__version__ = "2.10.0"

BIN
res/app_icons/celaction_local.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 40 KiB

BIN
res/app_icons/celaction_remotel.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 36 KiB

@ -56,13 +56,6 @@
        "pattern": "^\\w*$",
        "example": "maya2016"
    },
    "AVALON_MONGO": {
        "description": "Address to the asset database",
        "type": "string",
        "pattern": "^mongodb://[\\w/@:.]*$",
        "example": "mongodb://localhost:27017",
        "default": "mongodb://localhost:27017"
    },
    "AVALON_DB": {
        "description": "Name of database",
        "type": "string",
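
With the AVALON_MONGO entry dropped from the schema, the connection string is no longer validated as a single environment value. A hedged sketch of assembling an equivalent mongodb:// URL from separate components (the component names and defaults here are illustrative, not the pipeline's actual keys):

    import os

    def mongo_url_from_components(env=None):
        # Illustrative component names; the environment keys the
        # pipeline actually reads may differ.
        if env is None:
            env = os.environ
        host = env.get("MONGO_HOST", "localhost")
        port = env.get("MONGO_PORT", "27017")
        return "mongodb://{}:{}".format(host, port)

    print(mongo_url_from_components({}))  # mongodb://localhost:27017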