Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit c44a16d142: Merge branch 'develop' into 3.0/anatomy_load_from_settings
94 changed files with 1939 additions and 2346 deletions
@@ -81,6 +81,26 @@ def validate_path_string(path: str) -> (bool, str):
     return False, "Not implemented yet"


+def add_acre_to_sys_path():
+    """Add full path of acre module to sys.path on ignition."""
+    try:
+        # Skip if it is possible to import
+        import acre
+
+    except ImportError:
+        # Full path to acre repository relative to current file
+        acre_dir = os.path.join(
+            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+            "repos",
+            "acre"
+        )
+        # Add path to sys.path
+        sys.path.append(acre_dir)
+
+    # Validate that acre can be imported
+    import acre
+
+
 def load_environments(sections: list = None) -> dict:
     """Load environments from Pype.

@@ -94,11 +114,9 @@ def load_environments(sections: list = None) -> dict:
         dict of str: loaded and processed environments.

     """
-    try:
-        import acre
-    except ImportError:
-        sys.path.append("repos/acre")
-        import acre
+    add_acre_to_sys_path()
+    import acre

     from pype import settings

     all_env = settings.get_environments()
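For reference, a minimal sketch of the call pattern these helpers enable; it mirrors how pype.py below adopts them:

from igniter.tools import add_acre_to_sys_path, load_environments

# Make the bundled acre repository importable before anything imports acre.
add_acre_to_sys_path()
import acre  # noqa: E402

env = load_environments()  # dict of loaded and processed environments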
pype.py (10 changed lines)
@@ -44,15 +44,13 @@ import re
 import sys
 import traceback

-from igniter.tools import load_environments
+from igniter.tools import load_environments, add_acre_to_sys_path

-try:
-    import acre
-except ImportError:
-    sys.path.append("repos/acre")
-    import acre
 from igniter import BootstrapRepos

+add_acre_to_sys_path()
+import acre


 def set_environments() -> None:
     """Set loaded environments.
@@ -1,208 +0,0 @@
import logging
import os
import winreg
import shutil
from pype.lib import PypeHook
from pype.api import (
    Anatomy,
    Logger,
    get_last_version_from_path
)

from avalon import io, api, lib

log = logging.getLogger(__name__)


class CelactionPrelaunchHook(PypeHook):
    """
    This hook checks whether the current workfile path has a CelAction
    project inside. If not, it initializes one and finally passes the
    project path to the CelAction launcher shell script via an
    environment variable.
    """
    workfile_ext = "scn"

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:
        if not env:
            env = os.environ

        # initialize
        self._S = api.Session

        # get publish version of celaction
        app = "celaction_publish"

        # get context variables
        project = self._S["AVALON_PROJECT"] = env["AVALON_PROJECT"]
        asset = self._S["AVALON_ASSET"] = env["AVALON_ASSET"]
        task = self._S["AVALON_TASK"] = env["AVALON_TASK"]
        workdir = self._S["AVALON_WORKDIR"] = env["AVALON_WORKDIR"]

        # get workfile path
        anatomy_filled = self.get_anatomy_filled()
        workfile = anatomy_filled["work"]["file"]
        version = anatomy_filled["version"]

        # create workdir if it doesn't exist
        os.makedirs(workdir, exist_ok=True)
        self.log.info(f"Work dir is: `{workdir}`")

        # get last version of workfile
        workfile_last = env.get("AVALON_LAST_WORKFILE")
        self.log.debug(f"_ workfile_last: `{workfile_last}`")

        if workfile_last:
            workfile = workfile_last

        workfile_path = os.path.join(workdir, workfile)

        # copy workfile from template if one doesn't exist on the path yet
        if not os.path.isfile(workfile_path):
            # try to get path from environment or use default
            # from `pype.celaction` dir
            template_path = env.get("CELACTION_TEMPLATE") or os.path.join(
                env.get("PYPE_MODULE_ROOT"),
                "pype/hosts/celaction/celaction_template_scene.scn"
            )
            self.log.info(
                f"Creating workfile from template: `{template_path}`")
            shutil.copy2(
                os.path.normpath(template_path),
                os.path.normpath(workfile_path)
            )

        self.log.info(f"Workfile to open: `{workfile_path}`")

        # adding compulsory environment var for opening the file
        env["PYPE_CELACTION_PROJECT_FILE"] = workfile_path

        # setting output parameters
        path = r"Software\CelAction\CelAction2D\User Settings"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(
            winreg.HKEY_CURRENT_USER,
            "Software\\CelAction\\CelAction2D\\User Settings", 0,
            winreg.KEY_ALL_ACCESS)

        # TODO: change to root path and pyblish standalone to premiere way
        pype_root_path = os.getenv("PYPE_SETUP_PATH")
        path = os.path.join(pype_root_path,
                            "pype.bat")

        winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, path)

        parameters = [
            "launch",
            f"--app {app}",
            f"--project {project}",
            f"--asset {asset}",
            f"--task {task}",
            "--currentFile \\\"\"*SCENE*\"\\\"",
            "--chunk 10",
            "--frameStart *START*",
            "--frameEnd *END*",
            "--resolutionWidth *X*",
            "--resolutionHeight *Y*",
            # "--programDir \"'*PROGPATH*'\""
        ]
        winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
                          " ".join(parameters))

        # setting resolution parameters
        path = r"Software\CelAction\CelAction2D\User Settings\Dialogs"
        path += r"\SubmitOutput"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
                              winreg.KEY_ALL_ACCESS)
        winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
        winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920)
        winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080)

        # making sure message dialogs don't appear when overwriting
        path = r"Software\CelAction\CelAction2D\User Settings\Messages"
        path += r"\OverwriteScene"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
                              winreg.KEY_ALL_ACCESS)
        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)

        path = r"Software\CelAction\CelAction2D\User Settings\Messages"
        path += r"\SceneSaved"
        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
        hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
                              winreg.KEY_ALL_ACCESS)
        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)

        return True

    def get_anatomy_filled(self):
        root_path = api.registered_root()
        project_name = self._S["AVALON_PROJECT"]
        asset_name = self._S["AVALON_ASSET"]

        io.install()
        project_entity = io.find_one({
            "type": "project",
            "name": project_name
        })
        assert project_entity, (
            "Project '{0}' was not found."
        ).format(project_name)
        log.debug("Collected Project \"{}\"".format(project_entity))

        asset_entity = io.find_one({
            "type": "asset",
            "name": asset_name,
            "parent": project_entity["_id"]
        })
        assert asset_entity, (
            "No asset found by the name '{0}' in project '{1}'"
        ).format(asset_name, project_name)

        project_name = project_entity["name"]

        log.info(
            "Anatomy object collected for project \"{}\".".format(project_name)
        )

        hierarchy_items = asset_entity["data"]["parents"]
        hierarchy = ""
        if hierarchy_items:
            hierarchy = os.path.join(*hierarchy_items)

        template_data = {
            "root": root_path,
            "project": {
                "name": project_name,
                "code": project_entity["data"].get("code")
            },
            "asset": asset_entity["name"],
            "hierarchy": hierarchy.replace("\\", "/"),
            "task": self._S["AVALON_TASK"],
            "ext": self.workfile_ext,
            "version": 1,
            "username": os.getenv("PYPE_USERNAME", "").strip()
        }

        avalon_app_name = os.environ.get("AVALON_APP_NAME")
        if avalon_app_name:
            application_def = lib.get_application(avalon_app_name)
            app_dir = application_def.get("application_dir")
            if app_dir:
                template_data["app"] = app_dir

        anatomy = Anatomy(project_name)
        anatomy_filled = anatomy.format_all(template_data).get_solved()

        return anatomy_filled
@@ -1,61 +0,0 @@
import os
import traceback
import importlib
from pype.lib import PypeHook
from pype.api import Logger
from pype.hosts.fusion import utils


class FusionPrelaunch(PypeHook):
    """
    This hook will check if the current workfile path has a Fusion
    project inside.
    """

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:

        if not env:
            env = os.environ

        # making sure Python 3.6 is installed at the provided path
        py36_dir = os.path.normpath(env.get("PYTHON36", ""))
        assert os.path.isdir(py36_dir), (
            "Python 3.6 is not installed at the provided folder path. Either "
            "make sure the `environments\fusion.json` has `PYTHON36` "
            "set correctly or make sure Python 3.6 is installed "
            f"in the given path. \nPYTHON36: `{py36_dir}`"
        )
        self.log.info(f"Path to Fusion Python folder: `{py36_dir}`...")
        env["PYTHON36"] = py36_dir

        # setting utility scripts dir for script syncing
        us_dir = os.path.normpath(env.get("FUSION_UTILITY_SCRIPTS_DIR", ""))
        assert os.path.isdir(us_dir), (
            "Fusion utility script dir does not exist. Either make sure "
            "the `environments\fusion.json` has "
            "`FUSION_UTILITY_SCRIPTS_DIR` set correctly or reinstall "
            "DaVinci Resolve. \n"
            f"FUSION_UTILITY_SCRIPTS_DIR: `{us_dir}`"
        )

        try:
            __import__("avalon.fusion")
            __import__("pyblish")

        except ImportError as e:
            print(traceback.format_exc())
            print("pyblish: Could not load integration: %s " % e)

        else:
            # Fusion Setup integration
            importlib.reload(utils)
            utils.setup(env)

        return True
pype/hooks/global/pre_add_last_workfile_arg.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import os
from pype.lib import PreLaunchHook


class AddLastWorkfileToLaunchArgs(PreLaunchHook):
    """Add last workfile path to launch arguments.

    This is not possible to do for all applications the same way.
    """

    order = 0
    app_groups = ["maya", "nuke", "nukex", "hiero", "nukestudio"]

    def execute(self):
        if not self.data.get("start_last_workfile"):
            self.log.info("It is set to not start last workfile on start.")
            return

        last_workfile = self.data.get("last_workfile_path")
        if not last_workfile:
            self.log.warning("Last workfile was not collected.")
            return

        if not os.path.exists(last_workfile):
            self.log.info("Current context does not have any workfile yet.")
            return

        # Add path to workfile to arguments
        self.launch_context.launch_args.append(last_workfile)
pype/hooks/global/pre_with_windows_shell.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import os
from pype.lib import PreLaunchHook


class LaunchWithWindowsShell(PreLaunchHook):
    """Add a shell command before the executable.

    Some hosts have issues when launched directly from Python. In that case
    it is possible to prepend a shell executable, which will trigger the
    process instead.
    """

    order = 10
    app_groups = ["nuke", "nukex", "hiero", "nukestudio"]
    platforms = ["windows"]

    def execute(self):
        # Get comspec which is cmd.exe in most cases.
        comspec = os.environ.get("COMSPEC", "cmd.exe")
        # Add comspec to arguments list with the "/c" switch
        new_args = [comspec, "/c"]
        new_args.extend(self.launch_context.launch_args)
        # Replace launch args with the new list
        self.launch_context.launch_args = new_args
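Taken together with AddLastWorkfileToLaunchArgs above (assuming hooks run in ascending `order`, so 0 before 10), the two hooks compose as sketched here; the paths are hypothetical:

# Starting arguments collected for the application.
launch_args = ["C:/Nuke12.2/Nuke12.2.exe"]

# order 0: AddLastWorkfileToLaunchArgs appends the workfile if it exists.
launch_args.append("C:/work/shot010_comp_v012.nk")

# order 10: LaunchWithWindowsShell prepends the shell on Windows.
launch_args = ["cmd.exe", "/c"] + launch_args
# -> ["cmd.exe", "/c", "C:/Nuke12.2/Nuke12.2.exe", "C:/work/shot010_comp_v012.nk"]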
@@ -1,15 +0,0 @@
import os
from pype.lib import PreLaunchHook


class HieroLaunchArguments(PreLaunchHook):
    order = 0
    app_groups = ["hiero"]

    def execute(self):
        """Prepare subprocess launch arguments for Hiero."""
        # Add path to workfile to arguments
        if self.data.get("start_last_workfile"):
            last_workfile = self.data.get("last_workfile_path")
            if os.path.exists(last_workfile):
                self.launch_context.launch_args.append(last_workfile)
@@ -1,16 +0,0 @@
import os
from pype.lib import PreLaunchHook


class MayaLaunchArguments(PreLaunchHook):
    """Add path to last workfile to launch arguments."""
    order = 0
    app_groups = ["maya"]

    def execute(self):
        """Prepare subprocess launch arguments for Maya."""
        # Add path to workfile to arguments
        if self.data.get("start_last_workfile"):
            last_workfile = self.data.get("last_workfile_path")
            if os.path.exists(last_workfile):
                self.launch_context.launch_args.append(last_workfile)
@@ -1,15 +0,0 @@
import os
from pype.lib import PreLaunchHook


class NukeStudioLaunchArguments(PreLaunchHook):
    order = 0
    app_groups = ["nukestudio"]

    def execute(self):
        """Prepare subprocess launch arguments for NukeStudio."""
        # Add path to workfile to arguments
        if self.data.get("start_last_workfile"):
            last_workfile = self.data.get("last_workfile_path")
            if os.path.exists(last_workfile):
                self.launch_context.launch_args.append(last_workfile)
@@ -1,15 +0,0 @@
import os
from pype.lib import PreLaunchHook


class NukeXLaunchArguments(PreLaunchHook):
    order = 0
    app_groups = ["nukex"]

    def execute(self):
        """Prepare subprocess launch arguments for NukeX."""
        # Add path to workfile to arguments
        if self.data.get("start_last_workfile"):
            last_workfile = self.data.get("last_workfile_path")
            if os.path.exists(last_workfile):
                self.launch_context.launch_args.append(last_workfile)
@@ -1,23 +0,0 @@
import pype.lib
from pype.api import Logger


class PhotoshopPrelaunch(pype.lib.PypeHook):
    """This hook will check for the existence of PyWin.

    PyWin is a requirement for the Photoshop integration.
    """
    project_code = None

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:
        output = pype.lib._subprocess(["pip", "install", "pywin32==227"])
        self.log.info(output)
        return True
@@ -1,161 +0,0 @@
import os
import traceback
import winreg
from avalon import api, io, lib
from pype.lib import PypeHook
from pype.api import Logger, Anatomy
from pype.hosts.premiere import lib as prlib


class PremierePrelaunch(PypeHook):
    """
    This hook checks whether the current workfile path has an Adobe Premiere
    project inside. If not, it initializes one and finally passes the
    project path to the Premiere launcher shell script via an
    environment variable.
    """
    project_code = None
    reg_string_value = [{
        "path": r"Software\Adobe\CSXS.9",
        "name": "PlayerDebugMode",
        "type": winreg.REG_SZ,
        "value": "1"
    }]

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:

        if not env:
            env = os.environ

        # initialize
        self._S = api.Session

        # get context variables
        self._S["AVALON_PROJECT"] = env["AVALON_PROJECT"]
        self._S["AVALON_ASSET"] = env["AVALON_ASSET"]
        task = self._S["AVALON_TASK"] = env["AVALON_TASK"]

        # get workfile path
        anatomy_filled = self.get_anatomy_filled()

        # if the anatomy template should have a different root for a
        # particular task, just add for example > work[conforming]:
        workfile_search_key = f"work[{task.lower()}]"
        workfile_key = anatomy_filled.get(
            workfile_search_key,
            anatomy_filled.get("work")
        )
        workdir = env["AVALON_WORKDIR"] = workfile_key["folder"]

        # create workdir if it doesn't exist
        os.makedirs(workdir, exist_ok=True)
        self.log.info(f"Work dir is: `{workdir}`")

        # adding project code to env
        env["AVALON_PROJECT_CODE"] = self.project_code

        # add keys to registry
        self.modify_registry()

        # start avalon
        try:
            __import__("pype.hosts.premiere")
            __import__("pyblish")

        except ImportError as e:
            print(traceback.format_exc())
            print("pyblish: Could not load integration: %s " % e)

        else:
            # Premiere Setup integration
            prlib.setup(env)

        return True

    def modify_registry(self):
        # adding key to registry
        for key in self.reg_string_value:
            winreg.CreateKey(winreg.HKEY_CURRENT_USER, key["path"])
            rg_key = winreg.OpenKey(
                key=winreg.HKEY_CURRENT_USER,
                sub_key=key["path"],
                reserved=0,
                access=winreg.KEY_ALL_ACCESS)

            winreg.SetValueEx(
                rg_key,
                key["name"],
                0,
                key["type"],
                key["value"]
            )

    def get_anatomy_filled(self):
        root_path = api.registered_root()
        project_name = self._S["AVALON_PROJECT"]
        asset_name = self._S["AVALON_ASSET"]

        io.install()
        project_entity = io.find_one({
            "type": "project",
            "name": project_name
        })
        assert project_entity, (
            "Project '{0}' was not found."
        ).format(project_name)
        self.log.debug("Collected Project \"{}\"".format(project_entity))

        asset_entity = io.find_one({
            "type": "asset",
            "name": asset_name,
            "parent": project_entity["_id"]
        })
        assert asset_entity, (
            "No asset found by the name '{0}' in project '{1}'"
        ).format(asset_name, project_name)

        project_name = project_entity["name"]
        self.project_code = project_entity["data"].get("code")

        self.log.info(
            "Anatomy object collected for project \"{}\".".format(project_name)
        )

        hierarchy_items = asset_entity["data"]["parents"]
        hierarchy = ""
        if hierarchy_items:
            hierarchy = os.path.join(*hierarchy_items)

        template_data = {
            "root": root_path,
            "project": {
                "name": project_name,
                "code": self.project_code
            },
            "asset": asset_entity["name"],
            "hierarchy": hierarchy.replace("\\", "/"),
            "task": self._S["AVALON_TASK"],
            "ext": "ppro",
            "version": 1,
            "username": os.getenv("PYPE_USERNAME", "").strip()
        }

        avalon_app_name = os.environ.get("AVALON_APP_NAME")
        if avalon_app_name:
            application_def = lib.get_application(avalon_app_name)
            app_dir = application_def.get("application_dir")
            if app_dir:
                template_data["app"] = app_dir

        anatomy = Anatomy(project_name)
        anatomy_filled = anatomy.format_all(template_data).get_solved()

        return anatomy_filled
@@ -1,69 +0,0 @@
import os
import traceback
import importlib
from pype.lib import PypeHook
from pype.api import Logger
from pype.hosts.resolve import utils


class ResolvePrelaunch(PypeHook):
    """
    This hook checks whether the current workfile path has a Resolve
    project inside. If not, it initializes one and finally passes the
    project path to the Resolve launcher shell script via an
    environment variable.
    """

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:

        if not env:
            env = os.environ

        # making sure Python 3.6 is installed at the provided path
        py36_dir = os.path.normpath(env.get("PYTHON36_RESOLVE", ""))
        assert os.path.isdir(py36_dir), (
            "Python 3.6 is not installed at the provided folder path. Either "
            "make sure the `environments\resolve.json` has "
            "`PYTHON36_RESOLVE` set correctly or make sure Python 3.6 is "
            f"installed in the given path. \nPYTHON36_RESOLVE: `{py36_dir}`"
        )
        self.log.info(f"Path to Resolve Python folder: `{py36_dir}`...")
        env["PYTHON36_RESOLVE"] = py36_dir

        # setting utility scripts dir for script syncing
        us_dir = os.path.normpath(env.get("RESOLVE_UTILITY_SCRIPTS_DIR", ""))
        assert os.path.isdir(us_dir), (
            "Resolve utility script dir does not exist. Either make sure "
            "the `environments\resolve.json` has "
            "`RESOLVE_UTILITY_SCRIPTS_DIR` set correctly or reinstall "
            "DaVinci Resolve. \n"
            f"RESOLVE_UTILITY_SCRIPTS_DIR: `{us_dir}`"
        )
        self.log.debug(f"-- us_dir: `{us_dir}`")

        # correctly format path for pre python script
        pre_py_sc = os.path.normpath(env.get("PRE_PYTHON_SCRIPT", ""))
        env["PRE_PYTHON_SCRIPT"] = pre_py_sc
        self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...")
        try:
            __import__("pype.hosts.resolve")
            __import__("pyblish")

        except ImportError as e:
            print(traceback.format_exc())
            print("pyblish: Could not load integration: %s " % e)

        else:
            # Resolve Setup integration
            importlib.reload(utils)
            self.log.debug(f"-- utils.__file__: `{utils.__file__}`")
            utils.setup(env)

        return True
@@ -1,136 +0,0 @@
import os
import shutil
import platform
import pype.lib
from pype.api import Anatomy, Logger
import getpass
import avalon.api


class TvpaintPrelaunchHook(pype.lib.PypeHook):
    """
    Workfile preparation hook
    """
    host_name = "tvpaint"

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def install_pywin(self):
        if platform.system().lower() != "windows":
            return

        try:
            from win32com.shell import shell
        except Exception:
            output = pype.lib._subprocess(["pip", "install", "pywin32==227"])
            self.log.info(output)

    def execute(self, *args, env: dict = None) -> bool:
        if not env:
            env = os.environ

        self.install_pywin()

        # get context variables
        project_name = env["AVALON_PROJECT"]
        asset_name = env["AVALON_ASSET"]
        task_name = env["AVALON_TASK"]
        workdir = env["AVALON_WORKDIR"]
        extension = avalon.api.HOST_WORKFILE_EXTENSIONS[self.host_name][0]

        # get workfile path
        workfile_path = self.get_anatomy_filled(
            workdir, project_name, asset_name, task_name)

        # create workdir if it doesn't exist
        os.makedirs(workdir, exist_ok=True)
        self.log.info(f"Work dir is: `{workdir}`")

        # get last version of workfile
        workfile_last = env.get("AVALON_LAST_WORKFILE")
        self.log.debug(f"_ workfile_last: `{workfile_last}`")

        if workfile_last:
            workfile = workfile_last
            workfile_path = os.path.join(workdir, workfile)

        # copy workfile from template if one doesn't exist on the path yet
        if not os.path.isfile(workfile_path):
            # try to get path from environment or use default
            # from `pype.hosts.tvpaint` dir
            template_path = env.get("TVPAINT_TEMPLATE") or os.path.join(
                env.get("PYPE_MODULE_ROOT"),
                "pype/hosts/tvpaint/template.tvpp"
            )

            # try to get template from project config folder
            proj_config_path = os.path.join(
                env["PYPE_PROJECT_CONFIGS"], project_name)
            if os.path.exists(proj_config_path):

                template_file = None
                for f in os.listdir(proj_config_path):
                    if extension in os.path.splitext(f):
                        template_file = f

                if template_file:
                    template_path = os.path.join(
                        proj_config_path, template_file)
            self.log.info(
                f"Creating workfile from template: `{template_path}`")

            # copy template to new destination
            shutil.copy2(
                os.path.normpath(template_path),
                os.path.normpath(workfile_path)
            )

        self.log.info(f"Workfile to open: `{workfile_path}`")

        # adding compulsory environment var for opening the file
        env["PYPE_TVPAINT_PROJECT_FILE"] = workfile_path

        return True

    def get_anatomy_filled(self, workdir, project_name, asset_name, task_name):
        dbcon = avalon.api.AvalonMongoDB()
        dbcon.install()
        dbcon.Session["AVALON_PROJECT"] = project_name
        project_document = dbcon.find_one({"type": "project"})
        asset_document = dbcon.find_one({
            "type": "asset",
            "name": asset_name
        })
        dbcon.uninstall()

        asset_doc_parents = asset_document["data"].get("parents")
        hierarchy = "/".join(asset_doc_parents)

        data = {
            "project": {
                "name": project_document["name"],
                "code": project_document["data"].get("code")
            },
            "task": task_name,
            "asset": asset_name,
            "app": self.host_name,
            "hierarchy": hierarchy
        }
        anatomy = Anatomy(project_name)
        extensions = avalon.api.HOST_WORKFILE_EXTENSIONS[self.host_name]
        file_template = anatomy.templates["work"]["file"]
        data.update({
            "version": 1,
            "user": os.environ.get("PYPE_USERNAME") or getpass.getuser(),
            "ext": extensions[0]
        })

        return avalon.api.last_workfile(
            workdir, file_template, data, extensions, True
        )
@@ -1,83 +0,0 @@
import logging
import os

from pype.lib import PypeHook
from pype.hosts.unreal import lib as unreal_lib
from pype.api import Logger

log = logging.getLogger(__name__)


class UnrealPrelaunch(PypeHook):
    """
    This hook checks whether the current workfile path has an Unreal
    project inside. If not, it initializes one and finally passes the
    project path to the Unreal launcher shell script via an
    environment variable.
    """

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:
        if not env:
            env = os.environ
        asset = env["AVALON_ASSET"]
        task = env["AVALON_TASK"]
        workdir = env["AVALON_WORKDIR"]
        engine_version = env["AVALON_APP_NAME"].split("_")[-1]
        project_name = f"{asset}_{task}"

        # Unreal is sensitive about project names longer than 20 chars
        if len(project_name) > 20:
            self.log.warning((f"Project name exceeds 20 characters "
                              f"({project_name})!"))

        # Unreal doesn't accept non-alphabetic characters at the start
        # of the project name. This is because the project name is then used
        # in various places inside C++ code where variable names cannot
        # start with non-alpha. We prepend 'P' to the project name to solve it.
        # 😱
        if not project_name[:1].isalpha():
            self.log.warning(f"Project name doesn't start with an alphabetic "
                             f"character ({project_name}). Prepending 'P'")
            project_name = f"P{project_name}"

        project_path = os.path.join(workdir, project_name)

        self.log.info((f"{self.signature} requested UE4 version: "
                       f"[ {engine_version} ]"))

        detected = unreal_lib.get_engine_versions()
        detected_str = ', '.join(detected.keys()) or 'none'
        self.log.info((f"{self.signature} detected UE4 versions: "
                       f"[ {detected_str} ]"))
        del detected_str
        engine_version = ".".join(engine_version.split(".")[:2])
        if engine_version not in detected.keys():
            self.log.error((f"{self.signature} requested version not "
                            f"detected [ {engine_version} ]"))
            return False

        os.makedirs(project_path, exist_ok=True)

        project_file = os.path.join(project_path, f"{project_name}.uproject")
        engine_path = detected[engine_version]
        if not os.path.isfile(project_file):
            self.log.info((f"{self.signature} creating unreal "
                           f"project [ {project_name} ]"))
            if env.get("AVALON_UNREAL_PLUGIN"):
                os.environ["AVALON_UNREAL_PLUGIN"] = env.get("AVALON_UNREAL_PLUGIN")  # noqa: E501
            unreal_lib.create_unreal_project(project_name,
                                             engine_version,
                                             project_path,
                                             engine_path=engine_path)

        env["PYPE_UNREAL_PROJECT_FILE"] = project_file
        env["AVALON_CURRENT_UNREAL_ENGINE"] = engine_path
        return True
@@ -183,11 +183,11 @@ PypeHarmony.color = function(rgba) {
 /**
  * Get all dependencies for given node.
  * @function
- * @param {string} node node path.
+ * @param {string} _node node path.
  * @return {array} List of dependent nodes.
  */
-PypeHarmony.getDependencies = function(node) {
-    var target_node = node;
+PypeHarmony.getDependencies = function(_node) {
+    var target_node = _node;
     var numInput = node.numberOfInputPorts(target_node);
     var dependencies = [];
     for (var i = 0 ; i < numInput; i++) {
@@ -41,8 +41,6 @@ from .avalon_context import (
     BuildWorkfile
 )

-from .hooks import PypeHook, execute_hook
-
 from .applications import (
     ApplicationLaunchFailed,
     ApplictionExecutableNotFound,
@@ -55,7 +53,9 @@ from .applications import (

 from .plugin_tools import (
     filter_pyblish_plugins,
-    source_hash
+    source_hash,
+    get_unique_layer_name,
+    get_background_layers
 )

 from .user_settings import (
@@ -105,6 +105,8 @@ __all__ = [

     "filter_pyblish_plugins",
     "source_hash",
+    "get_unique_layer_name",
+    "get_background_layers",

     "version_up",
     "get_version_from_path",
@@ -73,14 +73,14 @@ class RenderInstance(object):
     @frameStart.validator
     def check_frame_start(self, _, value):
         """Validate that frame start is not larger than frame end."""
-        if value >= self.frameEnd:
+        if value > self.frameEnd:
             raise ValueError("frameStart must be less than "
                              "or equal to frameEnd")

     @frameEnd.validator
     def check_frame_end(self, _, value):
         """Validate that frame end is not less than frame start."""
-        if value <= self.frameStart:
+        if value < self.frameStart:
             raise ValueError("frameEnd must be greater than "
                              "or equal to frameStart")
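The relaxed comparisons make a single-frame range (frameStart == frameEnd) valid instead of raising. A standalone sketch of the new check, outside the attrs validator machinery used above:

# Hypothetical plain-function version of the relaxed range check.
def validate_frame_range(frame_start, frame_end):
    # A single-frame range (start == end) is valid after this change.
    if frame_start > frame_end:
        raise ValueError("frameStart must be less than or equal to frameEnd")

validate_frame_range(1001, 1001)  # valid now; raised before this commit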
@@ -373,8 +373,12 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
         """Plugin entry point."""
         self._instance = instance
         context = instance.context
-        self._deadline_url = os.environ.get(
-            "DEADLINE_REST_URL", "http://localhost:8082")
+        self._deadline_url = (
+            context.data["system_settings"]
+            ["modules"]
+            ["deadline"]
+            ["DEADLINE_REST_URL"]
+        )
         assert self._deadline_url, "Requires DEADLINE_REST_URL"

         file_path = None
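The Deadline URL is now read from system settings carried on the publish context rather than from the DEADLINE_REST_URL environment variable. A sketch of the lookup with a made-up settings dict that mirrors the nesting above:

# Hypothetical shape of context.data["system_settings"]; the URL is a placeholder.
system_settings = {
    "modules": {
        "deadline": {"DEADLINE_REST_URL": "http://deadline.local:8082"}
    }
}
deadline_url = system_settings["modules"]["deadline"]["DEADLINE_REST_URL"]
assert deadline_url, "Requires DEADLINE_REST_URL"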
@@ -248,11 +248,11 @@ class ApplicationTool:
 class ApplicationExecutable:
     def __init__(self, executable):
         default_launch_args = []
+        executable_path = None
         if isinstance(executable, str):
             executable_path = executable

         elif isinstance(executable, list):
-            executable_path = None
             for arg in executable:
                 if arg:
                     if executable_path is None:
@@ -711,7 +711,7 @@ class ApplicationLaunchContext:
         # Execute prelaunch hooks
         for prelaunch_hook in self.prelaunch_hooks:
             self.log.debug("Executing prelaunch hook: {}".format(
-                str(prelaunch_hook)
+                str(prelaunch_hook.__class__.__name__)
             ))
             prelaunch_hook.execute()
@@ -730,7 +730,7 @@ class ApplicationLaunchContext:
         # Process post launch hooks
         for postlaunch_hook in self.postlaunch_hooks:
             self.log.debug("Executing postlaunch hook: {}".format(
-                str(prelaunch_hook)
+                str(postlaunch_hook.__class__.__name__)
             ))

             # TODO how to handle errors?
@@ -1,71 +0,0 @@
# -*- coding: utf-8 -*-
"""Package containing code for handling hooks."""
import os
import sys
import types
import logging
from abc import ABCMeta, abstractmethod

import six


log = logging.getLogger(__name__)


@six.add_metaclass(ABCMeta)
class PypeHook:
    """Abstract class from which all hooks should inherit."""

    def __init__(self):
        """Constructor."""
        pass

    @abstractmethod
    def execute(self, *args, **kwargs):
        """Abstract execute method."""
        pass


def execute_hook(hook, *args, **kwargs):
    """Execute hook with arguments.

    This will load the hook file, instantiate the class and call the
    :meth:`PypeHook.execute` method on it. Hook must be in the form::

        $PYPE_SETUP_PATH/repos/pype/path/to/hook.py/HookClass

    This will load `hook.py`, instantiate HookClass and then execute
    `execute(*args, **kwargs)`.

    Args:
        hook (str): path to hook class.

    """
    class_name = hook.split("/")[-1]

    abspath = os.path.join(os.getenv('PYPE_SETUP_PATH'),
                           'repos', 'pype', *hook.split("/")[:-1])

    mod_name, mod_ext = os.path.splitext(os.path.basename(abspath))

    if not mod_ext == ".py":
        return False

    module = types.ModuleType(mod_name)
    module.__file__ = abspath

    try:
        with open(abspath, errors='ignore') as f:
            six.exec_(f.read(), module.__dict__)

        sys.modules[abspath] = module

    except Exception as exp:
        log.exception("loading hook failed: {}".format(exp),
                      exc_info=True)
        return False

    obj = getattr(module, class_name)
    hook_obj = obj()
    ret_val = hook_obj.execute(*args, **kwargs)
    return ret_val
@@ -3,6 +3,8 @@
 import os
 import inspect
 import logging
+import re
+import json

 from pype.settings import get_project_settings
@@ -78,3 +80,57 @@ def source_hash(filepath, *args):
     time = str(os.path.getmtime(filepath))
     size = str(os.path.getsize(filepath))
     return "|".join([file_name, time, size] + list(args)).replace(".", ",")
+
+
+def get_unique_layer_name(layers, name):
+    """
+    Gets all layer names and, if 'name' is present among them, increases
+    the suffix by 1 (eg. creates a unique layer name - for Loader).
+
+    Args:
+        layers (list): of strings, names only
+        name (string): checked value
+
+    Returns:
+        (string): name_00X (without version)
+    """
+    names = {}
+    for layer in layers:
+        layer_name = re.sub(r'_\d{3}$', '', layer)
+        if layer_name in names.keys():
+            names[layer_name] = names[layer_name] + 1
+        else:
+            names[layer_name] = 1
+    occurrences = names.get(name, 0)
+
+    return "{}_{:0>3d}".format(name, occurrences + 1)
+
+
+def get_background_layers(file_url):
+    """
+    Pulls file names from the background json file, enriched with the folder
+    url so AE is able to import the files.
+
+    Order is important, follows order in json.
+
+    Args:
+        file_url (str): abs url of background json
+
+    Returns:
+        (list): of abs paths to images
+    """
+    with open(file_url) as json_file:
+        data = json.load(json_file)
+
+    layers = list()
+    bg_folder = os.path.dirname(file_url)
+    for child in data['children']:
+        if child.get("filename"):
+            layers.append(os.path.join(bg_folder, child.get("filename")).
+                          replace("\\", "/"))
+        else:
+            for layer in child['children']:
+                if layer.get("filename"):
+                    layers.append(os.path.join(bg_folder,
+                                               layer.get("filename")).
+                                  replace("\\", "/"))
+    return layers
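A quick usage sketch for get_unique_layer_name; the layer names are made up:

# Base names after stripping the _NNN suffix count as: {"bg": 2, "fg": 1}
get_unique_layer_name(["bg_001", "bg_002", "fg_001"], "bg")    # -> "bg_003"
get_unique_layer_name(["bg_001", "bg_002", "fg_001"], "char")  # -> "char_001"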
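And a sketch of the background JSON shape that get_background_layers walks; the file names are hypothetical:

# /shots/sh010/bg.json (hypothetical content):
#   {"children": [
#       {"filename": "sky.png"},
#       {"children": [{"filename": "trees.png"}, {"name": "group-without-file"}]}
#   ]}
get_background_layers("/shots/sh010/bg.json")
# -> ["/shots/sh010/sky.png", "/shots/sh010/trees.png"]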
@@ -4,11 +4,11 @@ from . import ftrack_server
 from .ftrack_server import FtrackServer, check_ftrack_url
 from .lib import BaseHandler, BaseEvent, BaseAction, ServerAction

-from pype.api import system_settings
+from pype.api import get_system_settings

 # TODO: set in ftrack module
 os.environ["FTRACK_SERVER"] = (
-    system_settings()["global"]["modules"]["Ftrack"]["ftrack_server"]
+    get_system_settings()["modules"]["Ftrack"]["ftrack_server"]
 )
 __all__ = (
     "ftrack_server",
@@ -2,7 +2,7 @@ import os
 import json

 from pype.modules.ftrack.lib import BaseAction, statics_icon
-from pype.api import config, Anatomy, project_overrides_dir_path
+from pype.api import config, Anatomy
 from pype.modules.ftrack.lib.avalon_sync import get_pype_attr
@@ -14,9 +14,13 @@ import uuid

 import ftrack_api
 import pymongo
-from pype.modules.ftrack.lib import credentials
+from pype.modules.ftrack.lib import (
+    credentials,
+    get_ftrack_url_from_settings
+)
 from pype.modules.ftrack.ftrack_server.lib import (
-    check_ftrack_url, get_ftrack_event_mongo_info
+    check_ftrack_url,
+    get_ftrack_event_mongo_info
 )

 import socket_thread
@@ -87,25 +91,6 @@ def validate_credentials(url, user, api):
     return True


-def process_event_paths(event_paths):
-    print('DEBUG: Processing event paths: {}.'.format(str(event_paths)))
-    return_paths = []
-    not_found = []
-    if not event_paths:
-        return return_paths, not_found
-
-    if isinstance(event_paths, str):
-        event_paths = event_paths.split(os.pathsep)
-
-    for path in event_paths:
-        if os.path.exists(path):
-            return_paths.append(path)
-        else:
-            not_found.append(path)
-
-    return os.pathsep.join(return_paths), not_found
-
-
 def legacy_server(ftrack_url):
     # Current file
     file_path = os.path.dirname(os.path.realpath(__file__))
@@ -537,16 +522,20 @@ def main(argv):
             "environment: $CLOCKIFY_WORKSPACE)"
         )
     )
-    ftrack_url = os.environ.get('FTRACK_SERVER')
-    username = os.environ.get('FTRACK_API_USER')
-    api_key = os.environ.get('FTRACK_API_KEY')
-    event_paths = os.environ.get('FTRACK_EVENTS_PATH')
+    ftrack_url = os.environ.get("FTRACK_SERVER")
+    username = os.environ.get("FTRACK_API_USER")
+    api_key = os.environ.get("FTRACK_API_KEY")

     kwargs, args = parser.parse_known_args(argv)

     if kwargs.ftrackurl:
         ftrack_url = kwargs.ftrackurl

+    # Load Ftrack url from settings if not set
+    if not ftrack_url:
+        ftrack_url = get_ftrack_url_from_settings()
+
+    event_paths = None
     if kwargs.ftrackeventpaths:
         event_paths = kwargs.ftrackeventpaths
@@ -568,6 +557,7 @@ def main(argv):
         os.environ["CLOCKIFY_API_KEY"] = kwargs.clockifyapikey

+    legacy = kwargs.legacy

     # Check url regex and accessibility
     ftrack_url = check_ftrack_url(ftrack_url)
     if not ftrack_url:
@@ -579,19 +569,6 @@ def main(argv):
         print('Exiting! < Please enter valid credentials >')
         return 1

-    # Process events path
-    event_paths, not_found = process_event_paths(event_paths)
-    if not_found:
-        print(
-            'WARNING: These paths were not found: {}'.format(str(not_found))
-        )
-    if not event_paths:
-        if not_found:
-            print('ERROR: None of the entered paths is valid or accessible.')
-        else:
-            print('ERROR: Paths to events are not set. Exiting.')
-        return 1
-
     if kwargs.storecred:
         credentials.save_credentials(username, api_key, ftrack_url)
@@ -599,7 +576,10 @@ def main(argv):
     os.environ["FTRACK_SERVER"] = ftrack_url
     os.environ["FTRACK_API_USER"] = username
     os.environ["FTRACK_API_KEY"] = api_key
-    os.environ["FTRACK_EVENTS_PATH"] = event_paths
+    if event_paths:
+        if isinstance(event_paths, (list, tuple)):
+            event_paths = os.pathsep.join(event_paths)
+        os.environ["FTRACK_EVENTS_PATH"] = event_paths

     if legacy:
         return legacy_server(ftrack_url)
@@ -32,7 +32,7 @@ PYTHONPATH # Path to ftrack_api and paths to all modules used in actions


 class FtrackServer:
-    def __init__(self, server_type='action'):
+    def __init__(self, handler_paths=None, server_type='action'):
         """
         - 'type' is by default set to 'action' - Runs Action server
         - enter 'event' for Event server
@@ -47,14 +47,15 @@ class FtrackServer:
         ftrack_log = logging.getLogger("ftrack_api")
         ftrack_log.setLevel(logging.WARNING)

-        env_key = "FTRACK_ACTIONS_PATH"
-        if server_type.lower() == 'event':
-            env_key = "FTRACK_EVENTS_PATH"
         self.stopped = True
+        self.is_running = False

+        self.handler_paths = handler_paths or []
+
         self.server_type = server_type
-        self.env_key = env_key

     def stop_session(self):
         self.stopped = True
         if self.session.event_hub.connected is True:
             self.session.event_hub.disconnect()
         self.session.close()
@@ -107,10 +108,6 @@ class FtrackServer:
                 " in registered paths: \"{}\""
             ).format("| ".join(paths)))

-        # Load presets for setting plugins
-        key = "user"
-        if self.server_type.lower() == "event":
-            key = "server"
+        # TODO replace with settings or get rid of passing the dictionary
         plugins_presets = {}
@@ -132,25 +129,37 @@ class FtrackServer:
             )
             log.warning(msg, exc_info=True)

+    def set_handler_paths(self, paths):
+        self.handler_paths = paths
+        if self.is_running:
+            self.stop_session()
+            self.run_server()
+
+        elif not self.stopped:
+            self.run_server()
+
     def run_server(self, session=None, load_files=True):
+        self.stopped = False
+        self.is_running = True
         if not session:
             session = ftrack_api.Session(auto_connect_event_hub=True)

         self.session = session

         if load_files:
-            paths_str = os.environ.get(self.env_key)
-            if paths_str is None:
-                log.error((
-                    "Env var \"{}\" is not set, \"{}\" server won\'t launch"
-                ).format(self.env_key, self.server_type))
+            if not self.handler_paths:
+                log.warning((
+                    "Paths to event handlers are not set."
+                    " Ftrack server won't launch."
+                ))
+                self.is_running = False
                 return

-            paths = paths_str.split(os.pathsep)
-            self.set_files(paths)
+            self.set_files(self.handler_paths)

-            log.info(60*"*")
-            log.info('Registration of actions/events has finished!')
+            msg = "Registration of event handlers has finished!"
+            log.info(len(msg) * "*")
+            log.info(msg)

         # keep event_hub on session running
         self.session.event_hub.wait()
+        self.is_running = False
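After this refactor, handler paths are handed to the FtrackServer constructor instead of being read from FTRACK_ACTIONS_PATH / FTRACK_EVENTS_PATH inside run_server. The call patterns below are taken from the callers later in this diff; session creation is elided:

# Event server that loads handler files from explicit paths.
event_handler_paths = get_server_event_handler_paths()
server = FtrackServer(event_handler_paths, "event")
server.run_server(session)

# Servers that load no handler files (storer, statuser) skip paths entirely.
statuser = FtrackServer(server_type="event")
statuser.run_server(session, load_files=False)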
@@ -19,39 +19,15 @@ import ftrack_api._centralized_storage_scenario
 import ftrack_api.event
 from ftrack_api.logging import LazyLogMessage as L

-from pype.api import (
-    Logger,
-    get_default_components,
-    decompose_url,
-    compose_url
-)
+from pype.modules.ftrack.lib import get_ftrack_event_mongo_info

 from .custom_db_connector import CustomDbConnector

+from pype.api import Logger
+
 TOPIC_STATUS_SERVER = "pype.event.server.status"
 TOPIC_STATUS_SERVER_RESULT = "pype.event.server.status.result"


-def get_ftrack_event_mongo_info():
-    database_name = (
-        os.environ.get("FTRACK_EVENTS_MONGO_DB") or "pype"
-    )
-    collection_name = (
-        os.environ.get("FTRACK_EVENTS_MONGO_COL") or "ftrack_events"
-    )
-
-    mongo_url = os.environ.get("FTRACK_EVENTS_MONGO_URL")
-    if mongo_url is not None:
-        components = decompose_url(mongo_url)
-    else:
-        components = get_default_components()
-
-    uri = compose_url(**components)
-
-    return uri, components["port"], database_name, collection_name
-
-
 def check_ftrack_url(url, log_errors=True):
     """Checks if Ftrack server is responding"""
     if not url:
@@ -6,11 +6,16 @@ import datetime

 from ftrack_server import FtrackServer
 from pype.modules.ftrack.ftrack_server.lib import (
-    SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER
+    SocketSession,
+    ProcessEventHub,
+    TOPIC_STATUS_SERVER
 )
-import ftrack_api
+from pype.modules.ftrack.lib import get_server_event_handler_paths

 from pype.api import Logger

+import ftrack_api
+
 log = Logger().get_logger("Event processor")

 subprocess_started = datetime.datetime.now()
@@ -55,26 +60,6 @@ def register(session):
     )


-def clockify_module_registration():
-    api_key = os.environ.get("CLOCKIFY_API_KEY")
-    if not api_key:
-        log.warning("Clockify API key is not set.")
-        return
-
-    workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
-    if not workspace_name:
-        log.warning("Clockify Workspace is not set.")
-        return
-
-    from pype.modules.clockify.constants import CLOCKIFY_FTRACK_SERVER_PATH
-
-    current = os.environ.get("FTRACK_EVENTS_PATH") or ""
-    if current:
-        current += os.pathsep
-    os.environ["FTRACK_EVENTS_PATH"] = current + CLOCKIFY_FTRACK_SERVER_PATH
-    return True
-
-
 def main(args):
     port = int(args[-1])
     # Create a TCP/IP socket
@@ -86,11 +71,8 @@ def main(args):
     sock.connect(server_address)

     sock.sendall(b"CreatedProcess")
-    try:
-        clockify_module_registration()
-    except Exception:
-        log.info("Clockify registration failed.", exc_info=True)

+    returncode = 0
     try:
         session = SocketSession(
             auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub
@@ -98,17 +80,19 @@ def main(args):
         register(session)
         SessionFactory.session = session

-        server = FtrackServer("event")
+        event_handler_paths = get_server_event_handler_paths()
+        server = FtrackServer(event_handler_paths, "event")
         log.debug("Launched Ftrack Event processor")
         server.run_server(session)

     except Exception:
+        returncode = 1
         log.error("Event server crashed. See traceback below", exc_info=True)

     finally:
         log.debug("First closing socket")
         sock.close()
-        return 1
+        return returncode


 if __name__ == "__main__":
@@ -9,8 +9,10 @@ import datetime
 import ftrack_api
 from ftrack_server import FtrackServer
 from pype.modules.ftrack.ftrack_server.lib import (
-    SocketSession, StatusEventHub,
-    TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
+    SocketSession,
+    StatusEventHub,
+    TOPIC_STATUS_SERVER,
+    TOPIC_STATUS_SERVER_RESULT
 )
 from pype.api import Logger
@@ -368,7 +370,7 @@ def main(args):
     ObjectFactory.session = session
     session.event_hub.heartbeat_callbacks.append(heartbeat)
     register(session)
-    server = FtrackServer("event")
+    server = FtrackServer(server_type="event")
     log.debug("Launched Ftrack Event statuser")

     server.run_server(session, load_files=False)
@@ -8,10 +8,12 @@ import pymongo
 import ftrack_api
 from ftrack_server import FtrackServer
 from pype.modules.ftrack.ftrack_server.lib import (
-    SocketSession, StorerEventHub,
-    get_ftrack_event_mongo_info,
-    TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
+    SocketSession,
+    StorerEventHub,
+    TOPIC_STATUS_SERVER,
+    TOPIC_STATUS_SERVER_RESULT
 )
+from pype.modules.ftrack.lib import get_ftrack_event_mongo_info
 from pype.modules.ftrack.ftrack_server.custom_db_connector import (
     CustomDbConnector
 )
@@ -193,7 +195,7 @@ def main(args):
     )
     SessionFactory.session = session
     register(session)
-    server = FtrackServer("event")
+    server = FtrackServer(server_type="event")
     log.debug("Launched Ftrack Event storer")
     server.run_server(session, load_files=False)
@@ -7,6 +7,9 @@ import threading
 from ftrack_server import FtrackServer
 import ftrack_api
 from pype.api import Logger
+from pype.modules.ftrack.ftrack_server.lib import (
+    get_server_event_handler_paths
+)

 log = Logger().get_logger("Event Server Legacy")
@@ -62,7 +65,8 @@ class TimerChecker(threading.Thread):
 def main(args):
     check_thread = None
     try:
-        server = FtrackServer("event")
+        event_handler_paths = get_server_event_handler_paths()
+        server = FtrackServer(event_handler_paths, "event")
         session = ftrack_api.Session(auto_connect_event_hub=True)

         check_thread = TimerChecker(server, session)
@@ -2,13 +2,12 @@ import sys
 import signal
 import socket

 import traceback

 from ftrack_server import FtrackServer
 from pype.modules.ftrack.ftrack_server.lib import (
     SocketSession,
     SocketBaseEventHub
 )
+from pype.modules.ftrack.lib import get_user_event_handler_paths

 from pype.api import Logger
@@ -33,11 +32,13 @@ def main(args):
         session = SocketSession(
             auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub
         )
-        server = FtrackServer("action")
-        log.debug("Launched User Ftrack Server")
+        event_handler_paths = get_user_event_handler_paths()
+        server = FtrackServer(event_handler_paths, "action")
+        log.debug("Launching User Ftrack Server")
         server.run_server(session=session)

     except Exception:
         traceback.print_exception(*sys.exc_info())
         log.warning("Ftrack session server failed.", exc_info=True)

     finally:
         log.debug("Closing socket")
@@ -1,14 +1,36 @@
+from .settings import (
+    FTRACK_MODULE_DIR,
+    SERVER_HANDLERS_DIR,
+    USER_HANDLERS_DIR,
+    get_ftrack_url_from_settings,
+    get_server_event_handler_paths,
+    get_user_event_handler_paths,
+    get_ftrack_event_mongo_info
+)
 from . import avalon_sync
 from . import credentials
 from .ftrack_base_handler import BaseHandler
 from .ftrack_event_handler import BaseEvent
 from .ftrack_action_handler import BaseAction, ServerAction, statics_icon


 __all__ = (
+    "FTRACK_MODULE_DIR",
+    "SERVER_HANDLERS_DIR",
+    "USER_HANDLERS_DIR",
+    "get_ftrack_url_from_settings",
+    "get_server_event_handler_paths",
+    "get_user_event_handler_paths",
+    "get_ftrack_event_mongo_info",
+
     "avalon_sync",

     "credentials",

     "BaseHandler",

     "BaseEvent",

     "BaseAction",
     "ServerAction",
     "statics_icon"
pype/modules/ftrack/lib/settings.py (new file, 109 lines)
@@ -0,0 +1,109 @@
import os
from pype.api import (
    Logger,
    get_system_settings,
    get_default_components,
    decompose_url,
    compose_url
)

log = Logger().get_logger(__name__)

FTRACK_MODULE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SERVER_HANDLERS_DIR = os.path.join(FTRACK_MODULE_DIR, "events")
USER_HANDLERS_DIR = os.path.join(FTRACK_MODULE_DIR, "actions")


def get_ftrack_settings():
    return get_system_settings()["modules"]["Ftrack"]


def get_ftrack_url_from_settings():
    return get_ftrack_settings()["ftrack_server"]


def get_server_event_handler_paths():
    paths = []
    # Environment variable overrides settings
    if "FTRACK_EVENTS_PATH" in os.environ:
        env_paths = os.environ.get("FTRACK_EVENTS_PATH")
        paths.extend(env_paths.split(os.pathsep))
        return paths

    # Add pype's default dir
    paths.append(SERVER_HANDLERS_DIR)
    # Add additional paths from settings
    paths.extend(
        get_ftrack_settings()["ftrack_events_path"]
    )
    try:
        clockify_path = clockify_event_path()
        if clockify_path:
            paths.append(clockify_path)
    except Exception:
        log.warning("Clockify paths function failed.", exc_info=True)

    # Filter only existing paths
    _paths = []
    for path in paths:
        if os.path.exists(path):
            _paths.append(path)
        else:
            log.warning((
                "Registered event handler path is not accessible: {}"
            ).format(path))
    return _paths


def get_user_event_handler_paths():
    paths = []
    # Add pype's default dir
    paths.append(USER_HANDLERS_DIR)
    # Add additional paths from settings
    paths.extend(
        get_ftrack_settings()["ftrack_actions_path"]
    )

    # Filter only existing paths
    _paths = []
    for path in paths:
        if os.path.exists(path):
            _paths.append(path)
        else:
            log.warning((
                "Registered event handler path is not accessible: {}"
            ).format(path))
    return _paths


def clockify_event_path():
    api_key = os.environ.get("CLOCKIFY_API_KEY")
    if not api_key:
        log.warning("Clockify API key is not set.")
        return

    workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
    if not workspace_name:
        log.warning("Clockify Workspace is not set.")
        return

    from pype.modules.clockify.constants import CLOCKIFY_FTRACK_SERVER_PATH

    return CLOCKIFY_FTRACK_SERVER_PATH


def get_ftrack_event_mongo_info():
    ftrack_settings = get_ftrack_settings()
    database_name = ftrack_settings["mongo_database_name"]
    collection_name = ftrack_settings["mongo_collection_name"]

    # TODO add possibility to set in settings and use PYPE_MONGO_URL if not set
    mongo_url = os.environ.get("FTRACK_EVENTS_MONGO_URL")
    if mongo_url is not None:
        components = decompose_url(mongo_url)
    else:
        components = get_default_components()

    uri = compose_url(**components)

    return uri, components["port"], database_name, collection_name
|
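A minimal usage sketch for the settings helpers above (the import path follows the
__init__.py exports in this commit; the surrounding wiring is an assumption, not
part of the change):

    # sketch only -- illustrative wiring, not from this commit
    from pype.modules.ftrack.lib import (
        get_server_event_handler_paths,
        get_ftrack_event_mongo_info
    )

    paths = get_server_event_handler_paths()  # FTRACK_EVENTS_PATH env var wins if set
    uri, port, db_name, col_name = get_ftrack_event_mongo_info()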
@@ -10,7 +10,7 @@ from ..ftrack_server import socket_thread
from ..lib import credentials
from . import login_dialog

-from pype.api import Logger, resources
+from pype.api import Logger, resources, get_system_settings


log = Logger().get_logger("FtrackModule", "ftrack")

@@ -29,6 +29,8 @@ class FtrackModule:
        self.bool_action_thread_running = False
        self.bool_timer_event = False

+        self.load_ftrack_url()
+
        self.widget_login = login_dialog.CredentialsDialog()
        self.widget_login.login_changed.connect(self.on_login_change)
        self.widget_login.logout_signal.connect(self.on_logout)

@@ -292,6 +294,15 @@ class FtrackModule:
    def tray_exit(self):
        self.stop_action_server()

+    def load_ftrack_url(self):
+        ftrack_url = (
+            get_system_settings()
+            ["modules"]
+            ["Ftrack"]
+            ["ftrack_server"]
+        )
+        os.environ["FTRACK_SERVER"] = ftrack_url
+
    # Definition of visibility of each menu actions
    def set_menu_visibility(self):
        self.tray_server_menu.menuAction().setVisible(self.bool_logged)
@@ -126,20 +126,26 @@ class CredentialsDialog(QtWidgets.QDialog):

        self.setLayout(main)

+    def show(self, *args, **kwargs):
+        super(CredentialsDialog, self).show(*args, **kwargs)
+        self.fill_ftrack_url()
+
    def fill_ftrack_url(self):
        url = os.getenv("FTRACK_SERVER")
        if url == self.ftsite_input.text():
            return

        checked_url = self.check_url(url)
+        self.ftsite_input.setText(checked_url or "")

-        if checked_url is None:
-            checked_url = ""
-            self.btn_login.setEnabled(False)
-            self.btn_ftrack_login.setEnabled(False)
+        enabled = bool(checked_url)

-            self.api_input.setEnabled(False)
-            self.user_input.setEnabled(False)
-            self.ftsite_input.setEnabled(False)
+        self.btn_login.setEnabled(enabled)
+        self.btn_ftrack_login.setEnabled(enabled)

-        self.ftsite_input.setText(checked_url)
+        self.api_input.setEnabled(enabled)
+        self.user_input.setEnabled(enabled)
+        self.ftsite_input.setEnabled(enabled)

    def set_advanced_mode(self, is_advanced):
        self._in_advance_mode = is_advanced
@@ -1,7 +1,7 @@
import collections
from Qt import QtCore, QtGui
from pype.api import Logger
-from pypeapp.lib.log import _bootstrap_mongo_log, LOG_COLLECTION_NAME
+from pype.lib.log import _bootstrap_mongo_log, LOG_COLLECTION_NAME

log = Logger().get_logger("LogModel", "LoggingModule")
@@ -4,13 +4,31 @@ from pype.modules.websocket_server import WebSocketServer
    Used anywhere solution is calling client methods.
"""
import json
-from collections import namedtuple
+
+import attr

import logging
log = logging.getLogger(__name__)


+@attr.s
+class AEItem(object):
+    """
+        Object denoting Item in AE. Each item is created in AE by any Loader,
+        but contains same fields, which are being used in later processing.
+    """
+    # metadata
+    id = attr.ib()  # id created by AE, could be used for querying
+    name = attr.ib()  # name of item
+    item_type = attr.ib(default=None)  # item type (footage, folder, comp)
+    # all imported elements, single for
+    # regular image, array for Backgrounds
+    members = attr.ib(factory=list)
+    workAreaStart = attr.ib(default=None)
+    workAreaDuration = attr.ib(default=None)
+    frameRate = attr.ib(default=None)
+    file_name = attr.ib(default=None)
+
+
class AfterEffectsServerStub():
    """
        Stub for calling function on client (Photoshop js) side.
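The attrs-based AEItem above replaces the previous ad-hoc namedtuple records; a
minimal construction sketch (all field values hypothetical):

    # sketch only -- values are made up for illustration
    item = AEItem(id=101, name="renderMain", item_type="comp",
                  members=[102], workAreaStart=0, workAreaDuration=48,
                  frameRate=24.0, file_name=None)
    print(item.name, item.members)  # dot access, which _to_records relies on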
@@ -34,22 +52,14 @@ class AfterEffectsServerStub():
                                         ('AfterEffects.open', path=path)
                                         )

-    def read(self, layer, layers_meta=None):
-        """
-            Parses layer metadata from Label field of active document
-        Args:
-            layer: <namedTuple Layer("id":XX, "name":"YYY")
-            layers_meta: full list from Headline (for performance in loops)
-        Returns:
-        """
-        if layers_meta is None:
-            layers_meta = self.get_metadata()
-
-        return layers_meta.get(str(layer.id))
-
    def get_metadata(self):
        """
-            Get stored JSON with metadata from AE.Metadata.Label field
+            Get complete stored JSON with metadata from AE.Metadata.Label
+            field.
+
+            It contains containers loaded by any Loader OR instances created
+            by Creator.
+
+        Returns:
+            (dict)
        """
@@ -57,54 +67,85 @@ class AfterEffectsServerStub():
                                        ('AfterEffects.get_metadata')
                                        )
        try:
-            layers_data = json.loads(res)
+            metadata = json.loads(res)
        except json.decoder.JSONDecodeError:
            raise ValueError("Unparsable metadata {}".format(res))
-        return layers_data or {}
+        return metadata or []

-    def imprint(self, layer, data, all_layers=None, layers_meta=None):
+    def read(self, item, layers_meta=None):
        """
-            Save layer metadata to Label field of metadata of active document
+            Parses item metadata from Label field of active document.
+            Used as filter to pick metadata for specific 'item' only.
+
        Args:
-            layer (namedtuple): Layer("id": XXX, "name":'YYY')
+            item (AEItem): pulled info from AE
+            layers_meta (dict): full list from Headline
+                (load and inject for better performance in loops)
+        Returns:
+            (dict):
+        """
+        if layers_meta is None:
+            layers_meta = self.get_metadata()
+
+        for item_meta in layers_meta:
+            if 'container' in item_meta.get('id') and \
+                    str(item.id) == str(item_meta.get('members')[0]):
+                return item_meta
+
+        log.debug("Couldn't find layer metadata")
+
+    def imprint(self, item, data, all_items=None, items_meta=None):
+        """
+            Save item metadata to Label field of metadata of active document
+        Args:
+            item (AEItem):
            data(string): json representation for single layer
-            all_layers (list of namedtuples): for performance, could be
+            all_items (list of item): for performance, could be
                injected for usage in loop, if not, single call will be
                triggered
-            layers_meta(string): json representation from Headline
+            items_meta(string): json representation from Headline
                (for performance - provide only if imprint is in
                loop - value should be same)
        Returns: None
        """
-        if not layers_meta:
-            layers_meta = self.get_metadata()
+        if not items_meta:
+            items_meta = self.get_metadata()

-        # json.dumps writes integer values in a dictionary to string, so
-        # anticipating it here.
-        if str(layer.id) in layers_meta and layers_meta[str(layer.id)]:
-            if data:
-                layers_meta[str(layer.id)].update(data)
-            else:
-                layers_meta.pop(str(layer.id))
-        else:
-            layers_meta[str(layer.id)] = data
+        result_meta = []
+        # fix existing
+        is_new = True
+
+        for item_meta in items_meta:
+            if item_meta.get('members') \
+                    and str(item.id) == str(item_meta.get('members')[0]):
+                is_new = False
+                if data:
+                    item_meta.update(data)
+                    result_meta.append(item_meta)
+            else:
+                result_meta.append(item_meta)
+
+        if is_new:
+            result_meta.append(data)

        # Ensure only valid ids are stored.
-        if not all_layers:
-            all_layers = self.get_items(comps=True,
-                                        folders=False,
-                                        footages=True)
-        item_ids = [int(item.id) for item in all_layers]
-        cleaned_data = {}
-        for id in layers_meta:
-            if int(id) in item_ids:
-                cleaned_data[id] = layers_meta[id]
+        if not all_items:
+            # loaders create FootageItem now
+            all_items = self.get_items(comps=True,
+                                       folders=True,
+                                       footages=True)
+        item_ids = [int(item.id) for item in all_items]
+        cleaned_data = []
+        for meta in result_meta:
+            # for creation of instance OR loaded container
+            if 'instance' in meta.get('id') or \
+                    int(meta.get('members')[0]) in item_ids:
+                cleaned_data.append(meta)

        payload = json.dumps(cleaned_data, indent=4)

        self.websocketserver.call(self.client.call
-                                  ('AfterEffects.imprint', payload=payload)
-                                  )
+                                  ('AfterEffects.imprint', payload=payload))

    def get_active_document_full_name(self):
        """
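Note on the new storage shape: metadata moves from a dict keyed by layer id to a
list of dicts matched through their 'members' field. A sketch of what read() now
filters (payload values hypothetical; the id strings mirror the 'instance' /
'container' substrings the code checks for):

    # sketch only -- illustrative payload, not taken from a real scene
    items_meta = [
        {"id": "pyblish.avalon.instance", "members": ["101"], "subset": "renderMain"},
        {"id": "pyblish.avalon.container", "members": ["102"], "name": "imageBg"}
    ]
    # read(item) returns the dict whose 'id' contains 'container' and whose
    # first member equals str(item.id); imprint() rewrites the whole list.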
@@ -130,8 +171,10 @@ class AfterEffectsServerStub():
        """
            Get all items from Project panel according to arguments.
            There are multiple different types:
-                CompItem (could have multiple layers - source for Creator)
-                FolderItem (collection type, currently not used
+                CompItem (could have multiple layers - source for Creator,
+                    will be rendered)
+                FolderItem (collection type, currently used for Background
+                    loading)
                FootageItem (imported file - created by Loader)
        Args:
            comps (bool): return CompItems

@@ -218,15 +261,15 @@ class AfterEffectsServerStub():
                                               item_id=item.id,
                                               item_name=item_name))

-    def delete_item(self, item):
-        """ Deletes FootageItem with new file
+    def delete_item(self, item_id):
+        """ Deletes *Item in a file
            Args:
-                item (dict):
+                item_id (int):

        """
        self.websocketserver.call(self.client.call
                                  ('AfterEffects.delete_item',
-                                   item_id=item.id
+                                   item_id=item_id
                                   ))

    def is_saved(self):
@@ -340,12 +383,95 @@ class AfterEffectsServerStub():
    def close(self):
        self.client.close()

+    def import_background(self, comp_id, comp_name, files):
+        """
+            Imports background images to existing or new composition.
+
+            If comp_id is not provided, new composition is created, basic
+            values (width, heights, frameRatio) takes from first imported
+            image.
+
+            All images from background json are imported as a FootageItem and
+            separate layer is created for each of them under composition.
+
+            Order of imported 'files' is important.
+
+            Args:
+                comp_id (int): id of existing composition (null if new)
+                comp_name (str): used when new composition
+                files (list): list of absolute paths to import and
+                    add as layers
+
+            Returns:
+                (AEItem): object with id of created folder, all imported images
+        """
+        res = self.websocketserver.call(self.client.call
+                                        ('AfterEffects.import_background',
+                                         comp_id=comp_id,
+                                         comp_name=comp_name,
+                                         files=files))
+
+        records = self._to_records(res)
+        if records:
+            return records.pop()
+
+        log.debug("Import background failed.")
+
+    def reload_background(self, comp_id, comp_name, files):
+        """
+            Reloads background images to existing composition.
+
+            It actually deletes complete folder with imported images and
+            created composition for safety.
+
+            Args:
+                comp_id (int): id of existing composition to be overwritten
+                comp_name (str): new name of composition (could be same as old
+                    if version up only)
+                files (list): list of absolute paths to import and
+                    add as layers
+            Returns:
+                (AEItem): object with id of created folder, all imported images
+        """
+        res = self.websocketserver.call(self.client.call
+                                        ('AfterEffects.reload_background',
+                                         comp_id=comp_id,
+                                         comp_name=comp_name,
+                                         files=files))
+
+        records = self._to_records(res)
+        if records:
+            return records.pop()
+
+        log.debug("Reload of background failed.")
+
+    def add_item_as_layer(self, comp_id, item_id):
+        """
+            Adds already imported FootageItem ('item_id') as a new
+            layer to composition ('comp_id').
+
+            Args:
+                comp_id (int): id of target composition
+                item_id (int): FootageItem.id
+                    comp already found previously
+        """
+        res = self.websocketserver.call(self.client.call
+                                        ('AfterEffects.add_item_as_layer',
+                                         comp_id=comp_id,
+                                         item_id=item_id))
+
+        records = self._to_records(res)
+        if records:
+            return records.pop()
+
+        log.debug("Adding new layer failed.")
+
    def _to_records(self, res):
        """
-            Converts string json representation into list of named tuples for
+            Converts string json representation into list of AEItem
            dot notation access to work.
-        Returns: <list of named tuples>
-            res(string): - json representation
+        Returns: <list of AEItem>
+            res(string): - json representation
        """
        if not res:
            return []
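Usage sketch for the background round-trip added above (paths and names are
hypothetical; the order of 'files' matters, as the docstring notes):

    # sketch only -- paths are made up
    files = ["/proj/bg/01_sky.png", "/proj/bg/02_hills.png"]
    comp = stub.import_background(None, "Hero_background_001", files)
    if comp:
        # comp.members carries the ids of the imported FootageItems
        stub.reload_background(comp.id, "Hero_background_002", files)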
@@ -358,9 +484,19 @@ class AfterEffectsServerStub():
            return []

        ret = []
-        # convert to namedtuple to use dot notation
-        if isinstance(layers_data, dict):  # TODO refactore
+        # convert to AEItem to use dot notation
+        if isinstance(layers_data, dict):
            layers_data = [layers_data]
        for d in layers_data:
-            ret.append(namedtuple('Layer', d.keys())(*d.values()))
+            # currently implemented and expected fields
+            item = AEItem(d.get('id'),
+                          d.get('name'),
+                          d.get('type'),
+                          d.get('members'),
+                          d.get('workAreaStart'),
+                          d.get('workAreaDuration'),
+                          d.get('frameRate'),
+                          d.get('file_name'))
+
+            ret.append(item)
        return ret
@@ -35,7 +35,7 @@ class CreateRender(api.Creator):
            if self.name.lower() == item.name.lower():
                self._show_msg(txt)
                return False

        self.data["members"] = [item.id]
        stub.imprint(item, self.data)
-        stub.set_label_color(item.id, 14)  # Cyan options 0 - 16
+        stub.rename_item(item, self.data["subset"])

pype/plugins/aftereffects/load/load_background.py  (new file, 99 lines)
@@ -0,0 +1,99 @@
import re

from avalon import api, aftereffects

from pype.lib import get_background_layers, get_unique_layer_name

stub = aftereffects.stub()


class BackgroundLoader(api.Loader):
    """
        Load images from Background family
        Creates for each background separate folder with all imported images
        from background json AND automatically created composition with layers,
        each layer for separate image.

        For each load container is created and stored in project (.aep)
        metadata
    """
    families = ["background"]
    representations = ["json"]

    def load(self, context, name=None, namespace=None, data=None):
        items = stub.get_items(comps=True)
        existing_items = [layer.name for layer in items]

        comp_name = get_unique_layer_name(
            existing_items,
            "{}_{}".format(context["asset"]["name"], name))

        layers = get_background_layers(self.fname)
        comp = stub.import_background(None, comp_name, layers)

        if not comp:
            self.log.warning(
                "Import background failed.")
            self.log.warning("Check host app for alert error.")
            return

        self[:] = [comp]
        namespace = namespace or comp_name

        return aftereffects.containerise(
            name,
            namespace,
            comp,
            context,
            self.__class__.__name__
        )

    def update(self, container, representation):
        """ Switch asset or change version """
        context = representation.get("context", {})
        _ = container.pop("layer")

        # without iterator number (_001, 002...)
        namespace_from_container = re.sub(r'_\d{3}$', '',
                                          container["namespace"])
        comp_name = "{}_{}".format(context["asset"], context["subset"])

        # switching assets
        if namespace_from_container != comp_name:
            items = stub.get_items(comps=True)
            existing_items = [layer.name for layer in items]
            comp_name = get_unique_layer_name(
                existing_items,
                "{}_{}".format(context["asset"], context["subset"]))
        else:  # switching version - keep same name
            comp_name = container["namespace"]

        path = api.get_representation_path(representation)

        layers = get_background_layers(path)
        comp = stub.reload_background(container["members"][1],
                                      comp_name,
                                      layers)

        # update container
        container["representation"] = str(representation["_id"])
        container["name"] = context["subset"]
        container["namespace"] = comp_name
        container["members"] = comp.members

        stub.imprint(comp, container)

    def remove(self, container):
        """
            Removes element from scene: deletes layer + removes from file
            metadata.
            Args:
                container (dict): container to be removed - used to get layer_id
        """
        print("!!!! container:: {}".format(container))
        layer = container.pop("layer")
        stub.imprint(layer, {})
        stub.delete_item(layer.id)

    def switch(self, container, representation):
        self.update(container, representation)
@@ -1,5 +1,5 @@
from avalon import api, aftereffects
-from pype.plugins import lib
+from pype import lib
import re

stub = aftereffects.stub()
@@ -21,9 +21,10 @@ class FileLoader(api.Loader):
    representations = ["*"]

    def load(self, context, name=None, namespace=None, data=None):
-        comp_name = lib.get_unique_layer_name(stub.get_items(comps=True),
-                                              context["asset"]["name"],
-                                              name)
+        layers = stub.get_items(comps=True, folders=True, footages=True)
+        existing_layers = [layer.name for layer in layers]
+        comp_name = lib.get_unique_layer_name(
+            existing_layers, "{}_{}".format(context["asset"]["name"], name))

        import_options = {}

@@ -77,9 +78,11 @@ class FileLoader(api.Loader):
        layer_name = "{}_{}".format(context["asset"], context["subset"])
        # switching assets
        if namespace_from_container != layer_name:
-            layer_name = lib.get_unique_layer_name(stub.get_items(comps=True),
-                                                   context["asset"],
-                                                   context["subset"])
+            layers = stub.get_items(comps=True)
+            existing_layers = [layer.name for layer in layers]
+            layer_name = lib.get_unique_layer_name(
+                existing_layers,
+                "{}_{}".format(context["asset"], context["subset"]))
        else:  # switching version - keep same name
            layer_name = container["namespace"]
        path = api.get_representation_path(representation)
@@ -33,12 +33,16 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):

        compositions = aftereffects.stub().get_items(True)
        compositions_by_id = {item.id: item for item in compositions}
-        for item_id, inst in aftereffects.stub().get_metadata().items():
+        for inst in aftereffects.stub().get_metadata():
            schema = inst.get('schema')
            # loaded asset container skip it
            if schema and 'container' in schema:
                continue

+            if not inst["members"]:
+                raise ValueError("Couldn't find id, unable to publish. " +
+                                 "Please recreate instance.")
+            item_id = inst["members"][0]
            work_area_info = aftereffects.stub().get_work_area(int(item_id))
            frameStart = work_area_info.workAreaStart

@@ -110,7 +114,10 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):

        # pull file name from Render Queue Output module
        render_q = aftereffects.stub().get_render_info()
+        if not render_q:
+            raise ValueError("No file extension set in Render Queue")
        _, ext = os.path.splitext(os.path.basename(render_q.file_name))

        base_dir = self._get_output_dir(render_instance)
        expected_files = []
        if "#" not in render_q.file_name:  # single frame (mov)
@@ -105,3 +105,13 @@ class AfterEffectsSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
        deadline_plugin_info.Output = render_path.replace("\\", "/")

        return attr.asdict(deadline_plugin_info)
+
+    def from_published_scene(self):
+        """ Do not overwrite expected files.
+
+            Use published is set to True, so rendering will be triggered
+            from published scene (in 'publish' folder). Default implementation
+            of abstract class renames expected (eg. rendered) files accordingly
+            which is not needed here.
+        """
+        return super().from_published_scene(False)
@@ -11,7 +11,7 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
    """Submit CelAction2D scene to Deadline

    Renders are submitted to a Deadline Web Service as
-    supplied via the environment variable DEADLINE_REST_URL
+    supplied via settings key "DEADLINE_REST_URL".

    """

@@ -37,10 +37,15 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
        instance.data["toBeRenderedOn"] = "deadline"
        context = instance.context

-        DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL")
-        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
+        deadline_url = (
+            context.data["system_settings"]
+            ["modules"]
+            ["deadline"]
+            ["DEADLINE_REST_URL"]
+        )
+        assert deadline_url, "Requires DEADLINE_REST_URL"

-        self.deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL)
+        self.deadline_url = "{}/api/jobs".format(deadline_url)
        self._comment = context.data.get("comment", "")
        self._deadline_user = context.data.get(
            "deadlineUser", getpass.getuser())
@@ -61,8 +61,8 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
                "Missing FTrack Task entity in context")

        host = pyblish.api.current_host()
-        to_check = context.data["presets"].get(
-            host, {}).get("ftrack_custom_attributes")
+        to_check = self.ftrack_custom_attributes.get(host, {})

        if not to_check:
            self.log.warning("ftrack_attributes preset not found")
            return
@@ -12,7 +12,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit current Comp to Deadline

    Renders are submitted to a Deadline Web Service as
-    supplied via the environment variable DEADLINE_REST_URL
+    supplied via settings key "DEADLINE_REST_URL".

    """

@@ -32,9 +32,13 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):

        from avalon.fusion.lib import get_frame_path

-        DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
-                                            "http://localhost:8082")
-        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
+        deadline_url = (
+            context.data["system_settings"]
+            ["modules"]
+            ["deadline"]
+            ["DEADLINE_REST_URL"]
+        )
+        assert deadline_url, "Requires DEADLINE_REST_URL"

        # Collect all saver instances in context that are to be rendered
        saver_instances = []
@@ -15,6 +15,7 @@ Provides:
import os
import json

+from pype.lib import ApplicationManager
from avalon import api, lib
import pyblish.api

@@ -64,12 +65,12 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
            "username": context.data["user"]
        }

-        avalon_app_name = os.environ.get("AVALON_APP_NAME")
-        if avalon_app_name:
-            application_def = lib.get_application(avalon_app_name)
-            app_dir = application_def.get("application_dir")
-            if app_dir:
-                context_data["app"] = app_dir
+        app_manager = ApplicationManager()
+        app_name = os.environ.get("AVALON_APP_NAME")
+        if app_name:
+            app = app_manager.applications.get(app_name)
+            if app:
+                context_data["app"] = app.host_name

        datetime_data = context.data.get("datetimeData") or {}
        context_data.update(datetime_data)
@@ -1,24 +0,0 @@
"""
Requires:
    config_data -> colorspace.default
    config_data -> dataflow.default

Provides:
    context -> presets
"""

from pyblish import api
from pype.api import get_current_project_settings


class CollectPresets(api.ContextPlugin):
    """Collect Presets."""

    order = api.CollectorOrder - 0.491
    label = "Collect Presets"

    def process(self, context):
        project_settings = get_current_project_settings()
        context.data["presets"] = project_settings

        return

pype/plugins/global/publish/collect_settings.py  (new file, 13 lines)
@@ -0,0 +1,13 @@
from pyblish import api
from pype.api import get_current_project_settings, get_system_settings


class CollectSettings(api.ContextPlugin):
    """Collect Settings and store in the context."""

    order = api.CollectorOrder - 0.491
    label = "Collect Settings"

    def process(self, context):
        context.data["project_settings"] = get_current_project_settings()
        context.data["system_settings"] = get_system_settings()
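Downstream plugins read these context keys instead of environment variables; the
lookup pattern repeated throughout the rest of this commit looks like this
(assuming CollectSettings already ran on the pyblish context):

    # sketch only -- mirrors the pattern used by the submit/validate plugins below
    deadline_url = (
        context.data["system_settings"]
        ["modules"]
        ["deadline"]
        ["DEADLINE_REST_URL"]
    )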
@@ -68,8 +68,6 @@ class ExtractBurnin(pype.api.Extractor):
        if "representations" not in instance.data:
            raise RuntimeError("Burnin needs already created mov to work on.")

-        if self.use_legacy_code(instance):
-            return self.legacy_process(instance)
        self.main_process(instance)

        # Remove any representations tagged for deletion.

@@ -79,12 +77,6 @@ class ExtractBurnin(pype.api.Extractor):
                self.log.debug("Removing representation: {}".format(repre))
                instance.data["representations"].remove(repre)

-    def use_legacy_code(self, instance):
-        presets = instance.context.data.get("presets")
-        if presets is None and self.profiles is None:
-            return True
-        return "burnins" in (presets.get("tools") or {})
-
    def main_process(self, instance):
        # TODO get these data from context
        host_name = os.environ["AVALON_APP"]
@@ -700,7 +692,7 @@ class ExtractBurnin(pype.api.Extractor):
        return filtered_burnin_defs

    def families_filter_validation(self, families, output_families_filter):
-        """Determines if entered families intersect with families filters.
+        """Determine if entered families intersect with families filters.

        All family values are lowered to avoid unexpected results.
        """

@@ -747,7 +739,7 @@ class ExtractBurnin(pype.api.Extractor):
        return regexes

    def validate_value_by_regexes(self, value, in_list):
-        """Validates in any regexe from list match entered value.
+        """Validate if any regex from the list matches entered value.

        Args:
            in_list (list): List with regexes.

@@ -770,14 +762,14 @@ class ExtractBurnin(pype.api.Extractor):
        return output

    def main_family_from_instance(self, instance):
-        """Returns main family of entered instance."""
+        """Return main family of entered instance."""
        family = instance.data.get("family")
        if not family:
            family = instance.data["families"][0]
        return family

    def families_from_instance(self, instance):
-        """Returns all families of entered instance."""
+        """Return all families of entered instance."""
        families = []
        family = instance.data.get("family")
        if family:

@@ -789,7 +781,7 @@ class ExtractBurnin(pype.api.Extractor):
        return families

    def burnin_script_path(self):
-        """Returns path to python script for burnin processing."""
+        """Return path to python script for burnin processing."""
        # TODO maybe convert to Plugin's attribute
        # Get script path.
        module_path = os.environ["PYPE_MODULE_ROOT"]

@@ -813,7 +805,7 @@ class ExtractBurnin(pype.api.Extractor):
        return scriptpath

    def python_executable_path(self):
-        """Returns path to Python 3 executable."""
+        """Return path to Python 3 executable."""
        # TODO maybe convert to Plugin's attribute
        # Get executable.
        executable = os.getenv("PYPE_PYTHON_EXE")
@@ -825,211 +817,3 @@ class ExtractBurnin(pype.api.Extractor):

        self.log.debug("executable: {}".format(executable))
        return executable
-
-    def legacy_process(self, instance):
-        self.log.warning("Legacy burnin presets are used.")
-
-        context_data = instance.context.data
-
-        version = instance.data.get(
-            'version', instance.context.data.get('version'))
-        frame_start = int(instance.data.get("frameStart") or 0)
-        frame_end = int(instance.data.get("frameEnd") or 1)
-        handle_start = instance.data.get("handleStart",
-                                         context_data.get("handleStart"))
-        handle_end = instance.data.get("handleEnd",
-                                       context_data.get("handleEnd"))
-
-        frame_start_handle = frame_start - handle_start
-        frame_end_handle = frame_end + handle_end
-        duration = frame_end_handle - frame_start_handle + 1
-
-        prep_data = copy.deepcopy(instance.data["anatomyData"])
-
-        if "slate.farm" in instance.data["families"]:
-            frame_start_handle += 1
-            duration -= 1
-
-        prep_data.update({
-            "frame_start": frame_start_handle,
-            "frame_end": frame_end_handle,
-            "duration": duration,
-            "version": int(version),
-            "comment": instance.context.data.get("comment", "")
-        })
-
-        intent_label = instance.context.data.get("intent")
-        if intent_label and isinstance(intent_label, dict):
-            intent_label = intent_label.get("label")
-
-        if intent_label:
-            prep_data["intent"] = intent_label
-
-        # get anatomy project
-        anatomy = instance.context.data['anatomy']
-
-        self.log.debug("__ prep_data: {}".format(prep_data))
-        for i, repre in enumerate(instance.data["representations"]):
-            self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
-
-            if instance.data.get("multipartExr") is True:
-                # ffmpeg doesn't support multipart exrs
-                continue
-
-            if "burnin" not in repre.get("tags", []):
-                continue
-
-            is_sequence = "sequence" in repre.get("tags", [])
-
-            # no handles switch from profile tags
-            no_handles = "no-handles" in repre.get("tags", [])
-
-            stagingdir = repre["stagingDir"]
-            filename = "{0}".format(repre["files"])
-
-            if is_sequence:
-                filename = repre["sequence_file"]
-
-            name = "_burnin"
-            ext = os.path.splitext(filename)[1]
-            movieFileBurnin = filename.replace(ext, "") + name + ext
-
-            if is_sequence:
-                fn_splt = filename.split(".")
-                movieFileBurnin = ".".join(
-                    ((fn_splt[0] + name), fn_splt[-2], fn_splt[-1]))
-
-            self.log.debug("__ movieFileBurnin: `{}`".format(movieFileBurnin))
-
-            full_movie_path = os.path.join(
-                os.path.normpath(stagingdir), filename)
-            full_burnin_path = os.path.join(
-                os.path.normpath(stagingdir), movieFileBurnin)
-
-            self.log.debug("__ full_movie_path: {}".format(full_movie_path))
-            self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))
-
-            # create copy of prep_data for anatomy formatting
-            _prep_data = copy.deepcopy(prep_data)
-            _prep_data["representation"] = repre["name"]
-            filled_anatomy = anatomy.format_all(_prep_data)
-            _prep_data["anatomy"] = filled_anatomy.get_solved()
-
-            # copy frame range variables
-            frame_start_cp = frame_start_handle
-            frame_end_cp = frame_end_handle
-            duration_cp = duration
-
-            if no_handles:
-                frame_start_cp = frame_start
-                frame_end_cp = frame_end
-                duration_cp = frame_end_cp - frame_start_cp + 1
-                _prep_data.update({
-                    "frame_start": frame_start_cp,
-                    "frame_end": frame_end_cp,
-                    "duration": duration_cp,
-                })
-
-            # dealing with slates
-            slate_frame_start = frame_start_cp
-            slate_frame_end = frame_end_cp
-            slate_duration = duration_cp
-
-            # exception for slate workflow
-            if "slate" in instance.data["families"]:
-                if "slate-frame" in repre.get("tags", []):
-                    slate_frame_start = frame_start_cp - 1
-                    slate_frame_end = frame_end_cp
-                    slate_duration = duration_cp + 1
-
-            self.log.debug("__1 slate_frame_start: {}".format(
-                slate_frame_start))
-
-            _prep_data.update({
-                "slate_frame_start": slate_frame_start,
-                "slate_frame_end": slate_frame_end,
-                "slate_duration": slate_duration
-            })
-
-            burnin_data = {
-                "input": full_movie_path.replace("\\", "/"),
-                "codec": repre.get("codec", []),
-                "output": full_burnin_path.replace("\\", "/"),
-                "burnin_data": _prep_data
-            }
-
-            self.log.debug("__ burnin_data2: {}".format(burnin_data))
-
-            json_data = json.dumps(burnin_data)
-
-            # Get script path.
-            module_path = os.environ['PYPE_MODULE_ROOT']
-
-            # There can be multiple paths in PYPE_MODULE_ROOT, in which case
-            # we just take first one.
-            if os.pathsep in module_path:
-                module_path = module_path.split(os.pathsep)[0]
-
-            scriptpath = os.path.normpath(
-                os.path.join(
-                    module_path,
-                    "pype",
-                    "scripts",
-                    "otio_burnin.py"
-                )
-            )
-
-            self.log.debug("__ scriptpath: {}".format(scriptpath))
-
-            # Get executable.
-            executable = os.getenv("PYPE_PYTHON_EXE")
-
-            # There can be multiple paths in PYPE_PYTHON_EXE, in which case
-            # we just take first one.
-            if os.pathsep in executable:
-                executable = executable.split(os.pathsep)[0]
-
-            self.log.debug("__ EXE: {}".format(executable))
-
-            args = [executable, scriptpath, json_data]
-            self.log.debug("Executing: {}".format(args))
-            output = pype.api.subprocess(args, shell=True, logger=self.log)
-            self.log.debug("Output: {}".format(output))
-
-            repre_update = {
-                "files": movieFileBurnin,
-                "name": repre["name"],
-                "tags": [x for x in repre["tags"] if x != "delete"]
-            }
-
-            if is_sequence:
-                burnin_seq_files = list()
-                for frame_index in range(_prep_data["duration"] + 1):
-                    if frame_index == 0:
-                        continue
-                    burnin_seq_files.append(movieFileBurnin % frame_index)
-                repre_update.update({
-                    "files": burnin_seq_files
-                })
-
-            instance.data["representations"][i].update(repre_update)
-
-            # removing the source mov file
-            if is_sequence:
-                for frame_index in range(_prep_data["duration"] + 1):
-                    if frame_index == 0:
-                        continue
-                    rm_file = full_movie_path % frame_index
-                    os.remove(rm_file)
-                    self.log.debug("Removed: `{}`".format(rm_file))
-            else:
-                os.remove(full_movie_path)
-                self.log.debug("Removed: `{}`".format(full_movie_path))
-
-        # Remove any representations tagged for deletion.
-        for repre in instance.data["representations"]:
-            if "delete" in repre.get("tags", []):
-                self.log.debug("Removing representation: {}".format(repre))
-                instance.data["representations"].remove(repre)
-
-        self.log.debug(instance.data["representations"])
@@ -348,6 +348,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
            + 1
        )

+        duration_seconds = float(output_frames_len / temp_data["fps"])
+
        if temp_data["input_is_sequence"]:
            # Set start frame of input sequence (just frame in filename)
            # - definition of input filepath
@@ -375,33 +377,39 @@ class ExtractReview(pyblish.api.InstancePlugin):

        # Change output's duration and start point if should not contain
        # handles
+        start_sec = 0
        if temp_data["without_handles"] and temp_data["handles_are_set"]:
            # Set start time without handles
            # - check if handle_start is bigger than 0 to avoid zero division
            if temp_data["handle_start"] > 0:
                start_sec = float(temp_data["handle_start"]) / temp_data["fps"]
-                ffmpeg_input_args.append("-ss {:0.2f}".format(start_sec))
+                ffmpeg_input_args.append("-ss {:0.10f}".format(start_sec))

            # Set output duration in seconds
-            duration_sec = float(output_frames_len / temp_data["fps"])
-            ffmpeg_output_args.append("-t {:0.2f}".format(duration_sec))
+            ffmpeg_output_args.append("-t {:0.10f}".format(duration_seconds))

        # Set frame range of output when input or output is sequence
-        elif temp_data["input_is_sequence"] or temp_data["output_is_sequence"]:
+        elif temp_data["output_is_sequence"]:
            ffmpeg_output_args.append("-frames:v {}".format(output_frames_len))

+        # Add duration of an input sequence if output is video
+        if (
+            temp_data["input_is_sequence"]
+            and not temp_data["output_is_sequence"]
+        ):
+            ffmpeg_input_args.append("-to {:0.10f}".format(
+                duration_seconds + start_sec
+            ))
+
        # Add video/image input path
        ffmpeg_input_args.append(
            "-i \"{}\"".format(temp_data["full_input_path"])
        )

        # Use shortest input
        ffmpeg_output_args.append("-shortest")

        # Add audio arguments if there are any. Skipped when output are images.
        if not temp_data["output_ext_is_image"] and temp_data["with_audio"]:
            audio_in_args, audio_filters, audio_out_args = self.audio_args(
-                instance, temp_data
+                instance, temp_data, duration_seconds
            )
            ffmpeg_input_args.extend(audio_in_args)
            ffmpeg_audio_filters.extend(audio_filters)
@@ -616,7 +624,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
        self.log.debug("Input path {}".format(full_input_path))
        self.log.debug("Output path {}".format(full_output_path))

-    def audio_args(self, instance, temp_data):
+    def audio_args(self, instance, temp_data, duration_seconds):
        """Prepares FFMpeg arguments for audio inputs."""
        audio_in_args = []
        audio_filters = []

@@ -639,11 +647,19 @@ class ExtractReview(pyblish.api.InstancePlugin):
                audio_in_args.append(
                    "-ss {}".format(offset_seconds)
                )

            elif offset_seconds < 0:
                audio_in_args.append(
                    "-itsoffset {}".format(abs(offset_seconds))
                )

+            # Audio duration is offset from `-ss`
+            audio_duration = duration_seconds + offset_seconds
+
+            # Set audio duration
+            audio_in_args.append("-to {:0.10f}".format(audio_duration))
+
            # Add audio input path
            audio_in_args.append("-i \"{}\"".format(audio["filename"]))

            # NOTE: These were changed from input to output arguments.
@@ -305,7 +305,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

        self.log.info("Submitting Deadline job ...")

-        url = "{}/api/jobs".format(self.DEADLINE_REST_URL)
+        url = "{}/api/jobs".format(self.deadline_url)
        response = requests.post(url, json=payload, timeout=10)
        if not response.ok:
            raise Exception(response.text)

@@ -924,10 +924,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        }

        if submission_type == "deadline":
-            self.DEADLINE_REST_URL = os.environ.get(
-                "DEADLINE_REST_URL", "http://localhost:8082"
+            self.deadline_url = (
+                context.data["system_settings"]
+                ["modules"]
+                ["deadline"]
+                ["DEADLINE_REST_URL"]
            )
-            assert self.DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
+            assert self.deadline_url, "Requires DEADLINE_REST_URL"

            self._submit_deadline_post_job(instance, render_job, instances)
@@ -38,7 +38,7 @@ class ExtractPalette(pype.api.Extractor):
            os.path.basename(palette_file)
            .split(".plt")[0] + "_swatches.png"
        )
-        self.log.info(f"Temporary humbnail path {tmp_thumb_path}")
+        self.log.info(f"Temporary thumbnail path {tmp_thumb_path}")

        palette_version = str(instance.data.get("version")).zfill(3)

@@ -52,6 +52,11 @@ class ExtractPalette(pype.api.Extractor):
                                   palette_version,
                                   palette_file,
                                   tmp_thumb_path)
+        except OSError as e:
+            # FIXME: this happens on Mac where PIL cannot access fonts
+            # for some reason.
+            self.log.warning("Thumbnail generation failed")
+            self.log.warning(e)
        except ValueError:
            self.log.error("Unsupported palette type for thumbnail.")
@@ -31,7 +31,11 @@ class ExtractTemplate(pype.api.Extractor):
            for backdrop in self.get_backdrops(dependency):
                backdrops[backdrop["title"]["text"]] = backdrop
        unique_backdrops = [backdrops[x] for x in set(backdrops.keys())]
-
+        if not unique_backdrops:
+            self.log.error(("No backdrops detected for template. "
+                            "Please move template instance node onto "
+                            "some backdrop and try again."))
+            raise AssertionError("No backdrop detected")
        # Get non-connected nodes within backdrops.
        all_nodes = instance.context.data.get("allNodes")
        for node in [x for x in all_nodes if x not in dependencies]:
@@ -1,26 +0,0 @@
import re


def get_unique_layer_name(layers, asset_name, subset_name):
    """
        Gets all layer names and if 'name' is present in them, increases
        suffix by 1 (eg. creates unique layer name - for Loader)
    Args:
        layers (list): of namedtuples, expects 'name' field present
        asset_name (string): in format asset_subset (Hero)
        subset_name (string): (LOD)

    Returns:
        (string): name_00X (without version)
    """
    name = "{}_{}".format(asset_name, subset_name)
    names = {}
    for layer in layers:
        layer_name = re.sub(r'_\d{3}$', '', layer.name)
        if layer_name in names.keys():
            names[layer_name] = names[layer_name] + 1
        else:
            names[layer_name] = 1
    occurrences = names.get(name, 0)

    return "{}_{:0>3d}".format(name, occurrences + 1)
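The helper moves into pype.lib with a simpler signature (a list of existing names
plus the combined asset_subset string), as the loader hunks above show. Worked
example of the suffix logic, assuming that new signature:

    # sketch only -- names are made up
    existing = ["Hero_LOD_001", "Hero_LOD_002", "Other_001"]
    get_unique_layer_name(existing, "Hero_LOD")  # -> "Hero_LOD_003"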
@@ -9,6 +9,7 @@ from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup

from pype.hosts.maya import lib
+from pype.api import get_system_settings
import avalon.maya

@@ -124,8 +125,11 @@ class CreateRender(avalon.maya.Creator):
        # get pools
        pools = []

-        deadline_url = os.environ.get("DEADLINE_REST_URL", None)
-        muster_url = os.environ.get("MUSTER_REST_URL", None)
+        system_settings = get_system_settings()["modules"]
+
+        deadline_url = system_settings["deadline"]["DEADLINE_REST_URL"]
+        muster_url = system_settings["muster"]["MUSTER_REST_URL"]

        if deadline_url and muster_url:
            self.log.error(
                "Both Deadline and Muster are enabled. " "Cannot support both."

@@ -198,7 +202,7 @@ class CreateRender(avalon.maya.Creator):
        """Load Muster credentials.

        Load Muster credentials from file and set ``MUSTER_USER``,
-        ``MUSTER_PASSWORD``, ``MUSTER_REST_URL`` is loaded from presets.
+        ``MUSTER_PASSWORD``, ``MUSTER_REST_URL`` is loaded from settings.

        Raises:
            RuntimeError: If loaded credentials are invalid.
@@ -15,7 +15,7 @@ class CreateRenderSetup(avalon.maya.Creator):
        super(CreateRenderSetup, self).__init__(*args, **kwargs)

        # here we can pre-create renderSetup layers, possibly utilizing
-        # presets for it.
+        # settings for it.

        #  _____
        # /  __\__
@@ -102,10 +102,11 @@ class ExtractCameraMayaScene(pype.api.Extractor):
    def process(self, instance):
        """Plugin entry point."""
        # get settings
-        ext_mapping = (instance.context.data["presets"]["maya"]
-                       .get("ext_mapping"))  # noqa: E501
+        ext_mapping = (
+            instance.context.data["project_settings"]["maya"]["ext_mapping"]
+        )
        if ext_mapping:
-            self.log.info("Looking in presets for scene type ...")
+            self.log.info("Looking in settings for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
@@ -24,9 +24,11 @@ class ExtractMayaSceneRaw(pype.api.Extractor):

    def process(self, instance):
        """Plugin entry point."""
-        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
+        ext_mapping = (
+            instance.context.data["project_settings"]["maya"]["ext_mapping"]
+        )
        if ext_mapping:
-            self.log.info("Looking in presets for scene type ...")
+            self.log.info("Looking in settings for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
@@ -31,9 +31,11 @@ class ExtractModel(pype.api.Extractor):

    def process(self, instance):
        """Plugin entry point."""
-        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
+        ext_mapping = (
+            instance.context.data["project_settings"]["maya"]["ext_mapping"]
+        )
        if ext_mapping:
-            self.log.info("Looking in presets for scene type ...")
+            self.log.info("Looking in settings for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
@@ -43,7 +43,9 @@ class ExtractPlayblast(pype.api.Extractor):

        # get cameras
        camera = instance.data['review_camera']
-        capture_preset = instance.context.data['presets']['maya']['capture']
+        capture_preset = (
+            instance.context.data['project_settings']['maya']['capture']
+        )

        try:
            preset = lib.load_capture_preset(data=capture_preset)
@@ -18,9 +18,11 @@ class ExtractRig(pype.api.Extractor):

    def process(self, instance):
        """Plugin entry point."""
-        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
+        ext_mapping = (
+            instance.context.data["project_settings"]["maya"]["ext_mapping"]
+        )
        if ext_mapping:
-            self.log.info("Looking in presets for scene type ...")
+            self.log.info("Looking in settings for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
@@ -33,7 +33,10 @@ class ExtractThumbnail(pype.api.Extractor):
        camera = instance.data['review_camera']

        capture_preset = ""
-        capture_preset = instance.context.data['presets']['maya']['capture']
+        capture_preset = (
+            instance.context.data["project_settings"]['maya']['capture']
+        )

        try:
            preset = lib.load_capture_preset(data=capture_preset)
        except:
@@ -101,9 +101,11 @@ class ExtractYetiRig(pype.api.Extractor):

    def process(self, instance):
        """Plugin entry point."""
-        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
+        ext_mapping = (
+            instance.context.data["project_settings"]["maya"]["ext_mapping"]
+        )
        if ext_mapping:
-            self.log.info("Looking in presets for scene type ...")
+            self.log.info("Looking in settings for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
@@ -238,11 +238,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit available render layers to Deadline.

    Renders are submitted to a Deadline Web Service as
-    supplied via the environment variable ``DEADLINE_REST_URL``.
-
-    Note:
-        If Deadline configuration is not detected, this plugin will
-        be disabled.
+    supplied via settings key "DEADLINE_REST_URL".

    Attributes:
        use_published (bool): Use published scene to render instead of the

@@ -254,11 +250,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["maya"]
    families = ["renderlayer"]
-    if not os.environ.get("DEADLINE_REST_URL"):
-        optional = False
-        active = False
-    else:
-        optional = True

    use_published = True
    tile_assembler_plugin = "PypeTileAssembler"

@@ -267,9 +258,16 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
    def process(self, instance):
        """Plugin entry point."""
        instance.data["toBeRenderedOn"] = "deadline"
+        context = instance.context

        self._instance = instance
-        self._deadline_url = os.environ.get(
-            "DEADLINE_REST_URL", "http://localhost:8082")
+        self._deadline_url = (
+            context.data["system_settings"]
+            ["modules"]
+            ["deadline"]
+            ["DEADLINE_REST_URL"]
+        )
+
+        assert self._deadline_url, "Requires DEADLINE_REST_URL"

-        context = instance.context
@@ -153,7 +153,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
    def _load_credentials(self):
        """
        Load Muster credentials from file and set `MUSTER_USER`,
-        `MUSTER_PASSWORD`, `MUSTER_REST_URL` is loaded from presets.
+        `MUSTER_PASSWORD`, `MUSTER_REST_URL` is loaded from settings.

        .. todo::
@@ -22,9 +22,12 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
    actions = [pype.api.RepairContextAction]
    optional = True

+    attributes = None
+
    def process(self, context):
-        # Check for preset existence.
-        if not context.data["presets"]["maya"].get("attributes"):
+        if not self.attributes:
            return

        invalid = self.get_invalid(context, compute=True)

@@ -43,7 +46,6 @@ class ValidateAttributes(pyblish.api.ContextPlugin):

    @classmethod
    def get_invalid_attributes(cls, context):
-        presets = context.data["presets"]["maya"]["attributes"]
        invalid_attributes = []
        for instance in context:
            # Filter publishable instances.

@@ -53,23 +55,23 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
            # Filter families.
            families = [instance.data["family"]]
            families += instance.data.get("families", [])
-            families = list(set(families) & set(presets.keys()))
+            families = list(set(families) & set(cls.attributes.keys()))
            if not families:
                continue

            # Get all attributes to validate.
            attributes = {}
            for family in families:
-                for preset in presets[family]:
+                for preset in cls.attributes[family]:
                    [node_name, attribute_name] = preset.split(".")
                    try:
                        attributes[node_name].update(
-                            {attribute_name: presets[family][preset]}
+                            {attribute_name: cls.attributes[family][preset]}
                        )
                    except KeyError:
                        attributes.update({
                            node_name: {
-                                attribute_name: presets[family][preset]
+                                attribute_name: cls.attributes[family][preset]
                            }
                        })
@@ -12,8 +12,6 @@ class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    families = ["renderlayer"]
-    if not os.environ.get("DEADLINE_REST_URL"):
-        active = False

    def process(self, context):

@@ -21,14 +19,15 @@ class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
        if not contextplugin_should_run(self, context):
            return

-        try:
-            DEADLINE_REST_URL = os.environ["DEADLINE_REST_URL"]
-        except KeyError:
-            self.log.error("Deadline REST API url not found.")
-            raise ValueError("Deadline REST API url not found.")
+        deadline_url = (
+            context.data["system_settings"]
+            ["modules"]
+            ["deadline"]
+            ["DEADLINE_REST_URL"]
+        )

        # Check response
-        response = self._requests_get(DEADLINE_REST_URL)
+        response = self._requests_get(deadline_url)
        assert response.ok, "Response must be ok"
        assert response.text.startswith("Deadline Web Service "), (
            "Web service did not respond with 'Deadline Web Service'"
@@ -58,7 +58,7 @@ class ValidateMusterConnection(pyblish.api.ContextPlugin):
    def _load_credentials(self):
        """
        Load Muster credentials from file and set `MUSTER_USER`,
-        `MUSTER_PASSWORD`, `MUSTER_REST_URL` is loaded from presets.
+        `MUSTER_PASSWORD`, `MUSTER_REST_URL` is loaded from settings.

        .. todo::
@@ -12,7 +12,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit write to Deadline

    Renders are submitted to a Deadline Web Service as
-    supplied via the environment variable DEADLINE_REST_URL
+    supplied via settings key "DEADLINE_REST_URL".

    """

@@ -34,11 +34,15 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
        node = instance[0]
        context = instance.context

-        DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
-                                           "http://localhost:8082")
-        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
+        deadline_url = (
+            context.data["system_settings"]
+            ["modules"]
+            ["deadline"]
+            ["DEADLINE_REST_URL"]
+        )
+        assert deadline_url, "Requires DEADLINE_REST_URL"

-        self.deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL)
+        self.deadline_url = "{}/api/jobs".format(deadline_url)
        self._comment = context.data.get("comment", "")
        self._ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))
        self._deadline_user = context.data.get(
@@ -9,8 +9,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
 
     Knobs to validate and their values comes from the
 
-    Example for presets in config:
-    "presets/plugins/nuke/publish.json" preset, which needs this structure:
+    Controled by plugin settings that require json in following structure:
     "ValidateKnobs": {
         "enabled": true,
         "knobs": {

@@ -28,20 +27,6 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
     optional = True
 
     def process(self, context):
-        nuke_presets = context.data["presets"].get("nuke")
-
-        if not nuke_presets:
-            return
-
-        publish_presets = nuke_presets.get("publish")
-
-        if not publish_presets:
-            return
-
-        plugin_preset = publish_presets.get("ValidateKnobs")
-
-        if not plugin_preset:
-            return
-
         invalid = self.get_invalid(context, compute=True)
         if invalid:

@@ -60,8 +45,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
     @classmethod
     def get_invalid_knobs(cls, context):
         invalid_knobs = []
-        publish_presets = context.data["presets"]["nuke"]["publish"]
-        knobs_preset = publish_presets["ValidateKnobs"]["knobs"]
 
         for instance in context:
             # Filter publisable instances.
             if not instance.data["publish"]:

@@ -70,15 +54,15 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
             # Filter families.
             families = [instance.data["family"]]
             families += instance.data.get("families", [])
-            families = list(set(families) & set(knobs_preset.keys()))
+            families = list(set(families) & set(self.knobs.keys()))
             if not families:
                 continue
 
             # Get all knobs to validate.
             knobs = {}
             for family in families:
-                for preset in knobs_preset[family]:
-                    knobs.update({preset: knobs_preset[family][preset]})
+                for preset in self.knobs[family]:
+                    knobs.update({preset: self.knobs[family][preset]})
 
             # Get invalid knobs.
             nodes = []

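ValidateKnobs now reads its configuration from plugin settings (exposed as the `knobs` attribute) instead of the nuke presets file. A small sketch of how such a per-family mapping is consumed; "knobs_settings" and the knob value are illustrative stand-ins:

# "knobs_settings" stands in for the plugin's settings-driven `knobs`
# attribute, keyed by family.
knobs_settings = {
    "render": {"tile_color": "0xff0000ff"}
}

def knobs_for_families(families, knobs_settings):
    # Merge knob/value pairs for every family that has settings.
    knobs = {}
    for family in set(families) & set(knobs_settings):
        knobs.update(knobs_settings[family])
    return knobs

print(knobs_for_families(["render", "write"], knobs_settings))
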
@@ -104,6 +104,7 @@ class LoadImage(pipeline.Loader):
 
     def _remove_layers(self, layer_ids, layers=None):
         if not layer_ids:
             self.log.warning("Got empty layer ids list.")
+            return
 
         if layers is None:

@@ -117,6 +118,7 @@ class LoadImage(pipeline.Loader):
             layer_ids_to_remove.append(layer_id)
 
         if not layer_ids_to_remove:
             self.log.warning("No layers to delete.")
+            return
 
         george_script_lines = []

@@ -128,12 +130,14 @@ class LoadImage(pipeline.Loader):
 
     def remove(self, container):
         layer_ids = self.layer_ids_from_container(container)
         self.log.warning("Layers to delete {}".format(layer_ids))
         self._remove_layers(layer_ids)
 
         current_containers = pipeline.ls()
         pop_idx = None
         for idx, cur_con in enumerate(current_containers):
             if cur_con["objectName"] == container["objectName"]:
+                cur_con_layer_ids = self.layer_ids_from_container(cur_con)
+                if cur_con_layer_ids == layer_ids:
                     pop_idx = idx
                     break

@@ -1,6 +1,8 @@
 import os
+import json
 
 import pyblish.api
+import avalon.api
 from avalon.tvpaint import pipeline, lib
 
 

@@ -10,26 +12,64 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
     hosts = ["tvpaint"]
 
     def process(self, context):
+        current_project_id = lib.execute_george("tv_projectcurrentid")
+        lib.execute_george("tv_projectselect {}".format(current_project_id))
+
+        # Collect and store current context to have reference
+        current_context = {
+            "project": avalon.api.Session["AVALON_PROJECT"],
+            "asset": avalon.api.Session["AVALON_ASSET"],
+            "task": avalon.api.Session["AVALON_TASK"]
+        }
+        context.data["previous_context"] = current_context
+        self.log.debug("Current context is: {}".format(current_context))
+
+        # Collect context from workfile metadata
+        self.log.info("Collecting workfile context")
+        workfile_context = pipeline.get_current_workfile_context()
+        if workfile_context:
+            # Change current context with context from workfile
+            key_map = (
+                ("AVALON_ASSET", "asset"),
+                ("AVALON_TASK", "task")
+            )
+            for env_key, key in key_map:
+                avalon.api.Session[env_key] = workfile_context[key]
+                os.environ[env_key] = workfile_context[key]
+        else:
+            # Handle older workfiles or workfiles without metadata
+            self.log.warning(
+                "Workfile does not contain information about context."
+                " Using current Session context."
+            )
+            workfile_context = current_context.copy()
+
+        context.data["workfile_context"] = workfile_context
+        self.log.info("Context changed to: {}".format(workfile_context))
+
         # Collect instances
         self.log.info("Collecting instance data from workfile")
         instance_data = pipeline.list_instances()
-        context.data["workfileInstances"] = instance_data
+        self.log.debug(
+            "Instance data:\"{}".format(json.dumps(instance_data, indent=4))
+        )
+        context.data["workfileInstances"] = instance_data
 
         # Collect information about layers
         self.log.info("Collecting layers data from workfile")
         layers_data = lib.layers_data()
-        context.data["layersData"] = layers_data
+        self.log.debug(
+            "Layers data:\"{}".format(json.dumps(layers_data, indent=4))
+        )
+        context.data["layersData"] = layers_data
 
         # Collect information about groups
         self.log.info("Collecting groups data from workfile")
         group_data = lib.groups_data()
-        context.data["groupsData"] = group_data
+        self.log.debug(
+            "Group data:\"{}".format(json.dumps(group_data, indent=4))
+        )
+        context.data["groupsData"] = group_data
 
         self.log.info("Collecting scene data from workfile")
         workfile_info_parts = lib.execute_george("tv_projectinfo").split(" ")

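The collector prefers context stored in workfile metadata and only falls back to the current Session for older workfiles. A hedged sketch of the remapping step, with plain dicts standing in for avalon.api.Session and os.environ:

def apply_workfile_context(workfile_context, session, environ):
    # Mirrors the key_map loop above; only asset and task are remapped,
    # the project name is validated by a separate plugin.
    key_map = (("AVALON_ASSET", "asset"), ("AVALON_TASK", "task"))
    for env_key, key in key_map:
        session[env_key] = workfile_context[key]
        environ[env_key] = workfile_context[key]
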
@@ -0,0 +1,37 @@
+import os
+import pyblish.api
+
+
+class ValidateWorkfileProjectName(pyblish.api.ContextPlugin):
+    """Validate project name stored in workfile metadata.
+
+    It is not possible to publish from different project than is set in
+    environment variable "AVALON_PROJECT".
+    """
+
+    label = "Validate Workfile Project Name"
+    order = pyblish.api.ValidatorOrder
+
+    def process(self, context):
+        workfile_context = context.data["workfile_context"]
+        workfile_project_name = workfile_context["project"]
+        env_project_name = os.environ["AVALON_PROJECT"]
+        if workfile_project_name == env_project_name:
+            self.log.info((
+                "Both workfile project and environment project are same. {}"
+            ).format(env_project_name))
+            return
+
+        # Raise an error
+        raise AssertionError((
+            # Short message
+            "Workfile from different Project ({})."
+            # Description what's wrong
+            " It is not possible to publish when TVPaint was launched in"
+            "context of different project. Current context project is \"{}\"."
+            " Launch TVPaint in context of project \"{}\" and then publish."
+        ).format(
+            workfile_project_name,
+            env_project_name,
+            workfile_project_name,
+        ))

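A hedged sketch of exercising this new validator outside a host, assuming the collector above has already stored "workfile_context" on the context (pyblish.util ships convenience runners for exactly this):

import pyblish.api
import pyblish.util

pyblish.api.register_plugin(ValidateWorkfileProjectName)
context = pyblish.util.collect()      # collectors run first
context = pyblish.util.validate(context)
# A project-name mismatch is recorded as a failed result on the
# context; pyblish catches the plugin's AssertionError per plugin
# rather than raising it to the caller.
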
@@ -1,32 +0,0 @@
-{
-    "PYPE_STUDIO_NAME": "Studio Name",
-    "PYPE_STUDIO_CODE": "stu",
-    "PYPE_APP_ROOT": "{PYPE_SETUP_PATH}/pypeapp",
-    "PYPE_MODULE_ROOT": "{PYPE_SETUP_PATH}/repos/pype",
-    "PYPE_PROJECT_PLUGINS": "",
-    "STUDIO_SOFT": "{PYPE_SETUP_ROOT}/soft",
-    "FFMPEG_PATH": {
-        "windows": "{VIRTUAL_ENV}/localized/ffmpeg_exec/windows/bin;{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/windows/bin",
-        "darwin": "{VIRTUAL_ENV}/localized/ffmpeg_exec/darwin/bin:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/darwin/bin",
-        "linux": "{VIRTUAL_ENV}/localized/ffmpeg_exec/linux:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/linux"
-    },
-    "PATH": [
-        "{PYPE_CONFIG}/launchers",
-        "{PYPE_APP_ROOT}",
-        "{FFMPEG_PATH}",
-        "{PATH}"
-    ],
-    "PYPE_OCIO_CONFIG": "{STUDIO_SOFT}/OpenColorIO-Configs",
-    "PYTHONPATH": {
-        "windows": "{VIRTUAL_ENV}/Lib/site-packages;{PYPE_MODULE_ROOT}/pype/tools;{PYTHONPATH}",
-        "linux": "{VIRTUAL_ENV}/lib/python{PYTHON_VERSION}/site-packages:{PYPE_MODULE_ROOT}/pype/tools:{PYTHONPATH}",
-        "darwin": "{VIRTUAL_ENV}/lib/python{PYTHON_VERSION}/site-packages:{PYPE_MODULE_ROOT}/pype/tools:{PYTHONPATH}"
-    },
-    "PYPE_PROJECT_CONFIGS": "{PYPE_SETUP_PATH}/../studio-project-configs",
-    "PYPE_PYTHON_EXE": {
-        "windows": "{VIRTUAL_ENV}/Scripts/python.exe",
-        "linux": "{VIRTUAL_ENV}/Scripts/python",
-        "darwin": "{VIRTUAL_ENV}/bin/python"
-    },
-    "PYBLISH_GUI": "pyblish_pype"
-}

@@ -93,6 +93,10 @@
             "enabled": true,
             "note_with_intent_template": "",
             "note_labels": []
+        },
+        "ValidateFtrackAttributes": {
+            "enabled": false,
+            "ftrack_custom_attributes": {}
         }
     }
 }

@@ -94,8 +94,8 @@
         "deadline_department": "",
         "deadline_pool": "",
         "deadline_group": "",
-        "deadline_chunk_size": "",
-        "deadline_priority": "",
+        "deadline_chunk_size": 1,
+        "deadline_priority": 50,
         "aov_filter": {
             "maya": [
                 ".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*"

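deadline_chunk_size and deadline_priority switch from empty strings to integers here; a schema hunk further below changes their editor type from "text" to "number" to match. Illustrative consumption only, with "render_settings" standing in for the loaded settings dict:

# "render_settings" is an illustrative dict holding the values above.
render_settings = {"deadline_chunk_size": 1, "deadline_priority": 50}
# Values can now be used without casting; "ChunkSize" and "Priority"
# are standard Deadline job-info keys.
job_info = {
    "ChunkSize": render_settings["deadline_chunk_size"],
    "Priority": render_settings["deadline_priority"],
}
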
@@ -111,7 +111,7 @@
         }
     },
     "tools": {
-        "Creator": {
+        "creator": {
             "families_smart_select": {
                 "Render": [
                     "light",

@@ -179,4 +179,4 @@
             }
         }
     }
-}
+}

@@ -1,5 +1,5 @@
 {
-    "maya_capture": {
+    "capture": {
         "Codec": {
             "compression": "jpg",
             "format": "image",

@@ -107,9 +107,10 @@
             "overscan": 1.0
         }
     },
+    "ext_mapping": {},
     "publish": {
         "CollectMayaRender": {
-            "sync_workfile_version": true
+            "sync_workfile_version": false
         },
         "ValidateCameraAttributes": {
             "enabled": true,

@@ -134,6 +135,9 @@
         "ValidateMeshHasOverlappingUVs": {
             "enabled": false
         },
+        "ValidateAttributes": {
+            "enabled": false
+        },
         "ExtractCameraAlembic": {
             "enabled": true,
             "optional": true,

@@ -316,4 +320,4 @@
         "ValidateNoAnimation": false
     }
 }
-}
+}

@@ -35,7 +35,7 @@
             ]
         },
-        "ValidateNukeWriteKnobs": {
+        "ValidateKnobs": {
             "enabled": true,
             "knobs": {
                 "render": {

@@ -87,4 +87,4 @@
         ]
     },
     "filters": {}
-}
+}

@@ -123,4 +123,4 @@
         "help": "Script exported from matchmoving application"
     }
 }
-}
+}

@@ -18,8 +18,8 @@
         ]
     },
     "PYTHONPATH": [
-        "{PYPE_SETUP_PATH}/repos/avalon-core/setup/maya",
-        "{PYPE_SETUP_PATH}/repos/maya-look-assigner",
+        "{PYPE_MODULE_ROOT}/repos/avalon-core/setup/maya",
+        "{PYPE_MODULE_ROOT}/repos/maya-look-assigner",
         "{PYTHON_ENV}/python2/Lib/site-packages",
         "{PYTHONPATH}"
     ],

@@ -140,8 +140,8 @@
         ]
     },
     "PYTHONPATH": [
-        "{PYPE_SETUP_PATH}/repos/avalon-core/setup/maya",
-        "{PYPE_SETUP_PATH}/repos/maya-look-assigner",
+        "{PYPE_MODULE_ROOT}/repos/avalon-core/setup/maya",
+        "{PYPE_MODULE_ROOT}/repos/maya-look-assigner",
         "{PYTHON_ENV}/python2/Lib/site-packages",
         "{PYTHONPATH}"
     ],

@@ -241,7 +241,7 @@
         ]
     },
     "NUKE_PATH": [
-        "{PYPE_SETUP_PATH}/repos/avalon-core/setup/nuke/nuke_path",
+        "{PYPE_MODULE_ROOT}/repos/avalon-core/setup/nuke/nuke_path",
         "{PYPE_MODULE_ROOT}/setup/nuke/nuke_path",
         "{PYPE_STUDIO_PLUGINS}/nuke"
     ],

@@ -364,7 +364,7 @@
         ]
     },
     "NUKE_PATH": [
-        "{PYPE_SETUP_PATH}/repos/avalon-core/setup/nuke/nuke_path",
+        "{PYPE_MODULE_ROOT}/repos/avalon-core/setup/nuke/nuke_path",
         "{PYPE_MODULE_ROOT}/setup/nuke/nuke_path",
         "{PYPE_STUDIO_PLUGINS}/nuke"
     ],

@@ -917,9 +917,9 @@
             "CREATE_NEW_CONSOLE"
         ]
     },
-    "BLENDER_USER_SCRIPTS": "{PYPE_SETUP_PATH}/repos/avalon-core/setup/blender",
+    "BLENDER_USER_SCRIPTS": "{PYPE_MODULE_ROOT}/repos/avalon-core/setup/blender",
     "PYTHONPATH": [
-        "{PYPE_SETUP_PATH}/repos/avalon-core/setup/blender",
+        "{PYPE_MODULE_ROOT}/repos/avalon-core/setup/blender",
         "{PYTHONPATH}"
     ],
     "CREATE_NEW_CONSOLE": "yes"

@@ -1277,7 +1277,7 @@
         "QT_PREFERRED_BINDING"
     ]
 },
-"AVALON_UNREAL_PLUGIN": "{PYPE_SETUP_PATH}/repos/avalon-unreal-integration",
+"AVALON_UNREAL_PLUGIN": "{PYPE_MODULE_ROOT}/repos/avalon-unreal-integration",
 "PYPE_LOG_NO_COLORS": "True",
 "QT_PREFERRED_BINDING": "PySide"
 },

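These hunks migrate path templates from {PYPE_SETUP_PATH} to {PYPE_MODULE_ROOT}. A rough illustration of how such {KEY} templates resolve; the real computation is done by the acre module, and the root path here is invented for the example:

env = {
    "PYPE_MODULE_ROOT": "/opt/pype/repos/pype",  # illustrative value
    "PYTHONPATH": "{PYPE_MODULE_ROOT}/repos/avalon-core/setup/maya",
}
resolved = env["PYTHONPATH"].format(**env)
# -> '/opt/pype/repos/pype/repos/avalon-core/setup/maya'
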
@@ -20,7 +20,8 @@
         "PYPE_PROJECT_CONFIGS",
         "PYPE_PYTHON_EXE",
         "PYPE_OCIO_CONFIG",
-        "PYBLISH_GUI"
+        "PYBLISH_GUI",
+        "PYBLISHPLUGINPATH"
     ]
 },
 "FFMPEG_PATH": {

@@ -45,6 +46,9 @@
         "darwin": "{VIRTUAL_ENV}/bin/python"
     },
     "PYPE_OCIO_CONFIG": "{STUDIO_SOFT}/OpenColorIO-Configs",
-    "PYBLISH_GUI": "pyblish_pype"
+    "PYBLISH_GUI": "pyblish_pype",
+    "PYBLISHPLUGINPATH": [
+        "{PYPE_MODULE_ROOT}/pype/plugins/ftrack/publish"
+    ]
 }
 }

@@ -43,8 +43,8 @@
     "ftrack_server": "https://pype.ftrackapp.com",
     "ftrack_actions_path": [],
     "ftrack_events_path": [],
-    "FTRACK_EVENTS_MONGO_DB": "pype",
-    "FTRACK_EVENTS_MONGO_COL": "ftrack_events",
+    "mongo_database_name": "pype",
+    "mongo_collection_name": "ftrack_events",
     "intent": {
         "items": {
             "-": "-",

@@ -131,29 +131,6 @@
                 "read_security_role": []
             }
         }
     },
-    "environment": {
-        "__environment_keys__": {
-            "ftrack": [
-                "FTRACK_ACTIONS_PATH",
-                "FTRACK_EVENTS_PATH",
-                "PYBLISHPLUGINPATH",
-                "PYTHONPATH"
-            ]
-        },
-        "FTRACK_ACTIONS_PATH": [
-            "{PYPE_MODULE_ROOT}/pype/modules/ftrack/actions"
-        ],
-        "FTRACK_EVENTS_PATH": [
-            "{PYPE_MODULE_ROOT}/pype/modules/ftrack/events"
-        ],
-        "PYBLISHPLUGINPATH": [
-            "{PYPE_MODULE_ROOT}/pype/plugins/ftrack/publish"
-        ],
-        "PYTHONPATH": [
-            "{PYPE_MODULE_ROOT}/pype/vendor",
-            "{PYTHONPATH}"
-        ]
-    }
     },
     "Rest Api": {

@@ -169,11 +146,11 @@
         "enabled": false,
         "workspace_name": "studio name"
     },
-    "Deadline": {
+    "deadline": {
         "enabled": true,
         "DEADLINE_REST_URL": "http://localhost:8082"
     },
-    "Muster": {
+    "muster": {
         "enabled": false,
         "MUSTER_REST_URL": "http://127.0.0.1:9890",
         "templates_mapping": {

@@ -202,4 +179,4 @@
     "Idle Manager": {
         "enabled": true
     }
-}
+}

@@ -6,261 +6,310 @@
"checkbox_key": "enabled",
"is_file": true,
"children": [
{
"type": "splitter"
},
{
"type": "label",
"label": "Additional Ftrack paths"
},
{
"type": "list",
"key": "ftrack_actions_path",
"label": "Action paths",
"object_type": "text"
},
{
"type": "list",
"key": "ftrack_events_path",
"label": "Event paths",
"object_type": "text"
},
{
"type": "splitter"
},
{
"type": "splitter"
},
{
"type": "label",
"label": "Additional Ftrack paths"
},
{
"type": "list",
"key": "ftrack_actions_path",
"label": "Action paths",
"object_type": "text"
},
{
"type": "list",
"key": "ftrack_events_path",
"label": "Event paths",
"object_type": "text"
},
{
"type": "splitter"
},
{
"type": "dict",
"key": "events",
"label": "Server Events",
"children": [
{
"type": "dict",
"key": "events",
"label": "Server Events",
"key": "sync_to_avalon",
"label": "Sync to avalon",
"checkbox_key": "enabled",
"children": [
{
"type": "dict",
"key": "sync_to_avalon",
"label": "Sync to avalon",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "label",
"label": "Allow name and hierarchy change only if following statuses are on all children tasks"
},
{
"type": "list",
"key": "statuses_name_change",
"label": "Statuses",
"object_type": {
"type": "text",
"multiline": false
}
}
]
},
{
"type": "dict",
"key": "push_frame_values_to_task",
"label": "Sync Hierarchical and Entity Attributes",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
}, {
"type": "list",
"key": "interest_entity_types",
"label": "Entity types of interest",
"object_type": {
"type": "text",
"multiline": false
}
}, {
"type": "list",
"key": "interest_attributess",
"label": "Attributes to sync",
"object_type": {
"type": "text",
"multiline": false
}
}]
},
{
"type": "dict",
"key": "thumbnail_updates",
"label": "Update Hierarchy thumbnails",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},{
"type": "label",
"label": "Push thumbnail from version, up through multiple hierarchy levels."
},{
"type": "number",
"key": "levels",
"label": "Levels"
}]
},
{
"type": "dict",
"key": "user_assignment",
"label": "Run script on user assignments",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
}]
},
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "label",
"label": "Allow name and hierarchy change only if following statuses are on all children tasks"
},
{
"type": "list",
"key": "statuses_name_change",
"label": "Statuses",
"object_type":
{
"type": "dict",
"key": "status_update",
"label": "Update status on task action",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"key": "mapping",
"type": "dict-modifiable",
"object_type": {
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "status_task_to_parent",
"label": "Sync status from Task to Parent",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"key": "parent_status_match_all_task_statuses",
"type": "dict-modifiable",
"label": "Change parent if all tasks match",
"object_type": {
"type": "list",
"object_type": "text"
}
},
{
"key": "parent_status_by_task_status",
"type": "dict-modifiable",
"label": "Change parent status if a single task matches",
"object_type": {
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "status_task_to_version",
"label": "Sync status from Task to Version",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
}, {
"type": "dict-modifiable",
"key": "mapping",
"object_type": {
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "status_version_to_task",
"label": "Sync status from Version to Task",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
}, {
"type": "dict-modifiable",
"key": "mapping",
"object_type": {
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "first_version_status",
"label": "Set status on first created version",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},{
"type": "text",
"key": "status",
"label": "Status"
}
]
},
{
"type": "dict",
"key": "next_task_update",
"label": "Update status on next task",
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},{
"type": "dict-modifiable",
"key": "mapping",
"object_type": {
"type": "text"
}
}]
"type": "text",
"multiline": false
}
]
}]
},
{
"type": "dict",
"collapsable": true,
"key": "publish",
"label": "Publish plugins",
"is_file": true,
"children": [{
"key": "push_frame_values_to_task",
"label": "Sync Hierarchical and Entity Attributes",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "list",
"key": "interest_entity_types",
"label": "Entity types of interest",
"object_type":
{
"type": "text",
"multiline": false
}
},
{
"type": "list",
"key": "interest_attributess",
"label": "Attributes to sync",
"object_type":
{
"type": "text",
"multiline": false
}
}]
},
{
"type": "dict",
"key": "thumbnail_updates",
"label": "Update Hierarchy thumbnails",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "label",
"label": "Push thumbnail from version, up through multiple hierarchy levels."
},
{
"type": "number",
"key": "levels",
"label": "Levels"
}]
},
{
"type": "dict",
"key": "user_assignment",
"label": "Run script on user assignments",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
}]
},
{
"type": "dict",
"key": "status_update",
"label": "Update status on task action",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"key": "mapping",
"type": "dict-modifiable",
"object_type":
{
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "status_task_to_parent",
"label": "Sync status from Task to Parent",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"key": "parent_status_match_all_task_statuses",
"type": "dict-modifiable",
"label": "Change parent if all tasks match",
"object_type":
{
"type": "list",
"object_type": "text"
}
},
{
"key": "parent_status_by_task_status",
"type": "dict-modifiable",
"label": "Change parent status if a single task matches",
"object_type":
{
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "status_task_to_version",
"label": "Sync status from Task to Version",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "dict-modifiable",
"key": "mapping",
"object_type":
{
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "status_version_to_task",
"label": "Sync status from Version to Task",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "dict-modifiable",
"key": "mapping",
"object_type":
{
"type": "list",
"object_type": "text"
}
}]
},
{
"type": "dict",
"key": "first_version_status",
"label": "Set status on first created version",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "text",
"key": "status",
"label": "Status"
}]
},
{
"type": "dict",
"key": "next_task_update",
"label": "Update status on next task",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "dict-modifiable",
"key": "mapping",
"object_type":
{
"type": "text"
}
}]
}]
},
{
"type": "dict",
"collapsable": true,
"key": "publish",
"label": "Publish plugins",
"is_file": true,
"children": [
{
"type": "dict",
"collapsable": true,
"checkbox_key": "enabled",
"key": "IntegrateFtrackNote",
"label": "IntegrateFtrackNote",
"is_group": true,
"children": [{
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
}, {
},
{
"type": "text",
"key": "note_with_intent_template",
"label": "Note with intent template"
}, {
},
{
"type": "list",
"object_type": "text",
"key": "note_labels",
"label": "Note labels"
}]
}]
}
]
},

{
"type": "dict",
"collapsable": true,
"checkbox_key": "enabled",
"key": "ValidateFtrackAttributes",
"label": "ValidateFtrackAttributes",
"is_group": true,
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "raw-json",
"key": "ftrack_custom_attributes",
"label": "Custom attributes to validate"
}]
}

]
}]
}

@@ -57,8 +57,8 @@
     "type": "dict",
     "collapsable": true,
     "checkbox_key": "enabled",
-    "key": "ValidateNukeWriteKnobs",
-    "label": "ValidateNukeWriteKnobs",
+    "key": "ValidateKnobs",
+    "label": "ValidateKnobs",
     "is_group": true,
     "children": [{
         "type": "boolean",

@@ -344,12 +344,12 @@
         "label": "Deadline Group"
     },
     {
-        "type": "text",
+        "type": "number",
         "key": "deadline_chunk_size",
         "label": "Deadline Chunk Size"
     },
     {
-        "type": "text",
+        "type": "number",
         "key": "deadline_priority",
         "label": "Deadline Priotity"
     },

@@ -7,7 +7,7 @@
 {
     "type": "dict",
     "collapsable": true,
-    "key": "Creator",
+    "key": "creator",
     "label": "Creator",
     "children": [
         {

File diff suppressed because it is too large

@@ -123,6 +123,25 @@
                 "label": "Enabled"
             }]
         },
+        {
+            "type": "dict",
+            "collapsable": true,
+            "key": "ValidateAttributes",
+            "label": "ValidateAttributes",
+            "checkbox_key": "enabled",
+            "children": [
+                {
+                    "type": "boolean",
+                    "key": "enabled",
+                    "label": "Enabled"
+                },
+                {
+                    "type": "raw-json",
+                    "key": "attributes",
+                    "label": "Attributes"
+                }
+            ]
+        },
         {
             "type": "splitter"
         },

@@ -43,12 +43,12 @@
     },
     {
         "type": "text",
-        "key": "FTRACK_EVENTS_MONGO_DB",
+        "key": "mongo_database_name",
         "label": "Event Mongo DB"
     },
     {
         "type": "text",
-        "key": "FTRACK_EVENTS_MONGO_COL",
+        "key": "mongo_collection_name",
         "label": "Events Mongo Collection"
     },
     {

@@ -151,14 +151,5 @@
             }]
         }
     }]
-    },
-    {
-        "type": "splitter"
-    },
-    {
-        "key": "environment",
-        "label": "Environment",
-        "type": "raw-json",
-        "env_group_key": "ftrack"
     }]
 }

@@ -100,7 +100,7 @@
             ]
         }, {
             "type": "dict",
-            "key": "Deadline",
+            "key": "deadline",
             "label": "Deadline",
             "collapsable": true,
             "checkbox_key": "enabled",

@@ -115,7 +115,7 @@
             }]
         }, {
             "type": "dict",
-            "key": "Muster",
+            "key": "muster",
             "label": "Muster",
             "collapsable": true,
             "checkbox_key": "enabled",

@@ -126,7 +126,7 @@
         }, {
             "type": "text",
             "key": "MUSTER_REST_URL",
-            "label": "Muster Resl URL"
+            "label": "Muster Rest URL"
         }, {
             "type": "dict-modifiable",
             "object_type": {

@@ -1,13 +1,19 @@
+/* :root {
+    --border-color-: #464b54;
+}
+*/
+
+
 QWidget {
     color: #bfccd6;
-    background-color: #293742;
+    background-color: #282C34;
     font-size: 12px;
     border-radius: 0px;
 }
 
 QMenu {
     border: 1px solid #555555;
-    background-color: #1d272f;
+    background-color: #21252B;
 }
 
 QMenu::item {

@@ -26,24 +32,24 @@ QCheckBox::indicator {}
 QCheckBox::indicator:focus {}
 
 QLineEdit, QSpinBox, QDoubleSpinBox, QPlainTextEdit, QTextEdit {
-    border: 1px solid #aaaaaa;
+    border: 1px solid #464b54;
     border-radius: 3px;
-    background-color: #1d272f;
+    background-color: #21252B;
 }
 
 QLineEdit:disabled, QSpinBox:disabled, QDoubleSpinBox:disabled, QPlainTextEdit:disabled, QTextEdit:disabled, QPushButton:disabled {
-    background-color: #4e6474;
+    background-color: #464b54;
 }
 
 QLineEdit:focus, QSpinBox:focus, QDoubleSpinBox:focus, QPlainTextEdit:focus, QTextEdit:focus {
-    border: 1px solid #ffffff;
+    border: 1px solid #839caf;
 }
 
 QComboBox {
-    border: 1px solid #aaaaaa;
+    border: 1px solid #464b54;
     border-radius: 3px;
     padding: 2px 2px 4px 4px;
-    background: #1d272f;
+    background: #21252B;
 }
 
 QComboBox QAbstractItemView::item {

@@ -56,25 +62,25 @@ QToolButton {
 
 QLabel {
     background: transparent;
-    color: #7390a5;
+    color: #969b9e;
 }
-QLabel:hover {color: #839caf;}
+QLabel:hover {color: #b8c1c5;}
 
-QLabel[state="studio"] {color: #bfccd6;}
+QLabel[state="studio"] {color: #73C990;}
 QLabel[state="studio"]:hover {color: #ffffff;}
-QLabel[state="modified"] {color: #137cbd;}
-QLabel[state="modified"]:hover {color: #1798e8;}
-QLabel[state="overriden-modified"] {color: #137cbd;}
-QLabel[state="overriden-modified"]:hover {color: #1798e8;}
+QLabel[state="modified"] {color: #189aea;}
+QLabel[state="modified"]:hover {color: #46b1f3;}
+QLabel[state="overriden-modified"] {color: #189aea;}
+QLabel[state="overriden-modified"]:hover {color: #46b1f3;}
 QLabel[state="overriden"] {color: #ff8c1a;}
 QLabel[state="overriden"]:hover {color: #ffa64d;}
 QLabel[state="invalid"] {color: #ad2e2e;}
 QLabel[state="invalid"]:hover {color: #ad2e2e;}
 
 
-QWidget[input-state="studio"] {border-color: #bfccd6;}
-QWidget[input-state="modified"] {border-color: #137cbd;}
-QWidget[input-state="overriden-modified"] {border-color: #137cbd;}
+QWidget[input-state="studio"] {border-color: #858a94;}
+QWidget[input-state="modified"] {border-color: #189aea;}
+QWidget[input-state="overriden-modified"] {border-color: #189aea;}
 QWidget[input-state="overriden"] {border-color: #ff8c1a;}
 QWidget[input-state="invalid"] {border-color: #ad2e2e;}

@@ -84,7 +90,9 @@ QPushButton {
     padding: 5px;
 }
 QPushButton:hover {
-    background-color: #31424e;
+    background-color: #333840;
+    border: 1px solid #fff;
+    color: #fff;
 }
 QPushButton[btn-type="tool-item"] {
     border: 1px solid #bfccd6;

@@ -92,8 +100,8 @@ QPushButton[btn-type="tool-item"] {
 }
 
 QPushButton[btn-type="tool-item"]:hover {
-    border-color: #137cbd;
-    color: #137cbd;
+    border-color: #189aea;
+    color: #46b1f3;
     background-color: transparent;
 }
 

@@ -103,16 +111,16 @@ QPushButton[btn-type="tool-item-icon"] {
 }
 
 QPushButton[btn-type="expand-toggle"] {
-    background: #1d272f;
+    background: #21252B;
 }
 
 #GroupWidget {
-    border-bottom: 1px solid #1d272f;
+    border-bottom: 1px solid #21252B;
 }
 
 #ProjectListWidget QListView {
-    border: 1px solid #aaaaaa;
-    background: #1d272f;
+    border: 1px solid #464b54;
+    background: #21252B;
 }
 #ProjectListWidget QLabel {
     background: transparent;

@@ -123,8 +131,8 @@ QPushButton[btn-type="expand-toggle"] {
     font-size: 12px;
 }
 
-#DictKey[state="studio"] {border-color: #bfccd6;}
-#DictKey[state="modified"] {border-color: #137cbd;}
+#DictKey[state="studio"] {border-color: #464b54;}
+#DictKey[state="modified"] {border-color: #189aea;}
 #DictKey[state="overriden"] {border-color: #00f;}
 #DictKey[state="overriden-modified"] {border-color: #0f0;}
 #DictKey[state="invalid"] {border-color: #ad2e2e;}

@@ -141,9 +149,9 @@ QPushButton[btn-type="expand-toggle"] {
 }
 
 #SideLineWidget {
-    background-color: #31424e;
+    background-color: #333942;
     border-style: solid;
-    border-color: #3b4f5e;
+    border-color: #4e5254;
     border-left-width: 3px;
     border-bottom-width: 0px;
     border-right-width: 0px;

@@ -151,14 +159,14 @@ QPushButton[btn-type="expand-toggle"] {
 }
 
 #SideLineWidget:hover {
-    border-color: #58768d;
+    border-color: #7d8386;
 }
 
-#SideLineWidget[state="child-studio"] {border-color: #455c6e;}
-#SideLineWidget[state="child-studio"]:hover {border-color: #62839d;}
+#SideLineWidget[state="child-studio"] {border-color: #56a06f;}
+#SideLineWidget[state="child-studio"]:hover {border-color: #73C990;}
 
 #SideLineWidget[state="child-modified"] {border-color: #106aa2;}
-#SideLineWidget[state="child-modified"]:hover {border-color: #137cbd;}
+#SideLineWidget[state="child-modified"]:hover {border-color: #189aea;}
 
 #SideLineWidget[state="child-invalid"] {border-color: #ad2e2e;}
 #SideLineWidget[state="child-invalid"]:hover {border-color: #c93636;}

@@ -167,7 +175,7 @@ QPushButton[btn-type="expand-toggle"] {
 #SideLineWidget[state="child-overriden"]:hover {border-color: #ff8c1a;}
 
 #SideLineWidget[state="child-overriden-modified"] {border-color: #106aa2;}
-#SideLineWidget[state="child-overriden-modified"]:hover {border-color: #137cbd;}
+#SideLineWidget[state="child-overriden-modified"]:hover {border-color: #189aea;}
 
 #MainWidget {
     background: #141a1f;

@@ -177,12 +185,12 @@ QPushButton[btn-type="expand-toggle"] {
     background: transparent;
 }
 #DictAsWidgetBody[show_borders="1"] {
-    border: 2px solid #cccccc;
+    border: 1px solid #4e5254;
     border-radius: 5px;
 }
 
 #SplitterItem {
-    background-color: #1d272f;
+    background-color: #21252B;
 }
 
 QTabWidget::pane {

@@ -200,18 +208,18 @@ QTabBar::tab {
 }
 
 QTabBar::tab:selected {
-    background: #293742;
+    background: #282C34;
     border-color: #9B9B9B;
     border-bottom-color: #C2C7CB;
 }
 
 QTabBar::tab:!selected {
     margin-top: 2px;
-    background: #1d272f;
+    background: #21252B;
 }
 
 QTabBar::tab:!selected:hover {
-    background: #3b4f5e;
+    background: #333840;
 }
 
 

@@ -231,13 +239,13 @@ QTabBar::tab:only-one {
 QScrollBar:horizontal {
     height: 15px;
     margin: 3px 15px 3px 15px;
-    border: 1px transparent #1d272f;
+    border: 1px transparent #21252B;
     border-radius: 4px;
-    background-color: #1d272f;
+    background-color: #21252B;
 }
 
 QScrollBar::handle:horizontal {
-    background-color: #61839e;
+    background-color: #4B5362;
     min-width: 5px;
     border-radius: 4px;
 }

@@ -285,15 +293,15 @@ QScrollBar::add-page:horizontal, QScrollBar::sub-page:horizontal {
 }
 
 QScrollBar:vertical {
-    background-color: #1d272f;
+    background-color: #21252B;
     width: 15px;
     margin: 15px 3px 15px 3px;
-    border: 1px transparent #1d272f;
+    border: 1px transparent #21252B;
     border-radius: 4px;
 }
 
 QScrollBar::handle:vertical {
-    background-color: #61839e;
+    background-color: #4B5362;
     min-height: 5px;
     border-radius: 4px;
 }

@@ -412,7 +412,6 @@ class ProjectListWidget(QtWidgets.QWidget):
         self.setObjectName("ProjectListWidget")
 
         label_widget = QtWidgets.QLabel("Projects")
-        label_widget.setProperty("state", "studio")
         project_list = ProjectListView(self)
         project_list.setModel(QtGui.QStandardItemModel())
 

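The removed setProperty call ties into the stylesheet changes above: selectors such as QLabel[state="..."] match a Qt dynamic property named "state". A hedged sketch of driving that property at runtime, assuming the Qt.py binding wrapper and a running QApplication:

from Qt import QtWidgets  # Qt.py wrapper; a QApplication must exist

label = QtWidgets.QLabel("Projects")
label.setProperty("state", "modified")
# Changing a dynamic property requires a re-polish so the stylesheet
# selector is re-evaluated for this widget.
label.style().unpolish(label)
label.style().polish(label)
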
@@ -1 +1 @@
-Subproject commit e55c3d10a63cb4d660fcb9eb11f7448b86b15443
+Subproject commit 6b53d813877b283ff534d8f2e998213866e016d6