Merge 2x/develop + convert config to settings across the board

Milan Kolar 2020-12-04 10:36:24 +01:00
commit 206fab3a91
198 changed files with 8105 additions and 5952 deletions
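The core of this commit is replacing the old preset access (`config.get_presets()`) with per-project settings lookups. A minimal sketch of the migration pattern, assuming a pype environment where `pype.api.get_project_settings` is importable and `AVALON_PROJECT` is set; `host` and `plugin_type` are illustrative values:

import os
from pype.api import get_project_settings

# Hypothetical example values; in the patched discover() below they come
# from the registered host and the plugin superclass name.
host = "nuke"
plugin_type = "create"

# Old pattern removed by this commit:
#     config_data = config.get_presets()['plugins'][host][plugin_type]

# New pattern: settings are resolved per project, taken here from the
# environment the same way the patched code does.
project_name = os.environ["AVALON_PROJECT"]
settings = get_project_settings(project_name)[host][plugin_type]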

.gitignore vendored (2 changed lines)
View file

@ -15,6 +15,8 @@ __pycache__/
Icon
# Thumbnails
._*
# rope project dir
.ropeproject
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd

View file

@ -2,7 +2,7 @@ import os
from pyblish import api as pyblish
from avalon import api as avalon
from .api import config, Anatomy
from .api import get_project_settings, Anatomy
from .lib import filter_pyblish_plugins
@ -48,16 +48,19 @@ def patched_discover(superclass):
elif superclass.__name__.split(".")[-1] == "Creator":
plugin_type = "create"
print(">>> trying to find presets for {}:{} ...".format(host, plugin_type))
print(">>> Finding presets for {}:{} ...".format(host, plugin_type))
try:
config_data = config.get_presets()['plugins'][host][plugin_type]
settings = (
get_project_settings(os.environ['AVALON_PROJECT'])
[host][plugin_type]
)
except KeyError:
print("*** no presets found.")
else:
for plugin in plugins:
if plugin.__name__ in config_data:
if plugin.__name__ in settings:
print(">>> We have preset for {}".format(plugin.__name__))
for option, value in config_data[plugin.__name__].items():
for option, value in settings[plugin.__name__].items():
if option == "enabled" and value is False:
setattr(plugin, "active", False)
print(" - is disabled by preset")
@ -104,6 +107,7 @@ def install():
anatomy.set_root_environments()
avalon.register_root(anatomy.roots)
# apply monkey patched discover to original one
log.info("Patching discovery")
avalon.discover = patched_discover

View file

@ -1,7 +1,9 @@
from .settings import (
system_settings,
project_settings,
environments
get_system_settings,
get_project_settings,
get_current_project_settings,
get_anatomy_settings,
get_environments
)
from pypeapp import (
Logger,
@ -50,9 +52,11 @@ from .lib import (
from .lib import _subprocess as subprocess
__all__ = [
"system_settings",
"project_settings",
"environments",
"get_system_settings",
"get_project_settings",
"get_current_project_settings",
"get_anatomy_settings",
"get_environments",
"Logger",
"Anatomy",

View file

@ -0,0 +1,45 @@
import os
from pype.lib import PreLaunchHook
class AfterEffectsPrelaunchHook(PreLaunchHook):
"""Launch arguments preparation.
Hook prepends the python executable and a command that launches the
AfterEffects implementation before the AfterEffects executable.
"""
app_groups = ["aftereffects"]
def execute(self):
# Pop AfterEffects executable
aftereffects_executable = self.launch_context.launch_args.pop(0)
# Pop rest of launch arguments - There should not be other arguments!
remainders = []
while self.launch_context.launch_args:
remainders.append(self.launch_context.launch_args.pop(0))
new_launch_args = [
self.python_executable(),
"-c",
(
"import avalon.aftereffects;"
"avalon.aftereffects.launch(\"{}\")"
).format(aftereffects_executable)
]
# Append as a whole list as these arguments should not be separated
self.launch_context.launch_args.append(new_launch_args)
if remainders:
self.log.warning((
"There are unexpected launch arguments "
"in AfterEffects launch. {}"
).format(str(remainders)))
self.launch_context.launch_args.extend(remainders)
def python_executable(self):
"""Should lead to python executable."""
# TODO change in Pype 3
return os.environ["PYPE_PYTHON_EXE"]

View file

@ -0,0 +1,125 @@
import os
import shutil
import winreg
from pype.lib import PreLaunchHook
from pype.hosts import celaction
class CelactionPrelaunchHook(PreLaunchHook):
"""
Prepare CelAction launch.
Hook writes CelAction publish and render settings into the Windows
registry and appends the workfile path to the launch arguments, copying
it from a template scene when it does not exist yet.
"""
workfile_ext = "scn"
app_groups = ["celaction"]
platforms = ["windows"]
def execute(self):
# Add workfile path to launch arguments
workfile_path = self.workfile_path()
if workfile_path:
self.launch_context.launch_args.append(workfile_path)
project_name = self.data["project_name"]
asset_name = self.data["asset_name"]
task_name = self.data["task_name"]
# get publish version of celaction
app = "celaction_publish"
# setting output parameters
path = r"Software\CelAction\CelAction2D\User Settings"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(
winreg.HKEY_CURRENT_USER,
"Software\\CelAction\\CelAction2D\\User Settings", 0,
winreg.KEY_ALL_ACCESS)
# TODO: change to root path and pyblish standalone to premiere way
pype_root_path = os.getenv("PYPE_SETUP_PATH")
path = os.path.join(pype_root_path, "pype.bat")
winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, path)
parameters = [
"launch",
f"--app {app}",
f"--project {project_name}",
f"--asset {asset_name}",
f"--task {task_name}",
"--currentFile \\\"\"*SCENE*\"\\\"",
"--chunk 10",
"--frameStart *START*",
"--frameEnd *END*",
"--resolutionWidth *X*",
"--resolutionHeight *Y*",
# "--programDir \"'*PROGPATH*'\""
]
winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
" ".join(parameters))
# setting resolution parameters
path = r"Software\CelAction\CelAction2D\User Settings\Dialogs"
path += r"\SubmitOutput"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
winreg.KEY_ALL_ACCESS)
winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920)
winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080)
# making sure message dialogs don't appear when overwriting
path = r"Software\CelAction\CelAction2D\User Settings\Messages"
path += r"\OverwriteScene"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
winreg.KEY_ALL_ACCESS)
winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
path = r"Software\CelAction\CelAction2D\User Settings\Messages"
path += r"\SceneSaved"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
winreg.KEY_ALL_ACCESS)
winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
def workfile_path(self):
workfile_path = self.data["last_workfile_path"]
# copy workfile from template if none exists on the path yet
if not os.path.exists(workfile_path):
# TODO add ability to set different template workfile path via
# settings
pype_celaction_dir = os.path.dirname(
os.path.abspath(celaction.__file__)
)
template_path = os.path.join(
pype_celaction_dir,
"celaction_template_scene.scn"
)
if not os.path.exists(template_path):
self.log.warning(
"Couldn't find workfile template file in {}".format(
template_path
)
)
return
self.log.info(
f"Creating workfile from template: \"{template_path}\""
)
# Copy template workfile to the new destination
shutil.copy2(
os.path.normpath(template_path),
os.path.normpath(workfile_path)
)
self.log.info(f"Workfile to open: \"{workfile_path}\"")
return workfile_path

View file

@ -0,0 +1,50 @@
import os
import importlib
from pype.lib import PreLaunchHook
from pype.hosts.fusion import utils
class FusionPrelaunch(PreLaunchHook):
"""
This hook validates the Python 3.6 installation and the Fusion utility
scripts directory and runs the Avalon Fusion setup before launch.
"""
app_groups = ["fusion"]
def execute(self):
# making sure python 3.6 is installed at the provided path
py36_dir = os.path.normpath(self.env.get("PYTHON36", ""))
assert os.path.isdir(py36_dir), (
"Python 3.6 is not installed at the provided folder path. Either "
"make sure the `environments\resolve.json` is having correctly "
"set `PYTHON36` or make sure Python 3.6 is installed "
f"in given path. \nPYTHON36E: `{py36_dir}`"
)
self.log.info(f"Path to Fusion Python folder: `{py36_dir}`...")
self.env["PYTHON36"] = py36_dir
# setting utility scripts dir for scripts syncing
us_dir = os.path.normpath(
self.env.get("FUSION_UTILITY_SCRIPTS_DIR", "")
)
assert os.path.isdir(us_dir), (
"Fusion utility script dir does not exists. Either make sure "
"the `environments\fusion.json` is having correctly set "
"`FUSION_UTILITY_SCRIPTS_DIR` or reinstall DaVinci Resolve. \n"
f"FUSION_UTILITY_SCRIPTS_DIR: `{us_dir}`"
)
try:
__import__("avalon.fusion")
__import__("pyblish")
except ImportError:
self.log.warning(
"pyblish: Could not load Fusion integration.",
exc_info=True
)
else:
# Resolve Setup integration
importlib.reload(utils)
utils.setup(self.env)

View file

@ -0,0 +1,197 @@
import os
import ftrack_api
from pype.api import get_project_settings
from pype.lib import PostLaunchHook
class PostFtrackHook(PostLaunchHook):
order = None
def execute(self):
project_name = self.data.get("project_name")
asset_name = self.data.get("asset_name")
task_name = self.data.get("task_name")
missing_context_keys = set()
if not project_name:
missing_context_keys.add("project_name")
if not asset_name:
missing_context_keys.add("asset_name")
if not task_name:
missing_context_keys.add("task_name")
if missing_context_keys:
missing_keys_str = ", ".join([
"\"{}\"".format(key) for key in missing_context_keys
])
self.log.debug("Hook {} skipped. Missing data keys: {}".format(
self.__class__.__name__, missing_keys_str
))
return
required_keys = ("FTRACK_SERVER", "FTRACK_API_USER", "FTRACK_API_KEY")
for key in required_keys:
if not os.environ.get(key):
self.log.debug((
"Missing required environment \"{}\""
" for Ftrack after launch procedure."
).format(key))
return
try:
session = ftrack_api.Session(auto_connect_event_hub=True)
self.log.debug("Ftrack session created")
except Exception:
self.log.warning("Couldn't create Ftrack session")
return
try:
entity = self.find_ftrack_task_entity(
session, project_name, asset_name, task_name
)
if entity:
self.ftrack_status_change(session, entity, project_name)
self.start_timer(session, entity, ftrack_api)
except Exception:
self.log.warning(
"Couldn't finish Ftrack procedure.", exc_info=True
)
return
finally:
session.close()
def find_ftrack_task_entity(
self, session, project_name, asset_name, task_name
):
project_entity = session.query(
"Project where full_name is \"{}\"".format(project_name)
).first()
if not project_entity:
self.log.warning(
"Couldn't find project \"{}\" in Ftrack.".format(project_name)
)
return
potential_task_entities = session.query((
"TypedContext where parent.name is \"{}\" and project_id is \"{}\""
).format(asset_name, project_entity["id"])).all()
filtered_entities = []
for _entity in potential_task_entities:
if (
_entity.entity_type.lower() == "task"
and _entity["name"] == task_name
):
filtered_entities.append(_entity)
if not filtered_entities:
self.log.warning((
"Couldn't find task \"{}\" under parent \"{}\" in Ftrack."
).format(task_name, asset_name))
return
if len(filtered_entities) > 1:
self.log.warning((
"Found more than one task \"{}\""
" under parent \"{}\" in Ftrack."
).format(task_name, asset_name))
return
return filtered_entities[0]
def ftrack_status_change(self, session, entity, project_name):
project_settings = get_project_settings(project_name)
status_update = project_settings["ftrack"]["events"]["status_update"]
if not status_update["enabled"]:
self.log.debug(
"Status changes are disabled for project \"{}\"".format(
project_name
)
)
return
status_mapping = status_update["mapping"]
if not status_mapping:
self.log.warning(
"Project \"{}\" does not have set status changes.".format(
project_name
)
)
return
actual_status = entity["status"]["name"].lower()
already_tested = set()
ent_path = "/".join(
[ent["name"] for ent in entity["link"]]
)
while True:
next_status_name = None
for key, value in status_mapping.items():
if key in already_tested:
continue
if actual_status in value or "__any__" in value:
if key != "__ignore__":
next_status_name = key
already_tested.add(key)
break
already_tested.add(key)
if next_status_name is None:
break
try:
query = "Status where name is \"{}\"".format(
next_status_name
)
status = session.query(query).one()
entity["status"] = status
session.commit()
self.log.debug("Changing status to \"{}\" <{}>".format(
next_status_name, ent_path
))
break
except Exception:
session.rollback()
msg = (
"Status \"{}\" in presets wasn't found"
" on Ftrack entity type \"{}\""
).format(next_status_name, entity.entity_type)
self.log.warning(msg)
def start_timer(self, session, entity, _ftrack_api):
"""Start Ftrack timer on task from context."""
self.log.debug("Triggering timer start.")
user_entity = session.query("User where username is \"{}\"".format(
os.environ["FTRACK_API_USER"]
)).first()
if not user_entity:
self.log.warning(
"Couldn't find user with username \"{}\" in Ftrack".format(
os.environ["FTRACK_API_USER"]
)
)
return
source = {
"user": {
"id": user_entity["id"],
"username": user_entity["username"]
}
}
event_data = {
"actionIdentifier": "start.timer",
"selection": [{"entityId": entity["id"], "entityType": "task"}]
}
session.event_hub.publish(
_ftrack_api.event.base.Event(
topic="ftrack.action.launch",
data=event_data,
source=source
),
on_error="ignore"
)
self.log.debug("Timer start triggered successfully.")

View file

@ -0,0 +1,354 @@
import os
import re
import json
import getpass
import copy
from pype.api import (
Anatomy,
get_project_settings
)
from pype.lib import (
env_value_to_bool,
PreLaunchHook,
ApplicationLaunchFailed
)
import acre
import avalon.api
class GlobalHostDataHook(PreLaunchHook):
order = -100
def execute(self):
"""Prepare global objects to `data` that will be used for sure."""
if not self.application.is_host:
self.log.info(
"Skipped hook {}. Application is not marked as host.".format(
self.__class__.__name__
)
)
return
self.prepare_global_data()
self.prepare_host_environments()
self.prepare_context_environments()
def prepare_global_data(self):
"""Prepare global objects to `data` that will be used for sure."""
# Mongo documents
project_name = self.data.get("project_name")
if not project_name:
self.log.info(
"Skipping global data preparation."
" Key `project_name` was not found in launch context."
)
return
self.log.debug("Project name is set to \"{}\"".format(project_name))
# Anatomy
self.data["anatomy"] = Anatomy(project_name)
# Mongo connection
dbcon = avalon.api.AvalonMongoDB()
dbcon.Session["AVALON_PROJECT"] = project_name
dbcon.install()
self.data["dbcon"] = dbcon
# Project document
project_doc = dbcon.find_one({"type": "project"})
self.data["project_doc"] = project_doc
asset_name = self.data.get("asset_name")
if not asset_name:
self.log.warning(
"Asset name was not set. Skipping asset document query."
)
return
asset_doc = dbcon.find_one({
"type": "asset",
"name": asset_name
})
self.data["asset_doc"] = asset_doc
def _merge_env(self, env, current_env):
"""Modified function(merge) from acre module."""
result = current_env.copy()
for key, value in env.items():
# Keep missing keys by not filling `missing` kwarg
value = acre.lib.partial_format(value, data=current_env)
result[key] = value
return result
def prepare_host_environments(self):
"""Modify launch environments based on launched app and context."""
# Keys for getting environments
env_keys = [self.app_group, self.app_name]
asset_doc = self.data.get("asset_doc")
if asset_doc:
# Add tools environments
for key in asset_doc["data"].get("tools_env") or []:
tool = self.manager.tools.get(key)
if tool:
if tool.group_name not in env_keys:
env_keys.append(tool.group_name)
if tool.name not in env_keys:
env_keys.append(tool.name)
self.log.debug(
"Finding environment groups for keys: {}".format(env_keys)
)
settings_env = self.data["settings_env"]
env_values = {}
for env_key in env_keys:
_env_values = settings_env.get(env_key)
if not _env_values:
continue
# Choose right platform
tool_env = acre.parse(_env_values)
# Merge dictionaries
env_values = self._merge_env(tool_env, env_values)
final_env = self._merge_env(
acre.compute(env_values), self.launch_context.env
)
# Update env
self.launch_context.env.update(final_env)
def prepare_context_environments(self):
"""Modify launch environemnts with context data for launched host."""
# Context environments
project_doc = self.data.get("project_doc")
asset_doc = self.data.get("asset_doc")
task_name = self.data.get("task_name")
if (
not project_doc
or not asset_doc
or not task_name
):
self.log.info(
"Skipping context environments preparation."
" Launch context does not contain required data."
)
return
workdir_data = self._prepare_workdir_data(
project_doc, asset_doc, task_name
)
self.data["workdir_data"] = workdir_data
hierarchy = workdir_data["hierarchy"]
anatomy = self.data["anatomy"]
try:
anatomy_filled = anatomy.format(workdir_data)
workdir = os.path.normpath(anatomy_filled["work"]["folder"])
if not os.path.exists(workdir):
self.log.debug(
"Creating workdir folder: \"{}\"".format(workdir)
)
os.makedirs(workdir)
except Exception as exc:
raise ApplicationLaunchFailed(
"Error in anatomy.format: {}".format(str(exc))
)
context_env = {
"AVALON_PROJECT": project_doc["name"],
"AVALON_ASSET": asset_doc["name"],
"AVALON_TASK": task_name,
"AVALON_APP": self.host_name,
"AVALON_APP_NAME": self.app_name,
"AVALON_HIERARCHY": hierarchy,
"AVALON_WORKDIR": workdir
}
self.log.debug(
"Context environemnts set:\n{}".format(
json.dumps(context_env, indent=4)
)
)
self.launch_context.env.update(context_env)
self.prepare_last_workfile(workdir)
def _prepare_workdir_data(self, project_doc, asset_doc, task_name):
hierarchy = "/".join(asset_doc["data"]["parents"])
data = {
"project": {
"name": project_doc["name"],
"code": project_doc["data"].get("code")
},
"task": task_name,
"asset": asset_doc["name"],
"app": self.host_name,
"hierarchy": hierarchy
}
return data
def prepare_last_workfile(self, workdir):
"""last workfile workflow preparation.
Function check if should care about last workfile workflow and tries
to find the last workfile. Both information are stored to `data` and
environments.
Last workfile is filled always (with version 1) even if any workfile
exists yet.
Args:
workdir (str): Path to folder where workfiles should be stored.
"""
_workdir_data = self.data.get("workdir_data")
if not _workdir_data:
self.log.info(
"Skipping last workfile preparation."
" Key `workdir_data` not filled."
)
return
workdir_data = copy.deepcopy(_workdir_data)
project_name = self.data["project_name"]
task_name = self.data["task_name"]
start_last_workfile = self.should_start_last_workfile(
project_name, self.host_name, task_name
)
self.data["start_last_workfile"] = start_last_workfile
# Store boolean as "0"(False) or "1"(True)
self.launch_context.env["AVALON_OPEN_LAST_WORKFILE"] = (
str(int(bool(start_last_workfile)))
)
_sub_msg = "" if start_last_workfile else " not"
self.log.debug(
"Last workfile should{} be opened on start.".format(_sub_msg)
)
# Last workfile path
last_workfile_path = ""
extensions = avalon.api.HOST_WORKFILE_EXTENSIONS.get(
self.host_name
)
if extensions:
anatomy = self.data["anatomy"]
# Find last workfile
file_template = anatomy.templates["work"]["file"]
workdir_data.update({
"version": 1,
"user": os.environ.get("PYPE_USERNAME") or getpass.getuser(),
"ext": extensions[0]
})
last_workfile_path = avalon.api.last_workfile(
workdir, file_template, workdir_data, extensions, True
)
if not os.path.exists(last_workfile_path):
self.log.debug((
"Workfile for the launch context does not exist"
" yet but the path will be set."
))
self.log.debug(
"Setting last workfile path: {}".format(last_workfile_path)
)
self.launch_context.env["AVALON_LAST_WORKFILE"] = last_workfile_path
self.data["last_workfile_path"] = last_workfile_path
def should_start_last_workfile(self, project_name, host_name, task_name):
"""Define if host should start last version workfile if possible.
Default output is `False`. Can be overridden with environment variable
`AVALON_OPEN_LAST_WORKFILE`, valid values without case sensitivity are
`"0", "1", "true", "false", "yes", "no"`.
Args:
project_name (str): Name of project.
host_name (str): Name of host which is launched. In avalon's
application context its value is stored in the app definition under
key `"application_dir"`. Not case sensitive.
task_name (str): Name of task which is used for launching the host.
Task name is not case sensitive.
Returns:
bool: True if host should start workfile.
"""
project_settings = (
get_project_settings(project_name)['global']['tools'])
startup_presets = (
project_settings['Workfiles']['last_workfile_on_startup'])
# Documented default when no profiles are set or none match
default_output = False
if not startup_presets:
return default_output
host_name_lowered = host_name.lower()
task_name_lowered = task_name.lower()
max_points = 2
matching_points = -1
matching_item = None
for item in startup_presets:
hosts = item.get("hosts") or tuple()
tasks = item.get("tasks") or tuple()
hosts_lowered = tuple(_host_name.lower() for _host_name in hosts)
# Skip item if has set hosts and current host is not in
if hosts_lowered and host_name_lowered not in hosts_lowered:
continue
tasks_lowered = tuple(_task_name.lower() for _task_name in tasks)
# Skip item if has set tasks and current task is not in
if tasks_lowered:
task_match = False
for task_regex in self.compile_list_of_regexes(tasks_lowered):
if re.match(task_regex, task_name_lowered):
task_match = True
break
if not task_match:
continue
points = int(bool(hosts_lowered)) + int(bool(tasks_lowered))
if points > matching_points:
matching_item = item
matching_points = points
if matching_points == max_points:
break
if matching_item is not None:
output = matching_item.get("enabled")
if output is None:
output = default_output
return output
return default_output
@staticmethod
def compile_list_of_regexes(in_list):
"""Convert strings in entered list to compiled regex objects."""
regexes = list()
if not in_list:
return regexes
for item in in_list:
if item:
try:
regexes.append(re.compile(item))
except TypeError:
print((
"Invalid type \"{}\" value \"{}\"."
" Expected string based object. Skipping."
).format(str(type(item)), str(item)))
return regexes
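For context, the `last_workfile_on_startup` value read by `should_start_last_workfile` is a list of profiles matched against host and task names. A sketch of a possible value, using only the keys the code reads (`hosts`, `tasks`, `enabled`); the concrete names are illustrative:

# A host + task match scores 2 points and wins over a catch-all profile.
# Task entries are treated as regular expressions by the code above.
last_workfile_on_startup = [
    {"hosts": ["maya", "nuke"], "tasks": ["anim.*", "comp"], "enabled": True},
    {"hosts": [], "tasks": [], "enabled": False},
]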

View file

@ -0,0 +1,44 @@
import os
from pype.lib import PreLaunchHook
class HarmonyPrelaunchHook(PreLaunchHook):
"""Launch arguments preparation.
Hook prepends the python executable and a command that launches the
Harmony implementation before the Harmony executable.
"""
app_groups = ["harmony"]
def execute(self):
# Pop Harmony executable
harmony_executable = self.launch_context.launch_args.pop(0)
# Pop rest of launch arguments - There should not be other arguments!
remainders = []
while self.launch_context.launch_args:
remainders.append(self.launch_context.launch_args.pop(0))
new_launch_args = [
self.python_executable(),
"-c",
(
"import avalon.harmony;"
"avalon.harmony.launch(\"{}\")"
).format(harmony_executable)
]
# Append as a whole list as these arguments should not be separated
self.launch_context.launch_args.append(new_launch_args)
if remainders:
self.log.warning((
"There are unexpected launch arguments in Harmony launch. {}"
).format(str(remainders)))
self.launch_context.launch_args.extend(remainders)
def python_executable(self):
"""Should lead to python executable."""
# TODO change in Pype 3
return os.environ["PYPE_PYTHON_EXE"]

View file

@ -0,0 +1,15 @@
import os
from pype.lib import PreLaunchHook
class HieroLaunchArguments(PreLaunchHook):
order = 0
app_groups = ["hiero"]
def execute(self):
"""Prepare suprocess launch arguments for Hiero."""
# Add path to workfile to arguments
if self.data.get("start_last_workfile"):
last_workfile = self.data.get("last_workfile_path")
if os.path.exists(last_workfile):
self.launch_context.launch_args.append(last_workfile)

View file

@ -0,0 +1,16 @@
import os
from pype.lib import PreLaunchHook
class MayaLaunchArguments(PreLaunchHook):
"""Add path to last workfile to launch arguments."""
order = 0
app_groups = ["maya"]
def execute(self):
"""Prepare suprocess launch arguments for Maya."""
# Add path to workfile to arguments
if self.data.get("start_last_workfile"):
last_workfile = self.data.get("last_workfile_path")
if os.path.exists(last_workfile):
self.launch_context.launch_args.append(last_workfile)

View file

@ -0,0 +1,15 @@
import os
from pype.lib import PreLaunchHook
class NukeStudioLaunchArguments(PreLaunchHook):
order = 0
app_groups = ["nukestudio"]
def execute(self):
"""Prepare suprocess launch arguments for NukeStudio."""
# Add path to workfile to arguments
if self.data.get("start_last_workfile"):
last_workfile = self.data.get("last_workfile_path")
if os.path.exists(last_workfile):
self.launch_context.launch_args.append(last_workfile)

View file

@ -0,0 +1,15 @@
import os
from pype.lib import PreLaunchHook
class NukeXLaunchArguments(PreLaunchHook):
order = 0
app_groups = ["nukex"]
def execute(self):
"""Prepare suprocess launch arguments for NukeX."""
# Add path to workfile to arguments
if self.data.get("start_last_workfile"):
last_workfile = self.data.get("last_workfile_path")
if os.path.exists(last_workfile):
self.launch_context.launch_args.append(last_workfile)

View file

@ -0,0 +1,44 @@
import os
from pype.lib import PreLaunchHook
class PhotoshopPrelaunchHook(PreLaunchHook):
"""Launch arguments preparation.
Hook prepends the python executable and a command that launches the
Photoshop implementation before the Photoshop executable.
"""
app_groups = ["photoshop"]
def execute(self):
# Pop Photoshop executable
photoshop_executable = self.launch_context.launch_args.pop(0)
# Pop rest of launch arguments - There should not be other arguments!
remainders = []
while self.launch_context.launch_args:
remainders.append(self.launch_context.launch_args.pop(0))
new_launch_args = [
self.python_executable(),
"-c",
(
"import avalon.photoshop;"
"avalon.photoshop.launch(\"{}\")"
).format(photoshop_executable)
]
# Append as a whole list as these arguments should not be separated
self.launch_context.launch_args.append(new_launch_args)
if remainders:
self.log.warning((
"There are unexpected launch arguments in Photoshop launch. {}"
).format(str(remainders)))
self.launch_context.launch_args.extend(remainders)
def python_executable(self):
"""Should lead to python executable."""
# TODO change in Pype 3
return os.environ["PYPE_PYTHON_EXE"]

View file

@ -0,0 +1,58 @@
import os
import importlib
from pype.lib import PreLaunchHook
from pype.hosts.resolve import utils
class ResolvePrelaunch(PreLaunchHook):
"""
This hook validates the Python 3.6 installation and the Resolve utility
scripts directory and runs the Resolve setup before the executable is
launched.
"""
app_groups = ["resolve"]
def execute(self):
# making sure python 3.6 is installed at the provided path
py36_dir = os.path.normpath(self.env.get("PYTHON36_RESOLVE", ""))
assert os.path.isdir(py36_dir), (
"Python 3.6 is not installed at the provided folder path. Either "
"make sure the `environments\resolve.json` is having correctly "
"set `PYTHON36_RESOLVE` or make sure Python 3.6 is installed "
f"in given path. \nPYTHON36_RESOLVE: `{py36_dir}`"
)
self.log.info(f"Path to Resolve Python folder: `{py36_dir}`...")
self.env["PYTHON36_RESOLVE"] = py36_dir
# setting utility scripts dir for scripts syncing
us_dir = os.path.normpath(
self.env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "")
)
assert os.path.isdir(us_dir), (
"Resolve utility script dir does not exists. Either make sure "
"the `environments\resolve.json` is having correctly set "
"`RESOLVE_UTILITY_SCRIPTS_DIR` or reinstall DaVinci Resolve. \n"
f"RESOLVE_UTILITY_SCRIPTS_DIR: `{us_dir}`"
)
self.log.debug(f"-- us_dir: `{us_dir}`")
# correctly format path for pre python script
pre_py_sc = os.path.normpath(self.env.get("PRE_PYTHON_SCRIPT", ""))
self.env["PRE_PYTHON_SCRIPT"] = pre_py_sc
self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...")
try:
__import__("pype.hosts.resolve")
__import__("pyblish")
except ImportError:
self.log.warning(
"pyblish: Could not load Resolve integration.",
exc_info=True
)
else:
# Resolve Setup integration
importlib.reload(utils)
self.log.debug(f"-- utils.__file__: `{utils.__file__}`")
utils.setup(self.env)

View file

@ -0,0 +1,35 @@
from pype.lib import (
PreLaunchHook,
ApplicationLaunchFailed,
_subprocess
)
class PreInstallPyWin(PreLaunchHook):
"""Hook makes sure there is installed python module pywin32 on windows."""
# WARNING This hook will probably be deprecated in Pype 3 - kept for test
order = 10
app_groups = ["tvpaint"]
platforms = ["windows"]
def execute(self):
installed = False
try:
from win32com.shell import shell
self.log.debug("Python module `pywin32` already installed.")
installed = True
except Exception:
pass
if installed:
return
try:
output = _subprocess(
["pip", "install", "pywin32==227"]
)
self.log.debug("Pip install pywin32 output:\n{}'".format(output))
except RuntimeError:
msg = "Installation of python module `pywin32` crashed."
self.log.warning(msg, exc_info=True)
raise ApplicationLaunchFailed(msg)

View file

@ -0,0 +1,103 @@
import os
import shutil
from pype.hosts import tvpaint
from pype.lib import PreLaunchHook
import avalon
class TvpaintPrelaunchHook(PreLaunchHook):
"""Launch arguments preparation.
Hook prepends the python executable and the TVPaint launch script before
the TVPaint executable and appends the last workfile path to the launch
arguments.
Existence of the last workfile is checked. If the workfile does not exist,
the hook tries to copy a template workfile from a predefined path.
"""
app_groups = ["tvpaint"]
def execute(self):
# Pop tvpaint executable
tvpaint_executable = self.launch_context.launch_args.pop(0)
# Pop rest of launch arguments - There should not be other arguments!
remainders = []
while self.launch_context.launch_args:
remainders.append(self.launch_context.launch_args.pop(0))
new_launch_args = [
self.main_executable(),
self.launch_script_path(),
tvpaint_executable
]
# Add workfile to launch arguments
workfile_path = self.workfile_path()
if workfile_path:
new_launch_args.append(workfile_path)
# How to create new command line
# if platform.system().lower() == "windows":
# new_launch_args = [
# "cmd.exe",
# "/c",
# "Call cmd.exe /k",
# *new_launch_args
# ]
# Append as a whole list as these arguments should not be separated
self.launch_context.launch_args.append(new_launch_args)
if remainders:
self.log.warning((
"There are unexpected launch arguments in TVPaint launch. {}"
).format(str(remainders)))
self.launch_context.launch_args.extend(remainders)
def main_executable(self):
"""Should lead to python executable."""
# TODO change in Pype 3
return os.path.normpath(os.environ["PYPE_PYTHON_EXE"])
def launch_script_path(self):
avalon_dir = os.path.dirname(os.path.abspath(avalon.__file__))
script_path = os.path.join(
avalon_dir,
"tvpaint",
"launch_script.py"
)
return script_path
def workfile_path(self):
workfile_path = self.data["last_workfile_path"]
# copy workfile from template if none exists on the path yet
if not os.path.exists(workfile_path):
# TODO add ability to set different template workfile path via
# settings
pype_dir = os.path.dirname(os.path.abspath(tvpaint.__file__))
template_path = os.path.join(pype_dir, "template.tvpp")
if not os.path.exists(template_path):
self.log.warning(
"Couldn't find workfile template file in {}".format(
template_path
)
)
return
self.log.info(
f"Creating workfile from template: \"{template_path}\""
)
# Copy template workfile to the new destination
shutil.copy2(
os.path.normpath(template_path),
os.path.normpath(workfile_path)
)
self.log.info(f"Workfile to open: \"{workfile_path}\"")
return workfile_path

View file

@ -0,0 +1,95 @@
import os
from pype.lib import (
PreLaunchHook,
ApplicationLaunchFailed
)
from pype.hosts.unreal import lib as unreal_lib
class UnrealPrelaunchHook(PreLaunchHook):
"""
This hook checks whether the current workdir contains an Unreal project.
If not, it creates one and appends the project file path to the Unreal
launch arguments.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.signature = "( {} )".format(self.__class__.__name__)
def execute(self):
asset_name = self.data["asset_name"]
task_name = self.data["task_name"]
workdir = self.env["AVALON_WORKDIR"]
engine_version = self.app_name.split("_")[-1]
unreal_project_name = f"{asset_name}_{task_name}"
# Unreal is sensitive about project names longer than 20 characters
if len(unreal_project_name) > 20:
self.log.warning((
f"Project name exceed 20 characters ({unreal_project_name})!"
))
# Unreal doesn't accept non-alphabetic characters at the start
# of the project name. This is because the project name is used
# in various places inside C++ code where variable names cannot
# start with a non-alphabetic character. We prepend 'P' to solve it.
# 😱
if not unreal_project_name[:1].isalpha():
self.log.warning((
"Project name doesn't start with alphabet "
f"character ({unreal_project_name}). Appending 'P'"
))
unreal_project_name = f"P{unreal_project_name}"
project_path = os.path.join(workdir, unreal_project_name)
self.log.info((
f"{self.signature} requested UE4 version: "
f"[ {engine_version} ]"
))
detected = unreal_lib.get_engine_versions()
detected_str = ', '.join(detected.keys()) or 'none'
self.log.info((
f"{self.signature} detected UE4 versions: "
f"[ {detected_str} ]"
))
engine_version = ".".join(engine_version.split(".")[:2])
if engine_version not in detected.keys():
raise ApplicationLaunchFailed((
f"{self.signature} requested version not "
f"detected [ {engine_version} ]"
))
os.makedirs(project_path, exist_ok=True)
project_file = os.path.join(
project_path,
f"{unreal_project_name}.uproject"
)
if not os.path.isfile(project_file):
engine_path = detected[engine_version]
self.log.info((
f"{self.signature} creating unreal "
f"project [ {unreal_project_name} ]"
))
# Set "AVALON_UNREAL_PLUGIN" to current process environment for
# execution of `create_unreal_project`
env_key = "AVALON_UNREAL_PLUGIN"
if self.env.get(env_key):
os.environ[env_key] = self.env[env_key]
unreal_lib.create_unreal_project(
unreal_project_name,
engine_version,
project_path,
engine_path=engine_path
)
# Append project file to launch arguments
self.launch_context.launch_args.append(f"\"{project_file}\"")

View file

@ -9,7 +9,7 @@ import avalon.tools.sceneinventory
import pyblish.api
from pype import lib
from pype.api import config
from pype.api import get_current_project_settings
def set_scene_settings(settings):
@ -50,10 +50,18 @@ def get_asset_settings():
}
try:
skip_resolution_check = \
config.get_presets()["harmony"]["general"]["skip_resolution_check"]
skip_timelines_check = \
config.get_presets()["harmony"]["general"]["skip_timelines_check"]
skip_resolution_check = (
get_current_project_settings()
["harmony"]
["general"]
["skip_resolution_check"]
)
skip_timelines_check = (
get_current_project_settings()
["harmony"]
["general"]
["skip_timelines_check"]
)
except KeyError:
skip_resolution_check = []
skip_timelines_check = []

View file

@ -1,13 +1,13 @@
import re
import avalon.api
import avalon.nuke
from pype.api import config
from pype.api import get_current_project_settings
class PypeCreator(avalon.nuke.pipeline.Creator):
"""Pype Nuke Creator class wrapper
"""
def __init__(self, *args, **kwargs):
super(PypeCreator, self).__init__(*args, **kwargs)
self.presets = config.get_presets()['plugins']["nuke"]["create"].get(
self.presets = get_current_project_settings()["nuke"]["create"].get(
self.__class__.__name__, {}
)

View file

@ -4,7 +4,7 @@ import platform
import json
from distutils import dir_util
import subprocess
from pype.api import config
from pype.api import get_project_settings
def get_engine_versions():
@ -150,7 +150,7 @@ def create_unreal_project(project_name: str,
:type dev_mode: bool
:returns: None
"""
preset = config.get_presets()["unreal"]["project_setup"]
preset = get_project_settings(project_name)["unreal"]["project_setup"]
if os.path.isdir(os.environ.get("AVALON_UNREAL_PLUGIN", "")):
# copy plugin to correct path under project
@ -246,15 +246,18 @@ def create_unreal_project(project_name: str,
with open(project_file, mode="w") as pf:
json.dump(data, pf, indent=4)
# ensure we have PySide installed in engine
# TODO: make it work for other platforms 🍎 🐧
if platform.system().lower() == "windows":
python_path = os.path.join(engine_path, "Engine", "Binaries",
"ThirdParty", "Python", "Win64",
"python.exe")
# UE < 4.26 ships Python 2 by default, so we need PySide,
# but we will not need it in 4.26 and up
if int(ue_version.split(".")[1]) < 26:
# ensure we have PySide installed in engine
# TODO: make it work for other platforms 🍎 🐧
if platform.system().lower() == "windows":
python_path = os.path.join(engine_path, "Engine", "Binaries",
"ThirdParty", "Python", "Win64",
"python.exe")
subprocess.run([python_path, "-m",
"pip", "install", "pyside"])
subprocess.run([python_path, "-m",
"pip", "install", "pyside"])
if dev_mode or preset["dev_mode"]:
_prepare_cpp_project(project_file, engine_path)

View file

@ -1,16 +1,17 @@
# -*- coding: utf-8 -*-
"""Pype lib module."""
from .deprecated import (
get_avalon_database,
set_io_database
)
from .env_tools import (
env_value_to_bool,
get_paths_from_environ
)
from .python_module_tools import (
modules_from_path,
recursive_bases_from_class,
classes_from_module
)
from .avalon_context import (
is_latest,
any_outdated,
@ -28,8 +29,8 @@ from .applications import (
ApplictionExecutableNotFound,
ApplicationNotFound,
ApplicationManager,
launch_application,
ApplicationAction,
PreLaunchHook,
PostLaunchHook,
_subprocess
)
@ -47,12 +48,13 @@ from .ffmpeg_utils import (
)
__all__ = [
"get_avalon_database",
"set_io_database",
"env_value_to_bool",
"get_paths_from_environ",
"modules_from_path",
"recursive_bases_from_class",
"classes_from_module",
"is_latest",
"any_outdated",
"get_asset",
@ -68,8 +70,8 @@ __all__ = [
"ApplictionExecutableNotFound",
"ApplicationNotFound",
"ApplicationManager",
"launch_application",
"ApplicationAction",
"PreLaunchHook",
"PostLaunchHook",
"filter_pyblish_plugins",

File diff suppressed because it is too large.

View file

@ -5,7 +5,7 @@ import logging
import collections
from avalon import io, pipeline
from ..api import config
from ..api import get_project_settings
import avalon.api
log = logging.getLogger("AvalonContext")
@ -410,12 +410,12 @@ class BuildWorkfile:
(dict): preset per entered task name
"""
host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1]
presets = config.get_presets(io.Session["AVALON_PROJECT"])
presets = get_project_settings(io.Session["AVALON_PROJECT"])
# Get presets for host
build_presets = (
presets["plugins"]
.get(host_name, {})
presets.get(host_name, {})
.get("workfile_build")
.get("profiles")
)
if not build_presets:
return

View file

@ -1,26 +0,0 @@
import os
from avalon import io
def get_avalon_database():
"""Mongo database used in avalon's io.
* Function is not used in pype 3.0 where it was replaced with usage of
AvalonMongoDB.
"""
if io._database is None:
set_io_database()
return io._database
def set_io_database():
"""Set avalon's io context with environemnts.
* Function is not used in pype 3.0 where it was replaced with usage of
AvalonMongoDB.
"""
required_keys = ["AVALON_PROJECT", "AVALON_ASSET", "AVALON_SILO"]
for key in required_keys:
os.environ[key] = os.environ.get(key, "")
io.install()

View file

@ -20,9 +20,9 @@ def env_value_to_bool(env_key=None, value=None, default=False):
if value is not None:
value = str(value).lower()
if value in ("true", "yes", "1"):
if value in ("true", "yes", "1", "on"):
return True
elif value in ("false", "no", "0"):
elif value in ("false", "no", "0", "off"):
return False
return default
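A short usage sketch of the widened value handling, assuming `env_value_to_bool` is imported from `pype.lib` (it is exported there per the `__init__.py` change above) and that passing only an env key reads the variable, as the call sites elsewhere in this commit suggest; the variable name is hypothetical:

import os
from pype.lib import env_value_to_bool

os.environ["EXAMPLE_FLAG"] = "ON"                       # hypothetical variable
print(env_value_to_bool("EXAMPLE_FLAG"))                # True, "on" is now accepted
print(env_value_to_bool(value="off"))                   # False, "off" is now accepted
print(env_value_to_bool(value="maybe", default=True))   # unknown value falls back to default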

View file

@ -55,7 +55,7 @@ def execute_hook(hook, *args, **kwargs):
module.__file__ = abspath
try:
with open(abspath) as f:
with open(abspath, errors='ignore') as f:
six.exec_(f.read(), module.__dict__)
sys.modules[abspath] = module

View file

@ -4,7 +4,7 @@ import os
import inspect
import logging
from ..api import config
from ..api import get_project_settings
log = logging.getLogger(__name__)
@ -25,7 +25,7 @@ def filter_pyblish_plugins(plugins):
host = api.current_host()
presets = config.get_presets().get('plugins', {})
presets = get_project_settings(os.environ['AVALON_PROJECT']) or {}
# iterate over plugins
for plugin in plugins[:]:
@ -53,7 +53,7 @@ def filter_pyblish_plugins(plugins):
log.info('removing plugin {}'.format(plugin.__name__))
plugins.remove(plugin)
else:
log.info('setting {}:{} on plugin {}'.format(
log.info('setting XXX {}:{} on plugin {}'.format(
option, value, plugin.__name__))
setattr(plugin, option, value)

View file

@ -0,0 +1,113 @@
import os
import sys
import types
import importlib
import inspect
import logging
log = logging.getLogger(__name__)
PY3 = sys.version_info[0] == 3
def modules_from_path(folder_path):
"""Get python scripts as modules from a path.
Arguments:
folder_path (str): Path to folder containing python scripts.
Returns:
List of modules.
"""
folder_path = os.path.normpath(folder_path)
modules = []
if not os.path.isdir(folder_path):
log.warning("Not a directory path: {}".format(folder_path))
return modules
for filename in os.listdir(folder_path):
# Ignore files which start with underscore
if filename.startswith("_"):
continue
mod_name, mod_ext = os.path.splitext(filename)
if not mod_ext == ".py":
continue
full_path = os.path.join(folder_path, filename)
if not os.path.isfile(full_path):
continue
try:
# Prepare module object where content of file will be parsed
module = types.ModuleType(mod_name)
if PY3:
# Use loader so module has full specs
module_loader = importlib.machinery.SourceFileLoader(
mod_name, full_path
)
module_loader.exec_module(module)
else:
# Execute module code and store content to module
with open(full_path) as _stream:
# Execute content and store it to module object
exec(_stream.read(), module.__dict__)
module.__file__ = full_path
modules.append(module)
except Exception:
log.warning(
"Failed to load path: \"{0}\"".format(full_path),
exc_info=True
)
continue
return modules
def recursive_bases_from_class(klass):
"""Extract all bases from entered class."""
result = []
bases = klass.__bases__
result.extend(bases)
for base in bases:
result.extend(recursive_bases_from_class(base))
return result
def classes_from_module(superclass, module):
"""Return plug-ins from module
Arguments:
superclass (superclass): Superclass of subclasses to look for
module (types.ModuleType): Imported module from which to
parse valid Avalon plug-ins.
Returns:
List of plug-ins, or empty list if none is found.
"""
classes = list()
for name in dir(module):
# It could be anything at this point
obj = getattr(module, name)
if not inspect.isclass(obj):
continue
# These are subclassed from nothing, not even `object`
if not len(obj.__bases__) > 0:
continue
# Use string comparison rather than `issubclass`
# in order to support reloading of this module.
bases = recursive_bases_from_class(obj)
if not any(base.__name__ == superclass.__name__ for base in bases):
continue
classes.append(obj)
return classes
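A minimal usage sketch of the helpers above, combined the way plugin discovery typically would; the folder path and base class are hypothetical:

from pype.lib import modules_from_path, classes_from_module


class MyPluginBase(object):
    """Hypothetical superclass the plugin files are expected to subclass."""
    pass


# Load every non-underscore *.py file in the folder as a module object.
modules = modules_from_path("/path/to/plugins")  # hypothetical path

# Collect classes whose bases include (by name) the wanted superclass.
plugins = []
for module in modules:
    plugins.extend(classes_from_module(MyPluginBase, module))

print(plugins)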

View file

@ -1,105 +0,0 @@
import os
import toml
import time
from pype.modules.ftrack.lib import AppAction
from avalon import lib, api
from pype.api import Logger, config
log = Logger().get_logger(__name__)
def register_app(app, dbcon, session, plugins_presets):
name = app['name']
variant = ""
try:
variant = app['name'].split("_")[1]
except Exception:
pass
abspath = lib.which_app(app['name'])
if abspath is None:
log.error(
"'{0}' - App don't have config toml file".format(app['name'])
)
return
apptoml = toml.load(abspath)
''' REQUIRED '''
executable = apptoml['executable']
''' OPTIONAL '''
label = apptoml.get('ftrack_label', app.get('label', name))
icon = apptoml.get('ftrack_icon', None)
description = apptoml.get('description', None)
preactions = apptoml.get('preactions', [])
if icon:
icon = icon.format(os.environ.get('PYPE_STATICS_SERVER', ''))
# register action
AppAction(
session, dbcon, label, name, executable, variant,
icon, description, preactions, plugins_presets
).register()
if not variant:
log.info('- Variant is not set')
def register(session, plugins_presets={}):
from pype.lib import env_value_to_bool
if env_value_to_bool("PYPE_USE_APP_MANAGER", default=False):
return
app_usages = (
config.get_presets()
.get("global", {})
.get("applications")
) or {}
apps = []
missing_app_names = []
launchers_path = os.path.join(os.environ["PYPE_CONFIG"], "launchers")
for file in os.listdir(launchers_path):
filename, ext = os.path.splitext(file)
if ext.lower() != ".toml":
continue
app_usage = app_usages.get(filename)
if not app_usage:
if app_usage is None:
missing_app_names.append(filename)
continue
loaded_data = toml.load(os.path.join(launchers_path, file))
app_data = {
"name": filename,
"label": loaded_data.get("label", filename)
}
apps.append(app_data)
if missing_app_names:
log.debug(
"Apps not defined in applications usage. ({})".format(
", ".join((
"\"{}\"".format(app_name)
for app_name in missing_app_names
))
)
)
dbcon = api.AvalonMongoDB()
apps = sorted(apps, key=lambda app: app["name"])
app_counter = 0
for app in apps:
try:
register_app(app, dbcon, session, plugins_presets)
if app_counter % 5 == 0:
time.sleep(0.1)
app_counter += 1
except Exception as exc:
log.warning(
"\"{}\" - not a proper App ({})".format(app['name'], str(exc)),
exc_info=True
)

View file

@ -1,7 +1,6 @@
import os
from uuid import uuid4
from pype.api import config
from pype.modules.ftrack.lib import BaseAction
from pype.lib import (
ApplicationManager,
@ -205,55 +204,6 @@ class AppplicationsAction(BaseAction):
"message": msg
}
# TODO Move to prelaunch/afterlaunch hooks
# TODO change to settings
# Change status of task to In progress
presets = config.get_presets()["ftrack"]["ftrack_config"]
if "status_update" in presets:
statuses = presets["status_update"]
actual_status = entity["status"]["name"].lower()
already_tested = []
ent_path = "/".join(
[ent["name"] for ent in entity["link"]]
)
while True:
next_status_name = None
for key, value in statuses.items():
if key in already_tested:
continue
if actual_status in value or "_any_" in value:
if key != "_ignore_":
next_status_name = key
already_tested.append(key)
break
already_tested.append(key)
if next_status_name is None:
break
try:
query = "Status where name is \"{}\"".format(
next_status_name
)
status = session.query(query).one()
entity["status"] = status
session.commit()
self.log.debug("Changing status to \"{}\" <{}>".format(
next_status_name, ent_path
))
break
except Exception:
session.rollback()
msg = (
"Status \"{}\" in presets wasn't found"
" on Ftrack entity type \"{}\""
).format(next_status_name, entity.entity_type)
self.log.warning(msg)
return {
"success": True,
"message": "Launching {0}".format(self.label)
@ -261,7 +211,5 @@ class AppplicationsAction(BaseAction):
def register(session, plugins_presets=None):
'''Register action. Called when used as an event plugin.'''
from pype.lib import env_value_to_bool
if env_value_to_bool("PYPE_USE_APP_MANAGER", default=False):
AppplicationsAction(session, plugins_presets).register()
"""Register action. Called when used as an event plugin."""
AppplicationsAction(session, plugins_presets).register()

View file

@ -1,6 +1,4 @@
import os
import collections
import toml
import json
import arrow
import ftrack_api
@ -8,8 +6,8 @@ from pype.modules.ftrack.lib import BaseAction, statics_icon
from pype.modules.ftrack.lib.avalon_sync import (
CUST_ATTR_ID_KEY, CUST_ATTR_GROUP, default_custom_attributes_definition
)
from pype.api import config
from pype.lib import ApplicationManager, env_value_to_bool
from pype.api import get_system_settings
from pype.lib import ApplicationManager
"""
This action creates/updates custom attributes.
@ -146,9 +144,6 @@ class CustomAttributes(BaseAction):
"text", "boolean", "date", "enumerator",
"dynamic enumerator", "number"
)
# Pype 3 features
use_app_manager = env_value_to_bool("PYPE_USE_APP_MANAGER", default=False)
app_manager = None
def discover(self, session, entities, event):
'''
@ -171,8 +166,7 @@ class CustomAttributes(BaseAction):
})
session.commit()
if self.use_app_manager:
self.app_manager = ApplicationManager()
self.app_manager = ApplicationManager()
try:
self.prepare_global_data(session)
@ -217,15 +211,12 @@ class CustomAttributes(BaseAction):
self.groups = {}
self.presets = config.get_presets()
self.ftrack_settings = get_system_settings()["modules"]["Ftrack"]
self.attrs_presets = self.prepare_attribute_pressets()
def prepare_attribute_pressets(self):
output = {}
attr_presets = (
self.presets.get("ftrack", {}).get("ftrack_custom_attributes")
) or {}
attr_presets = self.ftrack_settings["custom_attributes"]
for entity_type, preset in attr_presets.items():
# Lower entity type
entity_type = entity_type.lower()
@ -391,54 +382,8 @@ class CustomAttributes(BaseAction):
app_definitions.append({"empty": "< Empty >"})
return app_definitions
def application_definitions(self):
app_usages = self.presets.get("global", {}).get("applications") or {}
app_definitions = []
launchers_path = os.path.join(os.environ["PYPE_CONFIG"], "launchers")
missing_app_names = []
for file in os.listdir(launchers_path):
app_name, ext = os.path.splitext(file)
if ext.lower() != ".toml":
continue
if not app_usages.get(app_name):
missing_app_names.append(app_name)
continue
loaded_data = toml.load(os.path.join(launchers_path, file))
ftrack_label = loaded_data.get("ftrack_label")
if ftrack_label:
parts = app_name.split("_")
if len(parts) > 1:
ftrack_label = " ".join((ftrack_label, parts[-1]))
else:
ftrack_label = loaded_data.get("label", app_name)
app_definitions.append({app_name: ftrack_label})
if missing_app_names:
self.log.warning(
"Apps not defined in applications usage. ({})".format(
", ".join((
"\"{}\"".format(app_name)
for app_name in missing_app_names
))
)
)
# Make sure there is at least one item
if not app_definitions:
app_definitions.append({"empty": "< Empty >"})
return app_definitions
def applications_attribute(self, event):
if self.use_app_manager:
apps_data = self.app_defs_from_app_manager()
else:
apps_data = self.application_definitions()
apps_data = self.app_defs_from_app_manager()
applications_custom_attr_data = {
"label": "Applications",
@ -453,28 +398,13 @@ class CustomAttributes(BaseAction):
}
self.process_attr_data(applications_custom_attr_data, event)
def tools_from_app_manager(self):
def tools_attribute(self, event):
tools_data = []
for tool_name, tool in self.app_manager.tools.items():
if tool.enabled:
tools_data.append({
tool_name: tool_name
})
return tools_data
def tools_data(self):
tool_usages = self.presets.get("global", {}).get("tools") or {}
tools_data = []
for tool_name, usage in tool_usages.items():
if usage:
tools_data.append({tool_name: tool_name})
return tools_data
def tools_attribute(self, event):
if self.use_app_manager:
tools_data = self.tools_from_app_manager()
else:
tools_data = self.tools_data()
# Make sure there is at least one item
if not tools_data:
@ -494,12 +424,7 @@ class CustomAttributes(BaseAction):
self.process_attr_data(tools_custom_attr_data, event)
def intent_attribute(self, event):
intent_key_values = (
self.presets
.get("global", {})
.get("intent", {})
.get("items", {})
) or {}
intent_key_values = self.ftrack_settings["intent"]["items"]
intent_values = []
for key, label in intent_key_values.items():
@ -805,6 +730,9 @@ class CustomAttributes(BaseAction):
return default
err_msg = 'Default value is not'
if type == 'number':
if isinstance(default, (str)) and default.isnumeric():
default = float(default)
if not isinstance(default, (float, int)):
raise CustAttrException('{} integer'.format(err_msg))
elif type == 'text':

View file

@ -1,7 +1,11 @@
import os
from pype.modules.ftrack.lib import BaseAction, statics_icon
from avalon import lib as avalonlib
from pype.api import config, Anatomy
from pype.api import (
Anatomy,
get_project_settings
)
from pype.lib import ApplicationManager
class CreateFolders(BaseAction):
@ -93,6 +97,7 @@ class CreateFolders(BaseAction):
all_entities = self.get_notask_children(entity)
anatomy = Anatomy(project_name)
project_settings = get_project_settings(project_name)
work_keys = ["work", "folder"]
work_template = anatomy.templates
@ -106,10 +111,13 @@ class CreateFolders(BaseAction):
publish_template = publish_template[key]
publish_has_apps = "{app" in publish_template
presets = config.get_presets()
app_presets = presets.get("tools", {}).get("sw_folders")
cached_apps = {}
tools_settings = project_settings["global"]["tools"]
app_presets = tools_settings["Workfiles"]["sw_folders"]
app_manager_apps = None
if app_presets and (work_has_apps or publish_has_apps):
app_manager_apps = ApplicationManager().applications
cached_apps = {}
collected_paths = []
for entity in all_entities:
if entity.entity_type.lower() == "project":
@ -140,18 +148,20 @@ class CreateFolders(BaseAction):
task_data["task"] = child["name"]
apps = []
if app_presets and (work_has_apps or publish_has_apps):
possible_apps = app_presets.get(task_type_name, [])
for app in possible_apps:
if app in cached_apps:
app_dir = cached_apps[app]
if app_manager_apps:
possible_apps = app_presets.get(task_type_name) or []
for app_name in possible_apps:
if app_name in cached_apps:
apps.append(cached_apps[app_name])
continue
app_def = app_manager_apps.get(app_name)
if app_def and app_def.is_host:
app_dir = app_def.host_name
else:
try:
app_data = avalonlib.get_application(app)
app_dir = app_data["application_dir"]
except ValueError:
app_dir = app
cached_apps[app] = app_dir
app_dir = app_name
cached_apps[app_name] = app_dir
apps.append(app_dir)
# Template work

View file

@ -2,7 +2,7 @@ import os
import re
from pype.modules.ftrack.lib import BaseAction, statics_icon
from pype.api import config, Anatomy
from pype.api import Anatomy, get_project_settings
class CreateProjectFolders(BaseAction):
@ -69,25 +69,26 @@ class CreateProjectFolders(BaseAction):
return True
def launch(self, session, entities, event):
entity = entities[0]
project = self.get_project_from_entity(entity)
project_folder_presets = (
config.get_presets()
.get("tools", {})
.get("project_folder_structure")
# Get project entity
project_entity = self.get_project_from_entity(entities[0])
# Load settings for project
project_name = project_entity["full_name"]
project_settings = get_project_settings(project_name)
project_folder_structure = (
project_settings["global"]["project_folder_structure"]
)
if not project_folder_presets:
if not project_folder_structure:
return {
"success": False,
"message": "Project structure presets are not set."
"message": "Project structure is not set."
}
try:
# Get paths based on presets
basic_paths = self.get_path_items(project_folder_presets)
anatomy = Anatomy(project["full_name"])
self.create_folders(basic_paths, entity, project, anatomy)
self.create_ftrack_entities(basic_paths, project)
basic_paths = self.get_path_items(project_folder_structure)
anatomy = Anatomy(project_entity["full_name"])
self.create_folders(basic_paths, project_entity, anatomy)
self.create_ftrack_entities(basic_paths, project_entity)
except Exception as exc:
session.rollback()
@ -219,7 +220,7 @@ class CreateProjectFolders(BaseAction):
output.append(os.path.normpath(os.path.sep.join(clean_items)))
return output
def create_folders(self, basic_paths, entity, project, anatomy):
def create_folders(self, basic_paths, project, anatomy):
roots_paths = []
if isinstance(anatomy.roots, dict):
for root in anatomy.roots:

View file

@ -396,6 +396,13 @@ class Delivery(BaseAction):
session.commit()
self.db_con.uninstall()
if job["status"] == "failed":
return {
"success": False,
"message": "Delivery failed. Check logs for more information."
}
return True
def real_launch(self, session, entities, event):
self.log.info("Delivery action just started.")
report_items = collections.defaultdict(list)

View file

@ -3,7 +3,6 @@ import subprocess
import traceback
import json
from pype.api import config
from pype.modules.ftrack.lib import BaseAction, statics_icon
import ftrack_api
from avalon import io, api
@ -11,7 +10,6 @@ from avalon import io, api
class RVAction(BaseAction):
""" Launch RV action """
ignore_me = "rv" not in config.get_presets()
identifier = "rv.launch.action"
label = "rv"
description = "rv Launcher"
@ -19,6 +17,8 @@ class RVAction(BaseAction):
type = 'Application'
allowed_types = ["img", "mov", "exr", "mp4"]
def __init__(self, session, plugins_presets):
""" Constructor
@ -26,36 +26,30 @@ class RVAction(BaseAction):
:type session: :class:`ftrack_api.Session`
"""
super().__init__(session, plugins_presets)
self.rv_path = None
self.config_data = None
# QUESTION load RV application data from AppplicationManager?
rv_path = None
# RV_HOME should be set if properly installed
if os.environ.get('RV_HOME'):
self.rv_path = os.path.join(
rv_path = os.path.join(
os.environ.get('RV_HOME'),
'bin',
'rv'
)
else:
# if not, fallback to config file location
if "rv" in config.get_presets():
self.config_data = config.get_presets()['rv']['config']
self.set_rv_path()
if not os.path.exists(rv_path):
rv_path = None
if self.rv_path is None:
return
if not rv_path:
self.log.info("RV path was not found.")
self.ignore_me = True
self.allowed_types = self.config_data.get(
'file_ext', ["img", "mov", "exr", "mp4"]
)
self.rv_path = rv_path
def discover(self, session, entities, event):
"""Return available actions based on *event*. """
return True
def set_rv_path(self):
self.rv_path = self.config_data.get("rv_path")
def preregister(self):
if self.rv_path is None:
return (

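A minimal sketch of the RV_HOME-based resolution the rewritten constructor uses: resolve the binary and treat a missing path as "disable the action". Standalone and illustrative; resolve_rv_path is not part of the action itself.

import os

def resolve_rv_path():
    # Mirrors the constructor above: RV_HOME must point at an RV install.
    rv_home = os.environ.get("RV_HOME")
    if not rv_home:
        return None
    rv_path = os.path.join(rv_home, "bin", "rv")
    if not os.path.exists(rv_path):
        return None
    return rv_path

if resolve_rv_path() is None:
    print("RV path was not found.")  # the action sets ignore_me = True here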
View file

@@ -8,7 +8,7 @@ from avalon.api import AvalonMongoDB
from bson.objectid import ObjectId
from pype.api import config, Anatomy
from pype.api import Anatomy, get_project_settings
class UserAssigmentEvent(BaseEvent):
@@ -173,26 +173,50 @@ class UserAssigmentEvent(BaseEvent):
return t_data
def launch(self, session, event):
# load shell scripts presets
presets = config.get_presets()['ftrack'].get("user_assigment_event")
if not presets:
if not event.get("data"):
return
for entity in event.get('data', {}).get('entities', []):
if entity.get('entity_type') != 'Appointment':
entities_info = event["data"].get("entities")
if not entities_info:
return
# load shell scripts presets
tmp_by_project_name = {}
for entity_info in entities_info:
if entity_info.get('entity_type') != 'Appointment':
continue
task, user = self._get_task_and_user(session,
entity.get('action'),
entity.get('changes'))
task_entity, user_entity = self._get_task_and_user(
session,
entity_info.get('action'),
entity_info.get('changes')
)
if not task or not user:
self.log.error(
'Task or User was not found.')
if not task_entity or not user_entity:
self.log.error("Task or User was not found.")
continue
data = self._get_template_data(task)
# format directories to pass to shell script
anatomy = Anatomy(data["project"]["name"])
project_name = task_entity["project"]["full_name"]
project_data = tmp_by_project_name.get(project_name) or {}
if "scripts_by_action" not in project_data:
project_settings = get_project_settings(project_name)
_settings = (
project_settings["ftrack"]["events"]["user_assignment"]
)
project_data["scripts_by_action"] = _settings.get("scripts")
tmp_by_project_name[project_name] = project_data
scripts_by_action = project_data["scripts_by_action"]
if not scripts_by_action:
continue
if "anatomy" not in project_data:
project_data["anatomy"] = Anatomy(project_name)
tmp_by_project_name[project_name] = project_data
anatomy = project_data["anatomy"]
data = self._get_template_data(task_entity)
anatomy_filled = anatomy.format(data)
# formatting work dir is easiest part as we can use whole path
work_dir = anatomy_filled["work"]["folder"]
@@ -201,8 +225,10 @@ class UserAssigmentEvent(BaseEvent):
publish = anatomy_filled["publish"]["folder"]
# now find path to {asset}
m = re.search("(^.+?{})".format(data['asset']),
publish)
m = re.search(
"(^.+?{})".format(data["asset"]),
publish
)
if not m:
msg = 'Cannot get part of publish path {}'.format(publish)
@@ -213,12 +239,13 @@ class UserAssigmentEvent(BaseEvent):
}
publish_dir = m.group(1)
for script in presets.get(entity.get('action')):
self.log.info(
'[{}] : running script for user {}'.format(
entity.get('action'), user["username"]))
self._run_script(script, [user["username"],
work_dir, publish_dir])
username = user_entity["username"]
event_entity_action = entity_info["action"]
for script in scripts_by_action.get(event_entity_action):
self.log.info((
"[{}] : running script for user {}"
).format(event_entity_action, username))
self._run_script(script, [username, work_dir, publish_dir])
return True

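The handler now caches project settings and Anatomy per project in tmp_by_project_name so repeated entities from the same project do not reload them. A minimal standalone sketch of that caching shape; load_settings and load_anatomy are placeholders, not Pype API.

def load_settings(project_name):
    # Placeholder for get_project_settings(project_name)
    return {"ftrack": {"events": {"user_assignment": {"scripts": []}}}}

def load_anatomy(project_name):
    # Placeholder for Anatomy(project_name)
    return object()

tmp_by_project_name = {}

def get_project_data(project_name):
    data = tmp_by_project_name.get(project_name) or {}
    if "scripts_by_action" not in data:
        settings = load_settings(project_name)
        data["scripts_by_action"] = (
            settings["ftrack"]["events"]["user_assignment"].get("scripts")
        )
    if "anatomy" not in data:
        data["anatomy"] = load_anatomy(project_name)
    tmp_by_project_name[project_name] = data
    return data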
View file

@@ -1,12 +1,8 @@
from pype.modules.ftrack import BaseEvent
from pype.api import config
from pype.api import get_project_settings
class VersionToTaskStatus(BaseEvent):
# Presets usage
default_status_mapping = {}
def launch(self, session, event):
'''Propagates status from version to task when changed'''
@@ -48,14 +44,19 @@ class VersionToTaskStatus(BaseEvent):
version_status_orig = version_status["name"]
# Get entities necessary for processing
version = session.get("AssetVersion", entity["entityId"])
task = version.get("task")
if not task:
continue
project_entity = self.get_project_from_entity(task)
project_name = project_entity["full_name"]
project_settings = get_project_settings(project_name)
# Load status mapping from presets
status_mapping = (
config.get_presets()
.get("ftrack", {})
.get("ftrack_config", {})
.get("status_version_to_task")
) or self.default_status_mapping
project_settings["ftrack"]["events"]["status_version_to_task"])
# Skip if mapping is empty
if not status_mapping:
continue
@@ -78,16 +79,10 @@ class VersionToTaskStatus(BaseEvent):
# Lower all names from presets
new_status_names = [name.lower() for name in new_status_names]
# Get entities necessary for processing
version = session.get("AssetVersion", entity["entityId"])
task = version.get("task")
if not task:
continue
if version["asset"]["type"]["short"].lower() == "scene":
continue
project_schema = task["project"]["project_schema"]
project_schema = project_entity["project_schema"]
# Get all available statuses for Task
statuses = project_schema.get_statuses("Task", task["type_id"])
# map lowered status name with it's object

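For orientation, the case-insensitive matching used above in a standalone form: available statuses are indexed by lowered name so names coming from settings match regardless of casing. The status dicts are stand-ins for ftrack status entities.

statuses = [{"name": "Not Ready"}, {"name": "In Progress"}, {"name": "Complete"}]
statuses_by_low_name = {st["name"].lower(): st for st in statuses}

new_status_names = ["complete", "approved"]  # already lowered, as in the handler
for name in new_status_names:
    status = statuses_by_low_name.get(name)
    if status:
        print("Would set task status to:", status["name"])
        break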
View file

@@ -2,11 +2,13 @@ import os
import sys
import types
import importlib
import ftrack_api
import time
import logging
import inspect
from pype.api import Logger, config
import ftrack_api
from pype.api import Logger
log = Logger().get_logger(__name__)
@@ -109,9 +111,8 @@ class FtrackServer:
key = "user"
if self.server_type.lower() == "event":
key = "server"
plugins_presets = config.get_presets().get(
"ftrack", {}
).get("plugins", {}).get(key, {})
# TODO replace with settings or get rid of passing the dictionary
plugins_presets = {}
function_counter = 0
for function_dict in register_functions_dict:

View file

@@ -9,7 +9,7 @@ from pype.modules.ftrack.ftrack_server.lib import (
SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER
)
import ftrack_api
from pype.api import Logger, config
from pype.api import Logger
log = Logger().get_logger("Event processor")
@@ -56,32 +56,16 @@ def register(session):
def clockify_module_registration():
module_name = "Clockify"
menu_items = config.get_presets()["tray"]["menu_items"]
if not menu_items["item_usage"][module_name]:
return
api_key = os.environ.get("CLOCKIFY_API_KEY")
if not api_key:
log.warning("Clockify API key is not set.")
return
workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
if not workspace_name:
workspace_name = (
menu_items
.get("attributes", {})
.get(module_name, {})
.get("workspace_name", {})
)
if not workspace_name:
log.warning("Clockify Workspace is not set.")
return
os.environ["CLOCKIFY_WORKSPACE"] = workspace_name
from pype.modules.clockify.constants import CLOCKIFY_FTRACK_SERVER_PATH
current = os.environ.get("FTRACK_EVENTS_PATH") or ""

View file

@@ -3,7 +3,6 @@ from . import credentials
from .ftrack_base_handler import BaseHandler
from .ftrack_event_handler import BaseEvent
from .ftrack_action_handler import BaseAction, ServerAction, statics_icon
from .ftrack_app_handler import AppAction
__all__ = (
"avalon_sync",
@@ -12,6 +11,5 @@ __all__ = (
"BaseEvent",
"BaseAction",
"ServerAction",
"statics_icon",
"AppAction"
"statics_icon"
)

View file

@@ -8,16 +8,13 @@ import copy
from avalon.api import AvalonMongoDB
import avalon
import avalon.api
from avalon.vendor import toml
from pype.api import Logger, Anatomy
from pype.api import Logger, Anatomy, get_anatomy_settings
from bson.objectid import ObjectId
from bson.errors import InvalidId
from pymongo import UpdateOne
import ftrack_api
from pype.api import config
from pype.lib import ApplicationManager
log = Logger().get_logger(__name__)
@@ -175,40 +172,29 @@ def get_avalon_project_template(project_name):
def get_project_apps(in_app_list):
"""
Returns metadata information about apps in 'in_app_list' enhanced
from toml files.
""" Application definitions for app name.
Args:
in_app_list: (list) - names of applications
Returns:
tuple (list, dictionary) - list of dictionaries about apps
dictionary of warnings
tuple (list, dictionary) - list of dictionaries with app definitions
dictionary of warnings
"""
apps = []
# TODO report
missing_toml_msg = "Missing config file for application"
error_msg = (
"Unexpected error happend during preparation of application"
)
warnings = collections.defaultdict(list)
for app in in_app_list:
try:
toml_path = avalon.lib.which_app(app)
if not toml_path:
log.warning(missing_toml_msg + ' "{}"'.format(app))
warnings[missing_toml_msg].append(app)
continue
missing_app_msg = "Missing definition of application"
application_manager = ApplicationManager()
for app_name in in_app_list:
app = application_manager.applications.get(app_name)
if app:
apps.append({
"name": app,
"label": toml.load(toml_path)["label"]
"name": app_name,
"label": app.full_label
})
except Exception:
warnings[error_msg].append(app)
log.warning((
"Error has happened during preparing application \"{}\""
).format(app), exc_info=True)
else:
warnings[missing_app_msg].append(app_name)
return apps, warnings
@@ -289,28 +275,6 @@ def get_hierarchical_attributes(session, entity, attr_names, attr_defaults={}):
return hier_values
def get_task_short_name(task_type):
"""
Returns short name (code) for 'task_type'. Short name stored in
metadata dictionary in project.config per each 'task_type'.
Could be used in anatomy, paths etc.
If no appropriate short name is found in mapping, 'task_type' is
returned back unchanged.
Currently stores data in:
'pype-config/presets/ftrack/project_defaults.json'
Args:
task_type: (string) - Animation | Modeling ...
Returns:
(string) - anim | model ...
"""
presets = config.get_presets()['ftrack']['project_defaults']\
.get("task_short_names")
return presets.get(task_type, task_type)
class SyncEntitiesFactory:
dbcon = AvalonMongoDB()
@@ -1131,6 +1095,13 @@ class SyncEntitiesFactory:
)
def prepare_ftrack_ent_data(self):
project_name = self.entities_dict[self.ft_project_id]["name"]
project_anatomy_data = get_anatomy_settings(project_name)
task_type_mapping = (
project_anatomy_data["attributes"]["task_short_names"]
)
not_set_ids = []
for id, entity_dict in self.entities_dict.items():
entity = entity_dict["entity"]
@@ -1167,10 +1138,12 @@ class SyncEntitiesFactory:
continue
self.report_items["warning"][msg] = items
tasks = {}
for tt in task_types:
tasks[tt["name"]] = {
"short_name": get_task_short_name(tt["name"])
}
for task_type in task_types:
task_type_name = task_type["name"]
short_name = task_type_mapping.get(task_type_name)
tasks[task_type_name] = {
"short_name": short_name or task_type_name
}
self.entities_dict[id]["final_entity"]["config"] = {
"tasks": tasks,
"apps": proj_apps

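Task short names now come from get_anatomy_settings(project_name)["attributes"]["task_short_names"] instead of the removed get_task_short_name helper. A standalone illustration with a few entries from the defaults added in this commit; unmapped task types fall back to their own name.

task_type_mapping = {"Modeling": "mdl", "Animation": "anim", "Compositing": "comp"}

tasks = {}
for task_type_name in ("Modeling", "Animation", "Previz"):
    short_name = task_type_mapping.get(task_type_name)
    tasks[task_type_name] = {"short_name": short_name or task_type_name}

print(tasks)  # "Previz" keeps "Previz" because it has no mapping entry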
View file

@@ -1,227 +0,0 @@
from pype import lib as pypelib
from pype.api import config
from .ftrack_action_handler import BaseAction
class AppAction(BaseAction):
"""Application Action class.
Args:
session (ftrack_api.Session): Session where action will be registered.
label (str): A descriptive string identifing your action.
varaint (str, optional): To group actions together, give them the same
label and specify a unique variant per action.
identifier (str): An unique identifier for app.
description (str): A verbose descriptive text for you action.
icon (str): Url path to icon which will be shown in Ftrack web.
"""
type = "Application"
preactions = ["start.timer"]
def __init__(
self, session, dbcon, label, name, executable, variant=None,
icon=None, description=None, preactions=[], plugins_presets={}
):
self.label = label
self.identifier = name
self.executable = executable
self.variant = variant
self.icon = icon
self.description = description
self.preactions.extend(preactions)
self.dbcon = dbcon
super().__init__(session, plugins_presets)
if label is None:
raise ValueError("Action missing label.")
if name is None:
raise ValueError("Action missing identifier.")
if executable is None:
raise ValueError("Action missing executable.")
def register(self):
"""Registers the action, subscribing the discover and launch topics."""
discovery_subscription = (
"topic=ftrack.action.discover and source.user.username={0}"
).format(self.session.api_user)
self.session.event_hub.subscribe(
discovery_subscription,
self._discover,
priority=self.priority
)
launch_subscription = (
"topic=ftrack.action.launch"
" and data.actionIdentifier={0}"
" and source.user.username={1}"
).format(
self.identifier,
self.session.api_user
)
self.session.event_hub.subscribe(
launch_subscription,
self._launch
)
def discover(self, session, entities, event):
"""Return true if we can handle the selected entities.
Args:
session (ftrack_api.Session): Helps to query necessary data.
entities (list): Object of selected entities.
event (ftrack_api.Event): Ftrack event causing discover callback.
"""
if (
len(entities) != 1
or entities[0].entity_type.lower() != "task"
):
return False
entity = entities[0]
if entity["parent"].entity_type.lower() == "project":
return False
avalon_project_apps = event["data"].get("avalon_project_apps", None)
avalon_project_doc = event["data"].get("avalon_project_doc", None)
if avalon_project_apps is None:
if avalon_project_doc is None:
ft_project = self.get_project_from_entity(entity)
project_name = ft_project["full_name"]
self.dbcon.install()
database = self.dbcon.database
avalon_project_doc = database[project_name].find_one({
"type": "project"
}) or False
event["data"]["avalon_project_doc"] = avalon_project_doc
if not avalon_project_doc:
return False
project_apps_config = avalon_project_doc["config"].get("apps", [])
avalon_project_apps = [
app["name"] for app in project_apps_config
] or False
event["data"]["avalon_project_apps"] = avalon_project_apps
if not avalon_project_apps:
return False
return self.identifier in avalon_project_apps
def _launch(self, event):
entities = self._translate_event(event)
preactions_launched = self._handle_preactions(
self.session, event
)
if preactions_launched is False:
return
response = self.launch(self.session, entities, event)
return self._handle_result(response)
def launch(self, session, entities, event):
"""Callback method for the custom action.
return either a bool (True if successful or False if the action failed)
or a dictionary with they keys `message` and `success`, the message
should be a string and will be displayed as feedback to the user,
success should be a bool, True if successful or False if the action
failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and
the entity id. If the entity is a hierarchical you will always get
the entity type TypedContext, once retrieved through a get operation
you will have the "real" entity type ie. example Shot, Sequence
or Asset Build.
*event* the unmodified original event
"""
entity = entities[0]
task_name = entity["name"]
asset_name = entity["parent"]["name"]
project_name = entity["project"]["full_name"]
try:
pypelib.launch_application(
project_name, asset_name, task_name, self.identifier
)
except pypelib.ApplicationLaunchFailed as exc:
self.log.error(str(exc))
return {
"success": False,
"message": str(exc)
}
except Exception:
msg = "Unexpected failure of application launch {}".format(
self.label
)
self.log.error(msg, exc_info=True)
return {
"success": False,
"message": msg
}
# Change status of task to In progress
presets = config.get_presets()["ftrack"]["ftrack_config"]
if "status_update" in presets:
statuses = presets["status_update"]
actual_status = entity["status"]["name"].lower()
already_tested = []
ent_path = "/".join(
[ent["name"] for ent in entity["link"]]
)
while True:
next_status_name = None
for key, value in statuses.items():
if key in already_tested:
continue
if actual_status in value or "_any_" in value:
if key != "_ignore_":
next_status_name = key
already_tested.append(key)
break
already_tested.append(key)
if next_status_name is None:
break
try:
query = "Status where name is \"{}\"".format(
next_status_name
)
status = session.query(query).one()
entity["status"] = status
session.commit()
self.log.debug("Changing status to \"{}\" <{}>".format(
next_status_name, ent_path
))
break
except Exception:
session.rollback()
msg = (
"Status \"{}\" in presets wasn't found"
" on Ftrack entity type \"{}\""
).format(next_status_name, entity.entity_type)
self.log.warning(msg)
return {
"success": True,
"message": "Launching {0}".format(self.label)
}

View file

@@ -3,7 +3,7 @@ import inspect
import pype.modules
from pype.modules import PypeModule
from pype.settings import system_settings
from pype.settings import get_system_settings
from pype.api import Logger
@@ -24,7 +24,7 @@ class PypeModuleManager:
return environments
def find_pype_modules(self):
settings = system_settings()
settings = get_system_settings()
modules = []
dirpath = os.path.dirname(pype.modules.__file__)
for module_name in os.listdir(dirpath):

View file

@@ -2,7 +2,7 @@ import tempfile
import os
import pyblish.api
from pype.api import config
from pype.api import get_project_settings
import inspect
ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05
@@ -24,12 +24,14 @@ def imprint_attributes(plugin):
plugin_host = file.split(os.path.sep)[-3:-2][0]
plugin_name = type(plugin).__name__
try:
config_data = config.get_presets()['plugins'][plugin_host][plugin_kind][plugin_name] # noqa: E501
settings = get_project_settings(os.environ['AVALON_PROJECT'])
settings_data = settings[plugin_host][plugin_kind][plugin_name] # noqa: E501
print(settings_data)
except KeyError:
print("preset not found")
return
for option, value in config_data.items():
for option, value in settings_data.items():
if option == "enabled" and value is False:
setattr(plugin, "active", False)
else:

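The imprinting loop above is truncated by the hunk; the standalone sketch below shows the assumed full pattern: "enabled": false turns the plugin's active flag off, and any other key is assumed to be set directly as a plugin attribute. DummyPlugin stands in for a pyblish plugin instance.

class DummyPlugin:
    active = True
    optional = False

settings_data = {"enabled": False, "optional": True}

plugin = DummyPlugin()
for option, value in settings_data.items():
    if option == "enabled" and value is False:
        setattr(plugin, "active", False)
    else:
        # Assumption: remaining options are imprinted as plain attributes.
        setattr(plugin, option, value)

print(plugin.active, plugin.optional)  # False True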
View file

@@ -8,7 +8,7 @@ Provides:
"""
from pyblish import api
from pype.api import config
from pype.api import get_current_project_settings
class CollectPresets(api.ContextPlugin):
@@ -18,23 +18,7 @@ class CollectPresets(api.ContextPlugin):
label = "Collect Presets"
def process(self, context):
presets = config.get_presets()
try:
# try if it is not in projects custom directory
# `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json`
# init.json define preset names to be used
p_init = presets["init"]
presets["colorspace"] = presets["colorspace"][p_init["colorspace"]]
presets["dataflow"] = presets["dataflow"][p_init["dataflow"]]
except KeyError:
self.log.warning("No projects custom preset available...")
presets["colorspace"] = presets["colorspace"]["default"]
presets["dataflow"] = presets["dataflow"]["default"]
self.log.info(
"Presets `colorspace` and `dataflow` loaded from `default`..."
)
project_settings = get_current_project_settings()
context.data["presets"] = project_settings
context.data["presets"] = presets
# self.log.info(context.data["presets"])
return

View file

@@ -26,10 +26,10 @@ class CreateRenderSetup(avalon.maya.Creator):
# \__| |
# \_____/
# from pype.api import config
# from pype.api import get_project_settings
# import maya.app.renderSetup.model.renderSetup as renderSetup
# presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
# layer = presets['plugins']['maya']['create']['renderSetup']["layer"]
# settings = get_project_settings(os.environ['AVALON_PROJECT'])
# layer = settings['maya']['create']['renderSetup']["layer"]
# rs = renderSetup.instance()
# rs.createRenderLayer(layer)

View file

@@ -1,7 +1,7 @@
from avalon import api
import pype.hosts.maya.plugin
import os
from pype.api import config
from pype.api import get_project_settings
import clique
@@ -74,8 +74,8 @@ class AssProxyLoader(pype.hosts.maya.plugin.ReferenceLoader):
proxyShape.dso.set(path)
proxyShape.aiOverrideShaders.set(0)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
@@ -196,8 +196,8 @@ class AssStandinLoader(api.Loader):
label = "{}:{}".format(namespace, name)
root = pm.group(name=label, empty=True)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get('ass')
if c is not None:

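This loader, and the Maya loaders in the following hunks, now read outliner colors from settings["maya"]["load"]["colors"]. A standalone look at the lookup, reusing two entries from the default color table added elsewhere in this commit; values are 0-1 RGB floats that the loaders pass on to cmds.setAttr.

colors = {"model": [0.821, 0.518, 0.117], "ass": [1.0, 0.332, 0.312]}

family = "model"
c = colors.get(family)
if c is not None:
    red, green, blue = c
    print("outliner color:", red, green, blue)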
View file

@@ -1,7 +1,7 @@
from avalon import api
import pype.hosts.maya.plugin
import os
from pype.api import config
from pype.api import get_project_settings
reload(config)
@@ -35,8 +35,8 @@ class GpuCacheLoader(api.Loader):
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get('model')
if c is not None:
cmds.setAttr(root + ".useOutlinerColor", 1)

View file

@@ -2,7 +2,7 @@ import pype.hosts.maya.plugin
from avalon import api, maya
from maya import cmds
import os
from pype.api import config
from pype.api import get_project_settings
class ReferenceLoader(pype.hosts.maya.plugin.ReferenceLoader):
@@ -77,8 +77,8 @@ class ReferenceLoader(pype.hosts.maya.plugin.ReferenceLoader):
cmds.setAttr(groupName + ".displayHandle", 1)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
groupNode.useOutlinerColor.set(1)

View file

@@ -1,6 +1,6 @@
from avalon import api
import os
from pype.api import config
from pype.api import get_project_settings
class LoadVDBtoRedShift(api.Loader):
"""Load OpenVDB in a Redshift Volume Shape"""
@@ -55,8 +55,8 @@ class LoadVDBtoRedShift(api.Loader):
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:

View file

@@ -1,5 +1,5 @@
from avalon import api
from pype.api import config
from pype.api import get_project_settings
import os
@@ -48,8 +48,8 @@ class LoadVDBtoVRay(api.Loader):
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:

View file

@@ -1,6 +1,6 @@
from avalon.maya import lib
from avalon import api
from pype.api import config
from pype.api import get_project_settings
import os
import maya.cmds as cmds
@@ -47,8 +47,8 @@ class VRayProxyLoader(api.Loader):
return
# colour the group node
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)

View file

@@ -9,7 +9,7 @@ from maya import cmds
from avalon import api, io
from avalon.maya import lib as avalon_lib, pipeline
from pype.hosts.maya import lib
from pype.api import config
from pype.api import get_project_settings
from pprint import pprint
@@ -59,8 +59,8 @@ class YetiCacheLoader(api.Loader):
group_name = "{}:{}".format(namespace, name)
group_node = cmds.group(nodes, name=group_name)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:

View file

@@ -1,7 +1,7 @@
import os
from collections import defaultdict
from pype.api import config
from pype.api import get_project_settings
import pype.hosts.maya.plugin
from pype.hosts.maya import lib
@@ -77,8 +77,8 @@ class YetiRigLoader(pype.hosts.maya.plugin.ReferenceLoader):
groupName = "{}:{}".format(namespace, name)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
settings = get_project_settings(os.environ['AVALON_PROJECT'])
colors = settings['maya']['load']['colors']
c = colors.get('yetiRig')
if c is not None:

View file

@@ -102,7 +102,8 @@ class ExtractCameraMayaScene(pype.api.Extractor):
def process(self, instance):
"""Plugin entry point."""
# get settings
ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping") # noqa: E501
ext_mapping = (instance.context.data["presets"]["maya"]
.get("ext_mapping")) # noqa: E501
if ext_mapping:
self.log.info("Looking in presets for scene type ...")
# use extension mapping for first family found

View file

@@ -172,10 +172,11 @@ class ExtractLook(pype.api.Extractor):
cspace = files_metadata[filepath]["color_space"]
linearise = False
if cspace == "sRGB":
linearise = True
# set its file node to 'raw' as tx will be linearized
files_metadata[filepath]["color_space"] = "raw"
if do_maketx:
if cspace == "sRGB":
linearise = True
# set its file node to 'raw' as tx will be linearized
files_metadata[filepath]["color_space"] = "raw"
source, mode, hash = self._process_texture(
filepath,

View file

@@ -11,7 +11,7 @@ from avalon.vendor import requests
import pyblish.api
from pype.hosts.maya import lib
from pype.api import config
from pype.api import get_system_settings
# mapping between Maya renderer names and Muster template ids
@@ -25,10 +25,10 @@ def _get_template_id(renderer):
:rtype: int
"""
templates = config.get_presets()["muster"]["templates_mapping"]
templates = get_system_settings()["modules"]["Muster"]["templates_mapping"]
if not templates:
raise RuntimeError(("Muster template mapping missing in pype-config "
"`presets/muster/templates_mapping.json`"))
raise RuntimeError(("Muster template mapping missing in "
"pype-settings"))
try:
template_id = templates[renderer]
except KeyError:

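The Muster template ids now live in system settings under ["modules"]["Muster"]["templates_mapping"]. A standalone sketch of the lookup; the example mapping reuses two entries from the removed presets/muster/templates_mapping.json, and the KeyError branch (cut off by the hunk) is an assumption here.

templates_mapping = {"arnold": 46, "vray": 37}

def get_template_id(renderer):
    if not templates_mapping:
        raise RuntimeError("Muster template mapping missing in pype-settings")
    try:
        return templates_mapping[renderer]
    except KeyError:
        # Assumption about the truncated error handling above.
        raise RuntimeError(
            "No Muster template for renderer '{}'".format(renderer)
        )

print(get_template_id("arnold"))  # 46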
View file

@@ -4,7 +4,7 @@ import contextlib
from avalon import api, io
from pype.hosts.nuke import presets
from pype.api import config
from pype.api import get_project_settings
@contextlib.contextmanager
@@ -73,7 +73,8 @@ def add_review_presets_config():
"families": list(),
"representations": list()
}
review_presets = config.get_presets()["plugins"]["global"]["publish"].get(
settings = get_project_settings(io.Session["AVALON_PROJECT"])
review_presets = settings["global"]["publish"].get(
"ExtractReview", {})
outputs = review_presets.get("outputs", {})

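Note that in the new project settings defaults later in this diff, ExtractReview nests its output definitions under "profiles". For orientation, a standalone look at where "outputs" sits in that structure, using a trimmed copy of the defaults.

extract_review = {
    "profiles": [
        {"families": [], "hosts": [], "outputs": {"h264": {"ext": "mp4"}}}
    ]
}

output_names = [
    name
    for profile in extract_review.get("profiles", [])
    for name in profile.get("outputs", {})
]
print(output_names)  # ['h264']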
View file

@@ -5,7 +5,7 @@ import subprocess
import platform
import json
import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins
from pype.api import config, resources
from pype.api import resources
import pype.lib
@@ -428,12 +428,6 @@ def burnins_from_data(
}
"""
# Use legacy processing when options are not set
if options is None or burnin_values is None:
presets = config.get_presets().get("tools", {}).get("burnins", {})
options = presets.get("options")
burnin_values = presets.get("burnins") or {}
burnin = ModifiedBurnins(input_path, options_init=options)
frame_start = data.get("frame_start")

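With the legacy preset fallback removed, callers of burnins_from_data are expected to pass options and burnin_values explicitly. The dicts below mirror the ExtractBurnin defaults added in this commit; how they are plumbed to the call site is up to the caller and not shown here.

options = {
    "font_size": 42, "opacity": 1, "bg_opacity": 0,
    "x_offset": 5, "y_offset": 5, "bg_padding": 5
}
burnin_values = {
    "TOP_LEFT": "{yy}-{mm}-{dd}",
    "TOP_RIGHT": "{anatomy[version]}",
    "BOTTOM_LEFT": "{username}",
    "BOTTOM_CENTERED": "{asset}",
    "BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}"
}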
View file

@@ -12,11 +12,6 @@ from .items import (
ItemTable, ItemImage, ItemRectangle, ItemPlaceHolder
)
try:
from pype.api.config import get_presets
except Exception:
get_presets = dict
log = logging.getLogger(__name__)
@@ -41,11 +36,7 @@ def create_slates(
)
elif slate_data is None:
slate_presets = (
get_presets()
.get("tools", {})
.get("slates")
) or {}
slate_presets = {}
slate_data = slate_presets.get(slate_name)
if slate_data is None:
raise ValueError(

View file

@@ -1,11 +1,15 @@
from .lib import (
system_settings,
project_settings,
environments
get_system_settings,
get_project_settings,
get_current_project_settings,
get_anatomy_settings,
get_environments
)
__all__ = (
"system_settings",
"project_settings",
"environments"
"get_system_settings",
"get_project_settings",
"get_current_project_settings",
"get_anatomy_settings",
"get_environments"
)

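For orientation, the renamed settings API as imported after this commit; this assumes a configured Pype environment, and the project name is hypothetical.

from pype.settings import (
    get_system_settings,
    get_project_settings,
    get_current_project_settings,
    get_anatomy_settings,
    get_environments
)

system_settings = get_system_settings()
project_settings = get_project_settings("my_project")  # hypothetical project name
anatomy_settings = get_anatomy_settings("my_project")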
View file

@@ -1,16 +0,0 @@
{
"AVALON_CONFIG": "pype",
"AVALON_PROJECTS": "{PYPE_PROJECTS_PATH}",
"AVALON_USERNAME": "avalon",
"AVALON_PASSWORD": "secret",
"AVALON_DEBUG": "1",
"AVALON_MONGO": "mongodb://localhost:2707",
"AVALON_DB": "avalon",
"AVALON_DB_DATA": "{PYPE_SETUP_PATH}/../mongo_db_data",
"AVALON_EARLY_ADOPTER": "1",
"AVALON_SCHEMA": "{PYPE_MODULE_ROOT}/schema",
"AVALON_LOCATION": "http://127.0.0.1",
"AVALON_LABEL": "Pype",
"AVALON_TIMEOUT": "1000",
"AVALON_THUMBNAIL_ROOT": ""
}

View file

@@ -1,32 +0,0 @@
{
"PYPE_STUDIO_NAME": "Studio Name",
"PYPE_STUDIO_CODE": "stu",
"PYPE_APP_ROOT": "{PYPE_SETUP_PATH}/pypeapp",
"PYPE_MODULE_ROOT": "{PYPE_SETUP_PATH}/repos/pype",
"PYPE_PROJECT_PLUGINS": "",
"STUDIO_SOFT": "{PYP_SETUP_ROOT}/soft",
"FFMPEG_PATH": {
"windows": "{VIRTUAL_ENV}/localized/ffmpeg_exec/windows/bin;{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/windows/bin",
"darwin": "{VIRTUAL_ENV}/localized/ffmpeg_exec/darwin/bin:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/darwin/bin",
"linux": "{VIRTUAL_ENV}/localized/ffmpeg_exec/linux:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/linux"
},
"PATH": [
"{PYPE_CONFIG}/launchers",
"{PYPE_APP_ROOT}",
"{FFMPEG_PATH}",
"{PATH}"
],
"PYPE_OCIO_CONFIG": "{STUDIO_SOFT}/OpenColorIO-Configs",
"PYTHONPATH": {
"windows": "{VIRTUAL_ENV}/Lib/site-packages;{PYPE_MODULE_ROOT}/pype/tools;{PYTHONPATH}",
"linux": "{VIRTUAL_ENV}/lib/python{PYTHON_VERSION}/site-packages:{PYPE_MODULE_ROOT}/pype/tools:{PYTHONPATH}",
"darwin": "{VIRTUAL_ENV}/lib/python{PYTHON_VERSION}/site-packages:{PYPE_MODULE_ROOT}/pype/tools:{PYTHONPATH}"
},
"PYPE_PROJECT_CONFIGS": "{PYPE_SETUP_PATH}/../studio-project-configs",
"PYPE_PYTHON_EXE": {
"windows": "{VIRTUAL_ENV}/Scripts/python.exe",
"linux": "{VIRTUAL_ENV}/Scripts/python",
"darwin": "{VIRTUAL_ENV}/bin/python"
},
"PYBLISH_GUI": "pyblish_pype"
}

View file

@@ -0,0 +1,29 @@
{
"fps": 25,
"frameStart": 1001,
"frameEnd": 1001,
"clipIn": 1,
"clipOut": 1,
"handleStart": 0,
"handleEnd": 0,
"resolutionWidth": 1920,
"resolutionHeight": 1080,
"pixelAspect": 1,
"applications": [],
"task_short_names": {
"Generic": "gener",
"Art": "art",
"Modeling": "mdl",
"Texture": "tex",
"Lookdev": "look",
"Rigging": "rig",
"Edit": "edit",
"Layout": "lay",
"Setdress": "dress",
"Animation": "anim",
"FX": "fx",
"Lighting": "lgt",
"Paint": "paint",
"Compositing": "comp"
}
}

View file

@@ -1,42 +0,0 @@
{
"nuke": {
"root": {
"colorManagement": "Nuke",
"OCIO_config": "nuke-default",
"defaultViewerLUT": "Nuke Root LUTs",
"monitorLut": "sRGB",
"int8Lut": "sRGB",
"int16Lut": "sRGB",
"logLut": "Cineon",
"floatLut": "linear"
},
"viewer": {
"viewerProcess": "sRGB"
},
"write": {
"render": {
"colorspace": "linear"
},
"prerender": {
"colorspace": "linear"
},
"still": {
"colorspace": "sRGB"
}
},
"read": {
"[^-a-zA-Z0-9]beauty[^-a-zA-Z0-9]": "linear",
"[^-a-zA-Z0-9](P|N|Z|crypto)[^-a-zA-Z0-9]": "linear",
"[^-a-zA-Z0-9](plateRef)[^-a-zA-Z0-9]": "sRGB"
}
},
"maya": {
},
"houdini": {
},
"resolve": {
}
}

View file

@@ -1,55 +0,0 @@
{
"nuke": {
"nodes": {
"connected": true,
"modifymetadata": {
"_id": "connect_metadata",
"_previous": "ENDING",
"metadata.set.pype_studio_name": "{PYPE_STUDIO_NAME}",
"metadata.set.avalon_project_name": "{AVALON_PROJECT}",
"metadata.set.avalon_project_code": "{PYPE_STUDIO_CODE}",
"metadata.set.avalon_asset_name": "{AVALON_ASSET}"
},
"crop": {
"_id": "connect_crop",
"_previous": "connect_metadata",
"box": [
"{metadata.crop.x}",
"{metadata.crop.y}",
"{metadata.crop.right}",
"{metadata.crop.top}"
]
},
"write": {
"render": {
"_id": "output_write",
"_previous": "connect_crop",
"file_type": "exr",
"datatype": "16 bit half",
"compression": "Zip (1 scanline)",
"autocrop": true,
"tile_color": "0xff0000ff",
"channels": "rgb"
},
"prerender": {
"_id": "output_write",
"_previous": "connect_crop",
"file_type": "exr",
"datatype": "16 bit half",
"compression": "Zip (1 scanline)",
"autocrop": false,
"tile_color": "0xc9892aff",
"channels": "rgba"
},
"still": {
"_previous": "connect_crop",
"channels": "rgba",
"file_type": "tiff",
"datatype": "16 bit",
"compression": "LZW",
"tile_color": "0x4145afff"
}
}
}
}
}

View file

@@ -0,0 +1,129 @@
{
"hiero": {
"workfile": {
"ocioConfigName": "nuke-default",
"ocioconfigpath": {
"windows": [],
"darwin": [],
"linux": []
},
"workingSpace": "linear",
"sixteenBitLut": "sRGB",
"eightBitLut": "sRGB",
"floatLut": "linear",
"logLut": "Cineon",
"viewerLut": "sRGB",
"thumbnailLut": "sRGB"
},
"regexInputs": {
"inputs": [
{
"regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)",
"colorspace": "sRGB"
}
]
}
},
"nuke": {
"workfile": {
"colorManagement": "Nuke",
"OCIO_config": "nuke-default",
"customOCIOConfigPath": {
"windows": [],
"darwin": [],
"linux": []
},
"workingSpaceLUT": "linear",
"monitorLut": "sRGB",
"int8Lut": "sRGB",
"int16Lut": "sRGB",
"logLut": "Cineon",
"floatLut": "linear"
},
"nodes": {
"requiredNodes": [
{
"plugins": [
"CreateWriteRender"
],
"nukeNodeClass": "Write",
"knobs": [
{
"name": "file_type",
"value": "exr"
},
{
"name": "datatype",
"value": "16 bit half"
},
{
"name": "compression",
"value": "Zip (1 scanline)"
},
{
"name": "autocrop",
"value": "True"
},
{
"name": "tile_color",
"value": "0xff0000ff"
},
{
"name": "channels",
"value": "rgb"
},
{
"name": "colorspace",
"value": "linear"
}
]
},
{
"plugins": [
"CreateWritePrerender"
],
"nukeNodeClass": "Write",
"knobs": [
{
"name": "file_type",
"value": "exr"
},
{
"name": "datatype",
"value": "16 bit half"
},
{
"name": "compression",
"value": "Zip (1 scanline)"
},
{
"name": "autocrop",
"value": "False"
},
{
"name": "tile_color",
"value": "0xff0000ff"
},
{
"name": "channels",
"value": "rgb"
},
{
"name": "colorspace",
"value": "linear"
}
]
}
],
"customNodes": []
},
"regexInputs": {
"inputs": [
{
"regex": "[^-a-zA-Z0-9]beauty[^-a-zA-Z0-9]",
"colorspace": "linear"
}
]
}
}
}

View file

@@ -13,9 +13,6 @@
"file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{representation}",
"path": "{@folder}/{@file}"
},
"texture": {
"path": "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}"
},
"publish": {
"folder": "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}",
"file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{representation}",
@@ -26,5 +23,7 @@
"folder": "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/master",
"file": "{project[code]}_{asset}_{subset}_master<_{output}><.{frame}>.{representation}",
"path": "{@folder}/{@file}"
}
},
"delivery": {},
"other": {}
}

View file

@@ -0,0 +1,13 @@
{
"publish": {
"ExtractCelactionDeadline": {
"enabled": true,
"deadline_department": "",
"deadline_priority": 50,
"deadline_pool": "",
"deadline_pool_secondary": "",
"deadline_group": "",
"deadline_chunk_size": 10
}
}
}

View file

@@ -0,0 +1,98 @@
{
"ftrack_actions_path": [],
"ftrack_events_path": [],
"events": {
"sync_to_avalon": {
"enabled": true,
"statuses_name_change": [
"ready",
"not ready"
]
},
"push_frame_values_to_task": {
"enabled": true,
"interest_entity_types": [
"shot",
"asset build"
],
"interest_attributess": [
"frameStart",
"frameEnd"
]
},
"thumbnail_updates": {
"enabled": true,
"levels": 2
},
"user_assignment": {
"enabled": true
},
"status_update": {
"enabled": true,
"mapping": {
"In Progress": [
"__any__"
],
"Ready": [
"Not Ready"
],
"__ignore__": [
"in prgoress",
"omitted",
"on hold"
]
}
},
"status_task_to_parent": {
"enabled": true,
"parent_status_match_all_task_statuses": {
"Completed": [
"Approved",
"Omitted"
]
},
"parent_status_by_task_status": {
"In Progress": [
"in progress",
"change requested",
"retake",
"pending review"
]
}
},
"status_task_to_version": {
"enabled": true,
"mapping": {
"Approved": [
"Complete"
]
}
},
"status_version_to_task": {
"enabled": true,
"mapping": {
"Complete": [
"Approved",
"Complete"
]
}
},
"first_version_status": {
"enabled": true,
"status": ""
},
"next_task_update": {
"enabled": true,
"mapping": {
"Ready": "Not Ready"
}
}
},
"publish": {
"IntegrateFtrackNote": {
"enabled": true,
"note_with_intent_template": "",
"note_labels": []
}
}
}

View file

@@ -1,16 +0,0 @@
{
"sync_to_avalon": {
"statuses_name_change": ["not ready", "ready"]
},
"status_update": {
"_ignore_": ["in progress", "ommited", "on hold"],
"Ready": ["not ready"],
"In Progress" : ["_any_"]
},
"status_version_to_task": {
"__description__": "Status `from` (key) must be lowered!",
"in progress": "in progress",
"approved": "approved"
}
}

View file

@@ -1,165 +0,0 @@
[{
"label": "FPS",
"key": "fps",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"write_security_role": ["ALL"],
"read_security_role": ["ALL"],
"default": null,
"config": {
"isdecimal": true
}
}, {
"label": "Applications",
"key": "applications",
"type": "enumerator",
"entity_type": "show",
"group": "avalon",
"config": {
"multiselect": true,
"data": [
{"blender_2.80": "Blender 2.80"},
{"blender_2.81": "Blender 2.81"},
{"blender_2.82": "Blender 2.82"},
{"blender_2.83": "Blender 2.83"},
{"celaction_local": "CelAction2D Local"},
{"maya_2017": "Maya 2017"},
{"maya_2018": "Maya 2018"},
{"maya_2019": "Maya 2019"},
{"nuke_10.0": "Nuke 10.0"},
{"nuke_11.2": "Nuke 11.2"},
{"nuke_11.3": "Nuke 11.3"},
{"nuke_12.0": "Nuke 12.0"},
{"nukex_10.0": "NukeX 10.0"},
{"nukex_11.2": "NukeX 11.2"},
{"nukex_11.3": "NukeX 11.3"},
{"nukex_12.0": "NukeX 12.0"},
{"nukestudio_10.0": "NukeStudio 10.0"},
{"nukestudio_11.2": "NukeStudio 11.2"},
{"nukestudio_11.3": "NukeStudio 11.3"},
{"nukestudio_12.0": "NukeStudio 12.0"},
{"harmony_17": "Harmony 17"},
{"houdini_16.5": "Houdini 16.5"},
{"houdini_17": "Houdini 17"},
{"houdini_18": "Houdini 18"},
{"photoshop_2020": "Photoshop 2020"},
{"python_3": "Python 3"},
{"python_2": "Python 2"},
{"premiere_2019": "Premiere Pro 2019"},
{"premiere_2020": "Premiere Pro 2020"},
{"resolve_16": "BM DaVinci Resolve 16"}
]
}
}, {
"label": "Avalon auto-sync",
"key": "avalon_auto_sync",
"type": "boolean",
"entity_type": "show",
"group": "avalon",
"write_security_role": ["API", "Administrator"],
"read_security_role": ["API", "Administrator"]
}, {
"label": "Intent",
"key": "intent",
"type": "enumerator",
"entity_type": "assetversion",
"group": "avalon",
"config": {
"multiselect": false,
"data": [
{"test": "Test"},
{"wip": "WIP"},
{"final": "Final"}
]
}
}, {
"label": "Library Project",
"key": "library_project",
"type": "boolean",
"entity_type": "show",
"group": "avalon",
"write_security_role": ["API", "Administrator"],
"read_security_role": ["API", "Administrator"]
}, {
"label": "Clip in",
"key": "clipIn",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}, {
"label": "Clip out",
"key": "clipOut",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}, {
"label": "Frame start",
"key": "frameStart",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}, {
"label": "Frame end",
"key": "frameEnd",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}, {
"label": "Tools",
"key": "tools_env",
"type": "enumerator",
"is_hierarchical": true,
"group": "avalon",
"config": {
"multiselect": true,
"data": [
{"mtoa_3.0.1": "mtoa_3.0.1"},
{"mtoa_3.1.1": "mtoa_3.1.1"},
{"mtoa_3.2.0": "mtoa_3.2.0"},
{"yeti_2.1.2": "yeti_2.1"}
]
}
}, {
"label": "Resolution Width",
"key": "resolutionWidth",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}, {
"label": "Resolution Height",
"key": "resolutionHeight",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}, {
"label": "Pixel aspect",
"key": "pixelAspect",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"config": {
"isdecimal": true
}
}, {
"label": "Frame handles start",
"key": "handleStart",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}, {
"label": "Frame handles end",
"key": "handleEnd",
"type": "number",
"is_hierarchical": true,
"group": "avalon",
"default": null
}
]

View file

@@ -1,5 +0,0 @@
{
"server_url": "",
"api_key": "",
"api_user": ""
}

View file

@@ -1,5 +0,0 @@
{
"TestAction": {
"ignore_me": true
}
}

View file

@@ -1,18 +0,0 @@
{
"fps": 25,
"frameStart": 1001,
"frameEnd": 1100,
"clipIn": 1001,
"clipOut": 1100,
"handleStart": 10,
"handleEnd": 10,
"resolutionHeight": 1080,
"resolutionWidth": 1920,
"pixelAspect": 1.0,
"applications": [
"maya_2019", "nuke_11.3", "nukex_11.3", "nukestudio_11.3", "deadline"
],
"tools_env": [],
"avalon_auto_sync": true
}

View file

@@ -0,0 +1,39 @@
{
"object_types": ["Milestone", "Task", "Folder", "Asset Build", "Shot", "Library", "Sequence"],
"version_workflow": ["Pending Review", "Client Review", "On Farm", "Reviewed", "Render Complete", "Approved", "CBB", "Delivered", "Render Failed", "data"],
"task_workflow": ["Not Ready", "Ready", "Change Requested", "In progress", "Pending Review", "On Farm", "Waiting", "Render Complete", "Complete", "CBB", "On Hold", "Render Failed", "Omitted"],
"overrides": [{
"task_types": ["Animation"],
"statuses": ["Not Ready", "Ready", "Change Requested", "Blocking", "Animating", "blocking review", "anim review", "Complete", "CBB", "On Hold", "Omitted"]
}, {
"task_types": ["Lighting"],
"statuses": ["Not Ready", "Ready", "Change Requested", "In progress", "To render", "On Farm", "Render Complete", "Complete", "CBB", "On Hold", "Render Failed", "Omitted"]
}],
"task_type_schema": ["Layout", "Animation", "Modeling", "Previz", "Lookdev", "FX", "Lighting", "Compositing", "Rigging", "Texture", "Matte-paint", "Roto-paint", "Art", "Match-moving", "Production", "Build", "Setdress", "Edit", "R&D", "Boards"],
"schemas": [{
"object_type": "Shot",
"statuses": ["Omitted", "Normal", "Complete"],
"task_types": []
}, {
"object_type": "Asset Build",
"statuses": ["Omitted", "Normal", "Complete"],
"task_types": ["Setups", "Sets", "Characters", "Props", "Locations", "Assembly", "R&D", "Elements"]
}, {
"object_type": "Milestone",
"statuses": ["Normal", "Complete"],
"task_types": ["Generic"]
}],
"task_templates": [{
"name": "Character",
"task_types": ["Art", "Modeling", "Lookdev", "Rigging"]
}, {
"name": "Element",
"task_types": ["Modeling", "Lookdev"]
}, {
"name": "Prop",
"task_types": ["Modeling", "Lookdev", "Rigging"]
}, {
"name": "Location",
"task_types": ["Layout", "Setdress"]
}]
}

View file

@@ -0,0 +1,182 @@
{
"publish": {
"IntegrateMasterVersion": {
"enabled": true
},
"ExtractJpegEXR": {
"enabled": true,
"ffmpeg_args": {
"input": [],
"output": []
}
},
"ExtractReview": {
"enabled": true,
"profiles": [
{
"families": [],
"hosts": [],
"outputs": {
"h264": {
"ext": "mp4",
"tags": [
"burnin",
"ftrackreview"
],
"ffmpeg_args": {
"video_filters": [],
"audio_filters": [],
"input": [
"-gamma 2.2"
],
"output": [
"-pix_fmt yuv420p",
"-crf 18",
"-intra"
]
},
"filter": {
"families": [
"render",
"review",
"ftrack"
]
}
}
}
}
]
},
"ExtractBurnin": {
"enabled": true,
"options": {
"font_size": 42,
"opacity": 1,
"bg_opacity": 0,
"x_offset": 5,
"y_offset": 5,
"bg_padding": 5
},
"profiles": [
{
"families": [],
"hosts": [],
"burnins": {
"burnin": {
"TOP_LEFT": "{yy}-{mm}-{dd}",
"TOP_CENTERED": "",
"TOP_RIGHT": "{anatomy[version]}",
"BOTTOM_LEFT": "{username}",
"BOTTOM_CENTERED": "{asset}",
"BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}"
}
}
}
]
},
"IntegrateAssetNew": {
"template_name_profiles": {
"publish": {
"families": [],
"tasks": []
},
"render": {
"families": [
"review",
"render",
"prerender"
]
}
}
},
"ProcessSubmittedJobOnFarm": {
"enabled": true,
"deadline_department": "",
"deadline_pool": "",
"deadline_group": "",
"deadline_chunk_size": "",
"deadline_priority": "",
"aov_filter": {
"maya": [
".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*"
],
"nuke": [],
"aftereffects": [
".*"
],
"celaction": [
".*"
]
}
}
},
"tools": {
"Creator": {
"families_smart_select": {
"Render": [
"light",
"render"
],
"Model": [
"model"
],
"Layout": [
"layout"
],
"Look": [
"look"
],
"Rig": [
"rigging",
"rig"
]
}
},
"Workfiles": {
"last_workfile_on_startup": [
{
"hosts": [],
"tasks": [],
"enabled": true
}
],
"sw_folders": {
"compositing": [
"nuke",
"ae"
],
"modeling": [
"maya",
"blender",
"zbrush"
],
"lookdev": [
"substance",
"textures"
]
}
}
},
"project_folder_structure": {
"__project_root__": {
"prod": {},
"resources": {
"footage": {
"plates": {},
"offline": {}
},
"audio": {},
"art_dept": {}
},
"editorial": {},
"assets[ftrack.Library]": {
"characters[ftrack]": {},
"locations[ftrack]": {}
},
"shots[ftrack.Sequence]": {
"scripts": {},
"editorial[ftrack.Folder]": {}
}
}
}
}

View file

@@ -1,8 +0,0 @@
{
"Model": ["model"],
"Render Globals": ["light", "render"],
"Layout": ["layout"],
"Set Dress": ["setdress"],
"Look": ["look"],
"Rig": ["rigging"]
}

View file

@@ -1,22 +0,0 @@
{
"__project_root__": {
"prod" : {},
"resources" : {
"footage": {
"plates": {},
"offline": {}
},
"audio": {},
"art_dept": {}
},
"editorial" : {},
"assets[ftrack.Library]": {
"characters[ftrack]": {},
"locations[ftrack]": {}
},
"shots[ftrack.Sequence]": {
"scripts": {},
"editorial[ftrack.Folder]": {}
}
}
}

View file

@@ -1,8 +0,0 @@
{
"compositing": ["nuke", "ae"],
"modeling": ["maya", "app2"],
"lookdev": ["substance"],
"animation": [],
"lighting": [],
"rigging": []
}

View file

@@ -1,7 +0,0 @@
{
"last_workfile_on_startup": [
{
"enabled": false
}
]
}

View file

@@ -0,0 +1,7 @@
{
"publish": {},
"general": {
"skip_resolution_check": false,
"skip_timelines_check": false
}
}

View file

@@ -0,0 +1,14 @@
{
"publish": {
"CollectInstanceVersion": {
"enabled": false
},
"ExtractReviewCutUpVideo": {
"enabled": true,
"tags_addition": [
"review"
]
}
},
"filters": {}
}

View file

@@ -0,0 +1,319 @@
{
"maya_capture": {
"Codec": {
"compression": "jpg",
"format": "image",
"quality": 95
},
"Display Options": {
"background": [
0.7,
0.7,
0.7
],
"backgroundBottom": [
0.7,
0.7,
0.7
],
"backgroundTop": [
0.7,
0.7,
0.7
],
"override_display": true
},
"Generic": {
"isolate_view": true,
"off_screen": true
},
"IO": {
"name": "",
"open_finished": true,
"raw_frame_numbers": true,
"recent_playblasts": [],
"save_file": true
},
"PanZoom": {
"pan_zoom": true
},
"Renderer": {
"rendererName": "vp2Renderer"
},
"Resolution": {
"width": 1080,
"height": 1920,
"percent": 1.0,
"mode": "Custom"
},
"Time Range": {
"start_frame": 0,
"end_frame": 0,
"frame": "",
"time": "Time Slider"
},
"Viewport Options": {
"cameras": false,
"clipGhosts": false,
"controlVertices": false,
"deformers": false,
"dimensions": false,
"displayLights": 0,
"dynamicConstraints": false,
"dynamics": false,
"fluids": false,
"follicles": false,
"gpuCacheDisplayFilter": false,
"greasePencils": false,
"grid": false,
"hairSystems": true,
"handles": false,
"high_quality": true,
"hud": false,
"hulls": false,
"ikHandles": false,
"imagePlane": true,
"joints": false,
"lights": false,
"locators": false,
"manipulators": false,
"motionTrails": false,
"nCloths": false,
"nParticles": false,
"nRigids": false,
"nurbsCurves": false,
"nurbsSurfaces": false,
"override_viewport_options": true,
"particleInstancers": false,
"pivots": false,
"planes": false,
"pluginShapes": false,
"polymeshes": true,
"shadows": true,
"strokes": false,
"subdivSurfaces": false,
"textures": false,
"twoSidedLighting": true
},
"Camera Options": {
"displayGateMask": false,
"displayResolution": false,
"displayFilmGate": false,
"displayFieldChart": false,
"displaySafeAction": false,
"displaySafeTitle": false,
"displayFilmPivot": false,
"displayFilmOrigin": false,
"overscan": 1.0
}
},
"publish": {
"CollectMayaRender": {
"sync_workfile_version": true
},
"ValidateCameraAttributes": {
"enabled": true,
"optional": true
},
"ValidateModelName": {
"enabled": true,
"material_file": {
"windows": "",
"darwin": "",
"linux": ""
},
"regex": ""
},
"ValidateAssemblyName": {
"enabled": true
},
"ValidateShaderName": {
"enabled": true,
"regex": ""
},
"ValidateMeshHasOverlappingUVs": {
"enabled": false
},
"ExtractCameraAlembic": {
"enabled": true,
"optional": true,
"bake_attributes": []
},
"MayaSubmitDeadline": {
"enabled": true,
"tile_assembler_plugin": "DraftTileAssembler"
}
},
"load": {
"colors": {
"model": [
0.821,
0.518,
0.117
],
"rig": [
0.144,
0.443,
0.463
],
"pointcache": [
0.368,
0.821,
0.117
],
"animation": [
0.368,
0.821,
0.117
],
"ass": [
1.0,
0.332,
0.312
],
"camera": [
0.447,
0.312,
1.0
],
"fbx": [
1.0,
0.931,
0.312
],
"mayaAscii": [
0.312,
1.0,
0.747
],
"setdress": [
0.312,
1.0,
0.747
],
"layout": [
0.312,
1.0,
0.747
],
"vdbcache": [
0.312,
1.0,
0.428
],
"vrayproxy": [
0.258,
0.95,
0.541
],
"yeticache": [
0.2,
0.8,
0.3
],
"yetiRig": [
0.0,
0.8,
0.5
]
}
},
"workfile_build": {
"profiles": [
{
"tasks": [
"Lighting"
],
"current_context": [
{
"subset_name_filters": [
"\".+[Mm]ain\""
],
"families": [
"model"
],
"repre_names": [
"abc",
"ma"
],
"loaders": [
"ReferenceLoader"
]
},
{
"subset_name_filters": [],
"families": [
"animation",
"pointcache"
],
"repre_names": [
"abc"
],
"loaders": [
"ReferenceLoader"
]
},
{
"subset_name_filters": [],
"families": [
"rendersetup"
],
"repre_names": [
"json"
],
"loaders": [
"RenderSetupLoader"
]
},
{
"subset_name_filters": [],
"families": [
"camera"
],
"repre_names": [
"abc"
],
"loaders": [
"ReferenceLoader"
]
}
],
"linked_assets": [
{
"subset_name_filters": [],
"families": [
"sedress"
],
"repre_names": [
"ma"
],
"loaders": [
"ReferenceLoader"
]
},
{
"subset_name_filters": [],
"families": [
"ArnoldStandin"
],
"repre_names": [
"ass"
],
"loaders": [
"assLoader"
]
}
]
}
]
},
"filters": {
"preset 1": {
"ValidateNoAnimation": false,
"ValidateShapeDefaultNames": false
},
"preset 2": {
"ValidateNoAnimation": false
}
}
}

View file

@@ -1,108 +0,0 @@
{
"Codec": {
"compression": "jpg",
"format": "image",
"quality": 95
},
"Display Options": {
"background": [
0.7137254901960784,
0.7137254901960784,
0.7137254901960784
],
"backgroundBottom": [
0.7137254901960784,
0.7137254901960784,
0.7137254901960784
],
"backgroundTop": [
0.7137254901960784,
0.7137254901960784,
0.7137254901960784
],
"override_display": true
},
"Generic": {
"isolate_view": true,
"off_screen": true
},
"IO": {
"name": "",
"open_finished": false,
"raw_frame_numbers": false,
"recent_playblasts": [],
"save_file": false
},
"PanZoom": {
"pan_zoom": true
},
"Renderer": {
"rendererName": "vp2Renderer"
},
"Resolution": {
"height": 1080,
"mode": "Custom",
"percent": 1.0,
"width": 1920
},
"Time Range": {
"end_frame": 25,
"frame": "",
"start_frame": 0,
"time": "Time Slider"
},
"Viewport Options": {
"cameras": false,
"clipGhosts": false,
"controlVertices": false,
"deformers": false,
"dimensions": false,
"displayLights": 0,
"dynamicConstraints": false,
"dynamics": false,
"fluids": false,
"follicles": false,
"gpuCacheDisplayFilter": false,
"greasePencils": false,
"grid": false,
"hairSystems": false,
"handles": false,
"high_quality": true,
"hud": false,
"hulls": false,
"ikHandles": false,
"imagePlane": false,
"joints": false,
"lights": false,
"locators": false,
"manipulators": false,
"motionTrails": false,
"nCloths": false,
"nParticles": false,
"nRigids": false,
"nurbsCurves": false,
"nurbsSurfaces": false,
"override_viewport_options": true,
"particleInstancers": false,
"pivots": false,
"planes": false,
"pluginShapes": false,
"polymeshes": true,
"shadows": false,
"strokes": false,
"subdivSurfaces": false,
"textures": false,
"twoSidedLighting": true
},
"Camera Options": {
"displayGateMask": false,
"displayResolution": false,
"displayFilmGate": false,
"displayFieldChart": false,
"displaySafeAction": false,
"displaySafeTitle": false,
"displayFilmPivot": false,
"displayFilmOrigin": false,
"overscan": 1.0
}
}

View file

@@ -1,19 +0,0 @@
{
"3delight": 41,
"arnold": 46,
"arnold_sf": 57,
"gelato": 30,
"harware": 3,
"krakatoa": 51,
"file_layers": 7,
"mentalray": 2,
"mentalray_sf": 6,
"redshift": 55,
"renderman": 29,
"software": 1,
"software_sf": 5,
"turtle": 10,
"vector": 4,
"vray": 37,
"ffmpeg": 48
}

View file

@@ -0,0 +1,90 @@
{
"create": {
"CreateWriteRender": {
"fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}"
},
"CreateWritePrerender": {
"fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}"
}
},
"publish": {
"ExtractThumbnail": {
"enabled": true,
"nodes": {
"Reformat": [
[
"type",
"to format"
],
[
"format",
"HD_1080"
],
[
"filter",
"Lanczos6"
],
[
"black_outside",
true
],
[
"pbb",
false
]
]
}
},
"ValidateNukeWriteKnobs": {
"enabled": true,
"knobs": {
"render": {
"review": true
}
}
},
"ExtractReviewDataLut": {
"enabled": true
},
"ExtractReviewDataMov": {
"enabled": true,
"viewer_lut_raw": false
},
"ExtractSlateFrame": {
"viewer_lut_raw": false
},
"NukeSubmitDeadline": {
"deadline_priority": 50,
"deadline_pool": "",
"deadline_pool_secondary": "",
"deadline_chunk_size": 1
}
},
"workfile_build": {
"profiles": [
{
"tasks": [
"compositing"
],
"current_context": [
{
"subset_name_filters": [],
"families": [
"render",
"plate"
],
"repre_names": [
"exr",
"dpx"
],
"loaders": [
"LoadSequence"
]
}
],
"linked_assets": []
}
]
},
"filters": {}
}

View file

@@ -1,11 +0,0 @@
{
"ExtractCelactionDeadline": {
"enabled": true,
"deadline_department": "",
"deadline_priority": 60,
"deadline_pool": "",
"deadline_pool_secondary": "",
"deadline_group": "",
"deadline_chunk_size": 10
}
}

View file

@@ -1,7 +0,0 @@
{
"IntegrateFtrackNote": {
"enabled": true,
"note_with_intent_template": "{intent}: {comment}",
"note_labels": []
}
}

View file

@@ -1,97 +0,0 @@
{
"IntegrateMasterVersion": {
"enabled": false
},
"ExtractJpegEXR": {
"enabled": true,
"ffmpeg_args": {
"input": [
"-gamma 2.2"
],
"output": []
}
},
"ExtractReview": {
"enabled": true,
"profiles": [
{
"families": [],
"hosts": [],
"outputs": {
"h264": {
"ext": "mp4",
"tags": [
"burnin",
"ftrackreview"
],
"ffmpeg_args": {
"video_filters": [],
"audio_filters": [],
"input": [
"-gamma 2.2"
],
"output": [
"-pix_fmt yuv420p",
"-crf 18",
"-intra"
]
},
"filter": {
"families": [
"render",
"review",
"ftrack"
]
}
}
}
}
]
},
"ExtractBurnin": {
"enabled": true,
"options": {
"font_size": 42,
"opacity": 1,
"bg_opacity": 0,
"x_offset": 5,
"y_offset": 5,
"bg_padding": 5
},
"profiles": [
{
"burnins": {
"burnin": {
"TOP_LEFT": "{yy}-{mm}-{dd}",
"TOP_RIGHT": "{anatomy[version]}",
"TOP_CENTERED": "",
"BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}",
"BOTTOM_CENTERED": "{asset}",
"BOTTOM_LEFT": "{username}"
}
}
}
]
},
"IntegrateAssetNew": {
"template_name_profiles": {
"publish": {
"families": [],
"tasks": []
},
"render": {
"families": [
"review",
"render",
"prerender"
]
}
}
},
"ProcessSubmittedJobOnFarm": {
"enabled": true,
"deadline_department": "",
"deadline_pool": "",
"deadline_group": ""
}
}

View file

@@ -1,9 +0,0 @@
{
"Preset n1": {
"ValidateNoAnimation": false,
"ValidateShapeDefaultNames": false
},
"Preset n2": {
"ValidateNoAnimation": false
}
}

View file

@@ -1,18 +0,0 @@
{
"colors": {
"model": [0.821, 0.518, 0.117],
"rig": [0.144, 0.443, 0.463],
"pointcache": [0.368, 0.821, 0.117],
"animation": [0.368, 0.821, 0.117],
"ass": [1.0, 0.332, 0.312],
"camera": [0.447, 0.312, 1.0],
"fbx": [1.0, 0.931, 0.312],
"mayaAscii": [0.312, 1.0, 0.747],
"setdress": [0.312, 1.0, 0.747],
"layout": [0.312, 1.0, 0.747],
"vdbcache": [0.312, 1.0, 0.428],
"vrayproxy": [0.258, 0.95, 0.541],
"yeticache": [0.2, 0.8, 0.3],
"yetiRig": [0, 0.8, 0.5]
}
}

Some files were not shown because too many files have changed in this diff.