diff --git a/.gitignore b/.gitignore
index b1f6306a99..28d1013fc0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -71,6 +71,10 @@ package-lock.json
 pype/premiere/ppro/js/debug.log
 
-# Idea
+# IDEA
 ######
 .idea/
+
+# VScode files
+.vscode/
+.env
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
index cb3627aa20..f08a36506c 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -15,3 +15,9 @@
 	path = repos/acre
 	url = git@github.com:antirotor/acre.git
 	branch = fix/unformatted-tokens
+[submodule "pype/modules/ftrack/python2_vendor/ftrack-python-api"]
+	path = pype/modules/ftrack/python2_vendor/ftrack-python-api
+	url = https://bitbucket.org/ftrack/ftrack-python-api.git
+[submodule "pype/modules/ftrack/python2_vendor/arrow"]
+	path = pype/modules/ftrack/python2_vendor/arrow
+	url = git@github.com:arrow-py/arrow.git
diff --git a/pype.py b/pype.py
index 769e8c8f6f..3d700aebdb 100644
--- a/pype.py
+++ b/pype.py
@@ -218,7 +218,7 @@ def boot():
 def get_info() -> list:
     """Print additional information to console."""
     from pype.lib.mongo import get_default_components
-    from pype.lib.log import LOG_DATABASE_NAME, LOG_COLLECTION_NAME
+    from pype.lib.log import PypeLogger
 
     components = get_default_components()
 
@@ -242,14 +242,18 @@ def get_info() -> list:
         infos.append(("Using Muster at", os.environ.get("MUSTER_REST_URL")))
 
-    if components["host"]:
-        infos.append(("Logging to MongoDB", components["host"]))
-        infos.append((" - port", components["port"] or ""))
-        infos.append((" - database", LOG_DATABASE_NAME))
-        infos.append((" - collection", LOG_COLLECTION_NAME))
-        infos.append((" - user", components["username"] or ""))
-        if components["auth_db"]:
-            infos.append((" - auth source", components["auth_db"]))
+    # Reinitialize
+    PypeLogger.initialize()
+
+    log_components = PypeLogger.log_mongo_url_components
+    if log_components["host"]:
+        infos.append(("Logging to MongoDB", log_components["host"]))
+        infos.append((" - port", log_components["port"] or ""))
+        infos.append((" - database", PypeLogger.log_database_name))
+        infos.append((" - collection", PypeLogger.log_collection_name))
+        infos.append((" - user", log_components["username"] or ""))
+        if log_components["auth_db"]:
+            infos.append((" - auth source", log_components["auth_db"]))
 
     maximum = max([len(i[0]) for i in infos])
     formatted = []
diff --git a/pype/api.py b/pype/api.py
index a2b4f22e72..9c0d796128 100644
--- a/pype/api.py
+++ b/pype/api.py
@@ -9,7 +9,15 @@ from .lib import (
     PypeLogger,
     Anatomy,
     config,
-    execute
+    execute,
+    run_subprocess,
+    version_up,
+    get_asset,
+    get_hierarchy,
+    get_version_from_path,
+    get_last_version_from_path,
+    source_hash,
+    get_latest_version
 )
 
 from .lib.mongo import (
@@ -37,19 +45,6 @@ from .action import (
     RepairContextAction
 )
 
-from .lib import (
-    version_up,
-    get_asset,
-    get_hierarchy,
-    get_version_from_path,
-    get_last_version_from_path,
-    source_hash,
-    get_latest_version
-)
-
-# Special naming case for subprocess since its a built-in method.
-from .lib import _subprocess as subprocess
-
 # for backward compatibility with Pype 2
 Logger = PypeLogger
 
@@ -94,6 +89,6 @@ __all__ = [
     "get_last_version_from_path",
     "source_hash",
 
-    "subprocess",
+    "run_subprocess",
     "get_latest_version"
 ]
diff --git a/pype/hooks/global/pre_global_host_data.py b/pype/hooks/global/pre_global_host_data.py
index 4910d08010..cb497814f5 100644
--- a/pype/hooks/global/pre_global_host_data.py
+++ b/pype/hooks/global/pre_global_host_data.py
@@ -11,7 +11,9 @@ from pype.api import (
 from pype.lib import (
     env_value_to_bool,
     PreLaunchHook,
-    ApplicationLaunchFailed
+    ApplicationLaunchFailed,
+    get_workdir_data,
+    get_workdir_with_workdir_data,
 )
 
 import acre
@@ -140,17 +142,15 @@ class GlobalHostDataHook(PreLaunchHook):
             )
             return
 
-        workdir_data = self._prepare_workdir_data(
-            project_doc, asset_doc, task_name
+        workdir_data = get_workdir_data(
+            project_doc, asset_doc, task_name, self.host_name
         )
         self.data["workdir_data"] = workdir_data
 
-        hierarchy = workdir_data["hierarchy"]
         anatomy = self.data["anatomy"]
 
         try:
-            anatomy_filled = anatomy.format(workdir_data)
-            workdir = os.path.normpath(anatomy_filled["work"]["folder"])
+            workdir = get_workdir_with_workdir_data(workdir_data, anatomy)
             if not os.path.exists(workdir):
                 self.log.debug(
                     "Creating workdir folder: \"{}\"".format(workdir)
@@ -168,7 +168,6 @@ class GlobalHostDataHook(PreLaunchHook):
             "AVALON_TASK": task_name,
             "AVALON_APP": self.host_name,
             "AVALON_APP_NAME": self.app_name,
-            "AVALON_HIERARCHY": hierarchy,
             "AVALON_WORKDIR": workdir
         }
         self.log.debug(
@@ -180,21 +179,6 @@ class GlobalHostDataHook(PreLaunchHook):
 
         self.prepare_last_workfile(workdir)
 
-    def _prepare_workdir_data(self, project_doc, asset_doc, task_name):
-        hierarchy = "/".join(asset_doc["data"]["parents"])
-
-        data = {
-            "project": {
-                "name": project_doc["name"],
-                "code": project_doc["data"].get("code")
-            },
-            "task": task_name,
-            "asset": asset_doc["name"],
-            "app": self.host_name,
-            "hierarchy": hierarchy
-        }
-        return data
-
     def prepare_last_workfile(self, workdir):
         """last workfile workflow preparation.
diff --git a/pype/hooks/global/pre_with_windows_shell.py b/pype/hooks/global/pre_with_windows_shell.py
index 918c0d63fd..d675c9bf5b 100644
--- a/pype/hooks/global/pre_with_windows_shell.py
+++ b/pype/hooks/global/pre_with_windows_shell.py
@@ -11,7 +11,7 @@ class LaunchWithWindowsShell(PreLaunchHook):
     """
 
     order = 10
-    app_groups = ["nuke", "nukex", "hiero", "nukestudio"]
+    app_groups = ["resolve", "nuke", "nukex", "hiero", "nukestudio"]
     platforms = ["windows"]
 
     def execute(self):
diff --git a/pype/hooks/resolve/pre_resolve_setup.py b/pype/hooks/resolve/pre_resolve_setup.py
index 4f6d33c6eb..19a0817a0d 100644
--- a/pype/hooks/resolve/pre_resolve_setup.py
+++ b/pype/hooks/resolve/pre_resolve_setup.py
@@ -14,8 +14,10 @@ class ResolvePrelaunch(PreLaunchHook):
     app_groups = ["resolve"]
 
     def execute(self):
+        # TODO: add OTIO installation from `pype/requirements.py`
         # making sure python 3.6 is installed at provided path
-        py36_dir = os.path.normpath(self.env.get("PYTHON36_RESOLVE", ""))
+        py36_dir = os.path.normpath(
+            self.launch_context.env.get("PYTHON36_RESOLVE", ""))
         assert os.path.isdir(py36_dir), (
             "Python 3.6 is not installed at the provided folder path. Either "
             "make sure the `environments\resolve.json` is having correctly "
             f"in given path. \nPYTHON36_RESOLVE: `{py36_dir}`"
         )
         self.log.info(f"Path to Resolve Python folder: `{py36_dir}`...")
-        self.env["PYTHON36_RESOLVE"] = py36_dir
 
         # setting utility scripts dir for scripts syncing
         us_dir = os.path.normpath(
-            self.env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "")
+            self.launch_context.env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "")
         )
         assert os.path.isdir(us_dir), (
             "Resolve utility script dir does not exist. Either make sure "
         self.log.debug(f"-- us_dir: `{us_dir}`")
 
         # correctly format path for pre python script
-        pre_py_sc = os.path.normpath(self.env.get("PRE_PYTHON_SCRIPT", ""))
-        self.env["PRE_PYTHON_SCRIPT"] = pre_py_sc
+        pre_py_sc = os.path.normpath(
+            self.launch_context.env.get("PRE_PYTHON_SCRIPT", ""))
+        self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc
         self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...")
         try:
             __import__("pype.hosts.resolve")
@@ -55,4 +57,4 @@ class ResolvePrelaunch(PreLaunchHook):
         # Resolve Setup integration
         importlib.reload(utils)
         self.log.debug(f"-- utils.__file__: `{utils.__file__}`")
-        utils.setup(self.env)
+        utils.setup(self.launch_context.env)
diff --git a/pype/hosts/fusion/pipeline.py b/pype/hosts/fusion/pipeline.py
index 22a80076da..b2918675ea 100644
--- a/pype/hosts/fusion/pipeline.py
+++ b/pype/hosts/fusion/pipeline.py
@@ -9,7 +9,7 @@ from pyblish import api as pyblish
 from pype.api import Logger
 from pype import PLUGINS_DIR
 
-log = Logger().get_logger(__name__, "fusion")
+log = Logger().get_logger(__name__)
 
 AVALON_CONFIG = os.environ["AVALON_CONFIG"]
diff --git a/pype/hosts/fusion/utils.py b/pype/hosts/fusion/utils.py
index 15fa52e938..6eee54b5b1 100644
--- a/pype/hosts/fusion/utils.py
+++ b/pype/hosts/fusion/utils.py
@@ -9,7 +9,7 @@ import shutil
 
 from pype.api import Logger
 
-log = Logger().get_logger(__name__, "fusion")
+log = Logger().get_logger(__name__)
 
 
 def _sync_utility_scripts(env=None):
diff --git a/pype/hosts/hiero/__init__.py b/pype/hosts/hiero/__init__.py
index 50fd39440b..9eb3cd551a 100644
--- a/pype/hosts/hiero/__init__.py
+++ b/pype/hosts/hiero/__init__.py
@@ -31,7 +31,7 @@ __all__ = [
 ]
 
 # get logger
-log = Logger().get_logger(__name__, "hiero")
+log = Logger().get_logger(__name__)
 
 ''' Creating all important host related variables '''
diff --git a/pype/hosts/hiero/events.py b/pype/hosts/hiero/events.py
index d78f8d54d4..4f9851cbac 100644
--- a/pype/hosts/hiero/events.py
+++ b/pype/hosts/hiero/events.py
@@ -4,7 +4,7 @@ from pype.api import Logger
 from .lib import sync_avalon_data_to_workfile, launch_workfiles_app
 from .tags import add_tags_from_presets
 
-log = Logger().get_logger(__name__, "hiero")
+log = Logger().get_logger(__name__)
 
 
 def startupCompleted(event):
diff --git a/pype/hosts/hiero/lib.py b/pype/hosts/hiero/lib.py
index 0db9b92b3d..182d7e53c0 100644
--- a/pype/hosts/hiero/lib.py
+++ b/pype/hosts/hiero/lib.py
@@ -9,7 +9,7 @@ from avalon.vendor.Qt import (QtWidgets, QtGui)
 import pype.api as pype
 from pype.api import Logger, Anatomy
 
-log = Logger().get_logger(__name__, "hiero")
+log = Logger().get_logger(__name__)
 
 cached_process = None
diff --git a/pype/hosts/hiero/menu.py b/pype/hosts/hiero/menu.py
index 697381f3cb..853ee8cf7e 100644
--- a/pype/hosts/hiero/menu.py
+++ b/pype/hosts/hiero/menu.py
@@ -12,7 +12,7 @@ from .lib import (
     set_workfiles
 )
 
-log = Logger().get_logger(__name__, "hiero")
+log = Logger().get_logger(__name__)
 
 self = sys.modules[__name__]
 self._change_context_menu = None
diff --git a/pype/hosts/hiero/tags.py b/pype/hosts/hiero/tags.py
index 551dc1698d..af1a9b947a 100644
--- a/pype/hosts/hiero/tags.py
+++ b/pype/hosts/hiero/tags.py
@@ -8,7 +8,7 @@ from pprint import pformat
 from pype.api import Logger
 from avalon import io
 
-log = Logger().get_logger(__name__, "hiero")
+log = Logger().get_logger(__name__)
 
 
 def tag_data():
diff --git a/pype/hosts/hiero/workio.py b/pype/hosts/hiero/workio.py
index f11a34c9a8..c3505ef1bc 100644
--- a/pype/hosts/hiero/workio.py
+++ b/pype/hosts/hiero/workio.py
@@ -4,7 +4,7 @@ from avalon import api
 
 from pype.api import Logger
 
-log = Logger().get_logger(__name__, "hiero")
+log = Logger().get_logger(__name__)
 
 
 def file_extensions():
diff --git a/pype/hosts/maya/expected_files.py b/pype/hosts/maya/expected_files.py
index 52c8893e4b..d39e5fa204 100644
--- a/pype/hosts/maya/expected_files.py
+++ b/pype/hosts/maya/expected_files.py
@@ -32,6 +32,9 @@ Attributes:
     ImagePrefixes (dict): Mapping between renderers and their respective
         image prefix attribute names.
 
+Todo:
+    Determine `multipart` from render instance.
+
 """
 
 import types
@@ -94,6 +97,10 @@ class ExpectedFiles:
 
     multipart = False
 
+    def __init__(self, render_instance):
+        """Constructor."""
+        self._render_instance = render_instance
+
     def get(self, renderer, layer):
         """Get expected files for given renderer and render layer.
 
@@ -114,15 +121,20 @@ class ExpectedFiles:
         renderSetup.instance().switchToLayerUsingLegacyName(layer)
 
         if renderer.lower() == "arnold":
-            return self._get_files(ExpectedFilesArnold(layer))
+            return self._get_files(ExpectedFilesArnold(layer,
+                                                       self._render_instance))
         elif renderer.lower() == "vray":
-            return self._get_files(ExpectedFilesVray(layer))
+            return self._get_files(ExpectedFilesVray(
+                layer, self._render_instance))
         elif renderer.lower() == "redshift":
-            return self._get_files(ExpectedFilesRedshift(layer))
+            return self._get_files(ExpectedFilesRedshift(
+                layer, self._render_instance))
         elif renderer.lower() == "mentalray":
-            return self._get_files(ExpectedFilesMentalray(layer))
+            return self._get_files(ExpectedFilesMentalray(
+                layer, self._render_instance))
         elif renderer.lower() == "renderman":
-            return self._get_files(ExpectedFilesRenderman(layer))
+            return self._get_files(ExpectedFilesRenderman(
+                layer, self._render_instance))
         else:
             raise UnsupportedRendererException(
                 "unsupported {}".format(renderer)
@@ -149,9 +161,10 @@ class AExpectedFiles:
     layer = None
     multipart = False
 
-    def __init__(self, layer):
+    def __init__(self, layer, render_instance):
         """Constructor."""
         self.layer = layer
+        self.render_instance = render_instance
 
     @abstractmethod
     def get_aovs(self):
@@ -460,9 +473,9 @@ class ExpectedFilesArnold(AExpectedFiles):
         "maya": "",
     }
 
-    def __init__(self, layer):
+    def __init__(self, layer, render_instance):
         """Constructor."""
-        super(ExpectedFilesArnold, self).__init__(layer)
+        super(ExpectedFilesArnold, self).__init__(layer, render_instance)
         self.renderer = "arnold"
 
     def get_aovs(self):
@@ -531,9 +544,9 @@ class ExpectedFilesArnold(AExpectedFiles):
 class ExpectedFilesVray(AExpectedFiles):
     """Expected files for V-Ray renderer."""
 
-    def __init__(self, layer):
+    def __init__(self, layer, render_instance):
         """Constructor."""
-        super(ExpectedFilesVray, self).__init__(layer)
+        super(ExpectedFilesVray, self).__init__(layer, render_instance)
         self.renderer = "vray"
 
     def get_renderer_prefix(self):
@@ -614,24 +627,25 @@ class ExpectedFilesVray(AExpectedFiles):
         if default_ext == "exr (multichannel)" or default_ext == "exr (deep)":
             default_ext = "exr"
 
+        # add beauty as default
         enabled_aovs.append(
             (u"beauty", default_ext)
         )
 
-        if not self.maya_is_true(
-            cmds.getAttr("vraySettings.relements_enableall")
-        ):
-            return enabled_aovs
+        # handle aovs from references
+        use_ref_aovs = self.render_instance.data.get(
+            "vrayUseReferencedAovs", False) or False
 
-        # filter all namespace prefixed AOVs - they are pulled in from
-        # references and are not rendered.
-        vr_aovs = [
-            n
-            for n in cmds.ls(
-                type=["VRayRenderElement", "VRayRenderElementSet"]
-            )
-            if len(n.split(":")) == 1
-        ]
+        # this will hold a list of all aovs, whether they come from
+        # references or not.
+        vr_aovs = cmds.ls(
+            type=["VRayRenderElement", "VRayRenderElementSet"]) or []
+        if not use_ref_aovs:
+            ref_aovs = cmds.ls(
+                type=["VRayRenderElement", "VRayRenderElementSet"],
+                referencedNodes=True) or []
+            # get difference
+            vr_aovs = list(set(vr_aovs) - set(ref_aovs))
 
         for aov in vr_aovs:
             enabled = self.maya_is_true(cmds.getAttr("{}.enabled".format(aov)))
@@ -703,9 +717,9 @@ class ExpectedFilesRedshift(AExpectedFiles):
 
     ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"]
 
-    def __init__(self, layer):
+    def __init__(self, layer, render_instance):
         """Constructor."""
-        super(ExpectedFilesRedshift, self).__init__(layer)
+        super(ExpectedFilesRedshift, self).__init__(layer, render_instance)
         self.renderer = "redshift"
 
     def get_renderer_prefix(self):
@@ -822,9 +836,9 @@ class ExpectedFilesRenderman(AExpectedFiles):
     This is very rudimentary and needs more love and testing.
     """
 
-    def __init__(self, layer):
+    def __init__(self, layer, render_instance):
         """Constructor."""
-        super(ExpectedFilesRenderman, self).__init__(layer)
+        super(ExpectedFilesRenderman, self).__init__(layer, render_instance)
         self.renderer = "renderman"
 
     def get_aovs(self):
@@ -887,7 +901,7 @@ class ExpectedFilesRenderman(AExpectedFiles):
 class ExpectedFilesMentalray(AExpectedFiles):
     """Skeleton unimplemented class for Mentalray renderer."""
 
-    def __init__(self, layer):
+    def __init__(self, layer, render_instance):
         """Constructor.
 
         Raises:
diff --git a/pype/hosts/maya/menu.py b/pype/hosts/maya/menu.py
index 288502a1e1..fa7690bca7 100644
--- a/pype/hosts/maya/menu.py
+++ b/pype/hosts/maya/menu.py
@@ -13,12 +13,14 @@ self._menu = os.environ.get('PYPE_STUDIO_NAME') or "Pype"
 log = logging.getLogger(__name__)
 
 
-def _get_menu():
+def _get_menu(menu_name=None):
     """Return the menu instance if it currently exists in Maya"""
+    if menu_name is None:
+        menu_name = self._menu
 
     widgets = dict((
         w.objectName(), w) for w in QtWidgets.QApplication.allWidgets())
-    menu = widgets.get(self._menu)
+    menu = widgets.get(menu_name)
 
     return menu
 
@@ -40,10 +42,51 @@ def deferred():
             command=lambda *args: mayalookassigner.show()
         )
 
+    def modify_workfiles():
+        from pype.tools import workfiles
+
+        def launch_workfiles_app(*_args, **_kwargs):
+            workfiles.show(
+                os.path.join(
+                    cmds.workspace(query=True, rootDirectory=True),
+                    cmds.workspace(fileRuleEntry="scene")
+                ),
+                parent=pipeline._parent
+            )
+
+        # Find the pipeline menu
+        top_menu = _get_menu(pipeline._menu)
+
+        # Try to find workfile tool action in the menu
+        workfile_action = None
+        for action in top_menu.actions():
+            if action.text() == "Work Files":
+                workfile_action = action
+                break
+
+        # Add at the top of menu if "Work Files" action was not found
+        after_action = ""
+        if workfile_action:
+            # Use action's object name for `insertAfter` argument
+            after_action = workfile_action.objectName()
+
+        # Insert action to menu
+        cmds.menuItem(
+            "Work Files",
+            parent=pipeline._menu,
+            command=launch_workfiles_app,
+            insertAfter=after_action
+        )
+
+        # Remove replaced action
+        if workfile_action:
+            top_menu.removeAction(workfile_action)
+
     log.info("Attempting to install scripts menu..")
 
     add_build_workfiles_item()
     add_look_assigner_item()
+    modify_workfiles()
 
     try:
         import scriptsmenu.launchformaya as launchformaya
diff --git a/pype/hosts/nuke/__init__.py b/pype/hosts/nuke/__init__.py
index 787f69f635..d0f3577c2e 100644
--- a/pype/hosts/nuke/__init__.py
+++ b/pype/hosts/nuke/__init__.py
@@ -15,7 +15,7 @@ from . import lib
 self = sys.modules[__name__]
 self.workfiles_launched = False
 
-log = Logger().get_logger(__name__, "nuke")
+log = Logger().get_logger(__name__)
 
 AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
diff --git a/pype/hosts/nuke/lib.py b/pype/hosts/nuke/lib.py
index 50b9697d8e..ddfdd260e2 100644
--- a/pype/hosts/nuke/lib.py
+++ b/pype/hosts/nuke/lib.py
@@ -20,7 +20,7 @@ from .presets import (
 
 from .utils import set_context_favorites
 
-log = pype.Logger().get_logger(__name__, "nuke")
+log = pype.Logger().get_logger(__name__)
 
 self = sys.modules[__name__]
 self._project = None
diff --git a/pype/hosts/nuke/menu.py b/pype/hosts/nuke/menu.py
index b1ef7f47c4..a8d5090da9 100644
--- a/pype/hosts/nuke/menu.py
+++ b/pype/hosts/nuke/menu.py
@@ -1,17 +1,37 @@
+import os
 import nuke
 from avalon.api import Session
 
 from pype.hosts.nuke import lib
 from ...lib import BuildWorkfile
 from pype.api import Logger
+from pype.tools import workfiles
 
-log = Logger().get_logger(__name__, "nuke")
+log = Logger().get_logger(__name__)
 
 
 def install():
     menubar = nuke.menu("Nuke")
     menu = menubar.findItem(Session["AVALON_LABEL"])
 
     workfile_settings = lib.WorkfileSettings
+
+    # replace the workfiles tool from avalon core with pype's
+    name = "Work Files..."
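+    # locate the existing "Work Files..." action and its menu index so the
+    # pype variant can be inserted at the same position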
+    rm_item = [
+        (i, item) for i, item in enumerate(menu.items()) if name in item.name()
+    ][0]
+
+    log.debug("Changing Item: {}".format(rm_item))
+
+    menu.removeItem(rm_item[1].name())
+    menu.addCommand(
+        name,
+        lambda: workfiles.show(
+            os.environ["AVALON_WORKDIR"]
+        ),
+        index=(rm_item[0])
+    )
+
     # replace reset resolution from avalon core to pype's
     name = "Reset Resolution"
     new_name = "Set Resolution"
diff --git a/pype/hosts/nuke/presets.py b/pype/hosts/nuke/presets.py
index 2a296afc88..e40459f400 100644
--- a/pype/hosts/nuke/presets.py
+++ b/pype/hosts/nuke/presets.py
@@ -1,7 +1,7 @@
 from pype.api import Anatomy, config, Logger
 import nuke
 
-log = Logger().get_logger(__name__, "nuke")
+log = Logger().get_logger(__name__)
 
 
 def get_anatomy(**kwarg):
diff --git a/pype/hosts/premiere/__init__.py b/pype/hosts/premiere/__init__.py
index acba84496b..8a9a032c54 100644
--- a/pype/hosts/premiere/__init__.py
+++ b/pype/hosts/premiere/__init__.py
@@ -18,7 +18,7 @@ __all__ = [
     "ls"
 ]
 
-log = Logger().get_logger(__name__, "premiere")
+log = Logger().get_logger(__name__)
 
 
 def install():
diff --git a/pype/hosts/premiere/lib.py b/pype/hosts/premiere/lib.py
index d76d27270a..5282e2c747 100644
--- a/pype/hosts/premiere/lib.py
+++ b/pype/hosts/premiere/lib.py
@@ -10,7 +10,7 @@ from pype.widgets.message_window import message
 from pype import PLUGINS_DIR
 from pype.api import Logger
 
-log = Logger().get_logger(__name__, "premiere")
+log = Logger().get_logger(__name__)
 
 self = sys.modules[__name__]
 self._has_been_setup = False
diff --git a/pype/hosts/resolve/__init__.py b/pype/hosts/resolve/__init__.py
index c8f45259ff..b6c43a58c2 100644
--- a/pype/hosts/resolve/__init__.py
+++ b/pype/hosts/resolve/__init__.py
@@ -14,20 +14,32 @@ from .pipeline import (
 )
 
 from .lib import (
+    publish_clip_color,
     get_project_manager,
     get_current_project,
     get_current_sequence,
+    get_video_track_names,
     get_current_track_items,
+    get_track_item_pype_tag,
+    set_track_item_pype_tag,
+    imprint,
+    set_publish_attribute,
+    get_publish_attribute,
     create_current_sequence_media_bin,
     create_compound_clip,
     swap_clips,
     get_pype_clip_metadata,
-    set_project_manager_to_folder_name
+    set_project_manager_to_folder_name,
+    get_reformated_path,
+    get_otio_clip_instance_data
 )
 
 from .menu import launch_pype_menu
 
-from .plugin import Creator
+from .plugin import (
+    Creator,
+    PublishClip
+)
 
 from .workio import (
     open_file,
@@ -57,21 +69,31 @@ __all__ = [
     "get_resolve_module",
 
     # lib
+    "publish_clip_color",
     "get_project_manager",
     "get_current_project",
     "get_current_sequence",
+    "get_video_track_names",
     "get_current_track_items",
+    "get_track_item_pype_tag",
+    "set_track_item_pype_tag",
+    "imprint",
+    "set_publish_attribute",
+    "get_publish_attribute",
     "create_current_sequence_media_bin",
     "create_compound_clip",
     "swap_clips",
     "get_pype_clip_metadata",
     "set_project_manager_to_folder_name",
+    "get_reformated_path",
+    "get_otio_clip_instance_data",
 
     # menu
     "launch_pype_menu",
 
     # plugin
     "Creator",
+    "PublishClip",
 
     # workio
     "open_file",
diff --git a/pype/hosts/resolve/lib.py b/pype/hosts/resolve/lib.py
index deb4fa6339..2cf228d854 100644
--- a/pype/hosts/resolve/lib.py
+++ b/pype/hosts/resolve/lib.py
@@ -1,31 +1,47 @@
 import sys
 import json
+import re
 from opentimelineio import opentime
-from pprint import pformat
+
+import pype
+
+from .otio import davinci_export as otio_export
 
 from pype.api import Logger
 
-log = Logger().get_logger(__name__, "resolve")
+log = Logger().get_logger(__name__)
 
 self = sys.modules[__name__]
-self.pm = None
+self.project_manager = None
+
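+# NOTE: self.project_manager above is a lazy module-level cache; it is filled
+# by get_project_manager() on first call and reused by later lookups
+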
+# Pype sequential rename variables
 self.rename_index = 0
 self.rename_add = 0
-self.pype_metadata_key = "VFX Notes"
+
+self.publish_clip_color = "Pink"
+self.pype_marker_workflow = True
+
+# Pype compound clip workflow variable
+self.pype_tag_name = "VFX Notes"
+
+# Pype marker workflow variables
+self.pype_marker_name = "PYPEDATA"
+self.pype_marker_duration = 1
+self.pype_marker_color = "Mint"
+self.temp_marker_frame = None
 
 
 def get_project_manager():
     from . import bmdvr
-    if not self.pm:
-        self.pm = bmdvr.GetProjectManager()
-    return self.pm
+    if not self.project_manager:
+        self.project_manager = bmdvr.GetProjectManager()
+    return self.project_manager
 
 
 def get_current_project():
     # initialize project manager
     get_project_manager()
 
-    return self.pm.GetCurrentProject()
+    return self.project_manager.GetCurrentProject()
 
 
 def get_current_sequence():
@@ -35,6 +51,22 @@ def get_current_sequence():
     return project.GetCurrentTimeline()
 
 
+def get_video_track_names():
+    tracks = list()
+    track_type = "video"
+    sequence = get_current_sequence()
+
+    # get all tracks count filtered by track type
+    selected_track_count = sequence.GetTrackCount(track_type)
+
+    # loop all tracks and get items
+    for track_index in range(1, (int(selected_track_count) + 1)):
+        track_name = sequence.GetTrackName("video", track_index)
+        tracks.append(track_name)
+
+    return tracks
+
+
 def get_current_track_items(
         filter=False,
         track_type=None,
@@ -77,13 +109,168 @@ def get_current_track_items(
             if filter is True:
                 if selecting_color in ti_color:
                     selected_clips.append(data)
-                    # ti.ClearClipColor()
             else:
                 selected_clips.append(data)
 
     return selected_clips
 
 
+def get_track_item_pype_tag(track_item):
+    """
+    Get pype track item tag created by creator or loader plugin.
+
+    Attributes:
+        track_item (resolve.TimelineItem): resolve object
+
+    Returns:
+        dict: pype tag data
+    """
+    return_tag = None
+
+    if self.pype_marker_workflow:
+        return_tag = get_pype_marker(track_item)
+    else:
+        media_pool_item = track_item.GetMediaPoolItem()
+
+        # get all tags from track item
+        _tags = media_pool_item.GetMetadata()
+        if not _tags:
+            return None
+        for key, data in _tags.items():
+            # return only correct tag defined by global name
+            if key in self.pype_tag_name:
+                return_tag = json.loads(data)
+
+    return return_tag
+
+
+def set_track_item_pype_tag(track_item, data=None):
+    """
+    Set pype track item tag to input track_item.
+
+    Attributes:
+        track_item (resolve.TimelineItem): resolve api object
+
+    Returns:
+        dict: json loaded data
+    """
+    data = data or dict()
+
+    # get available pype tag if any
+    tag_data = get_track_item_pype_tag(track_item)
+
+    if self.pype_marker_workflow:
+        # delete tag as it is not updatable
+        if tag_data:
+            delete_pype_marker(track_item)
+
+        tag_data.update(data)
+        set_pype_marker(track_item, tag_data)
+    else:
+        if tag_data:
+            media_pool_item = track_item.GetMediaPoolItem()
+            # if pype tag is available then update it with input data
+            tag_data.update(data)
+            media_pool_item.SetMetadata(
+                self.pype_tag_name, json.dumps(tag_data))
+        else:
+            tag_data = data
+            # if no tag exists then create one from input data
+            # and add it to the input track item
+            track_item.SetMetadata(self.pype_tag_name, json.dumps(tag_data))
+
+    return tag_data
+
+
+def imprint(track_item, data=None):
+    """
+    Adding `Avalon data` into a resolve track item tag.
+
+    Also including publish attribute into tag.
+
+    Arguments:
+        track_item (resolve.TimelineItem): resolve track item object
+        data (dict): Any data which needs to be imprinted
+
+    Examples:
+        data = {
+            'asset': 'sq020sh0280',
+            'family': 'render',
+            'subset': 'subsetMain'
+        }
+    """
+    data = data or {}
+
+    set_track_item_pype_tag(track_item, data)
+
+    # add publish attribute
+    set_publish_attribute(track_item, True)
+
+
+def set_publish_attribute(track_item, value):
+    """ Set Publish attribute in input Tag object
+
+    Attribute:
+        track_item (resolve.TimelineItem): resolve track item
+        value (bool): True or False
+    """
+    tag_data = get_track_item_pype_tag(track_item)
+    tag_data["publish"] = value
+    # set data to the publish attribute
+    set_track_item_pype_tag(track_item, tag_data)
+
+
+def get_publish_attribute(track_item):
+    """ Get Publish attribute from input Tag object
+
+    Attribute:
+        track_item (resolve.TimelineItem): resolve track item
+
+    Returns:
+        bool: True or False
+    """
+    tag_data = get_track_item_pype_tag(track_item)
+    return tag_data["publish"]
+
+
+def set_pype_marker(track_item, tag_data):
+    source_start = track_item.GetLeftOffset()
+    item_duration = track_item.GetDuration()
+    frame = int(source_start + (item_duration / 2))
+
+    # marker attributes
+    frameId = (frame / 10) * 10
+    color = self.pype_marker_color
+    name = self.pype_marker_name
+    note = json.dumps(tag_data)
+    duration = (self.pype_marker_duration / 10) * 10
+
+    track_item.AddMarker(
+        frameId,
+        color,
+        name,
+        note,
+        duration
+    )
+
+
+def get_pype_marker(track_item):
+    track_item_markers = track_item.GetMarkers()
+    for marker_frame in track_item_markers:
+        note = track_item_markers[marker_frame]["note"]
+        color = track_item_markers[marker_frame]["color"]
+        name = track_item_markers[marker_frame]["name"]
+        print(f"_ marker data: {marker_frame} | {name} | {color} | {note}")
+        if name == self.pype_marker_name and color == self.pype_marker_color:
+            self.temp_marker_frame = marker_frame
+            return json.loads(note)
+
+    return dict()
+
+
+def delete_pype_marker(track_item):
+    track_item.DeleteMarkerAtFrame(self.temp_marker_frame)
+    self.temp_marker_frame = None
+
+
 def create_current_sequence_media_bin(sequence):
     seq_name = sequence.GetName()
     media_pool = get_current_project().GetMediaPool()
@@ -178,7 +365,7 @@ def get_name_with_data(clip_data, presets):
         })
 
 
-def create_compound_clip(clip_data, folder, rename=False, **kwargs):
+def create_compound_clip(clip_data, name, folder):
     """
     Convert timeline object into nested timeline object
 
@@ -186,8 +373,7 @@ def create_compound_clip(clip_data, folder, rename=False, **kwargs):
         clip_data (dict): timeline item object packed into dict
                           with project, timeline (sequence)
         folder (resolve.MediaPool.Folder): media pool folder object,
-        rename (bool)[optional]: renaming in sequence or not
-        kwargs (optional): additional data needed for rename=True (presets)
+        name (str): name for compound clip
 
     Returns:
         resolve.MediaPoolItem: media pool item with compound clip timeline(cct)
@@ -199,34 +385,12 @@ def create_compound_clip(clip_data, folder, rename=False, **kwargs):
 
     # get details of objects
     clip_item = clip["item"]
-    track = clip_data["track"]
 
     mp = project.GetMediaPool()
 
     # get clip attributes
     clip_attributes = get_clip_attributes(clip_item)
-    print(f"_ clip_attributes: {pformat(clip_attributes)}")
 
-    if rename:
-        presets = kwargs.get("presets")
-        if presets:
-            name, data = get_name_with_data(clip_data, presets)
-            # add hirarchy data to clip attributes
-            clip_attributes.update(data)
-        else:
-            name = "{:0>3}_{:0>4}".format(
-                int(track["index"]), int(clip["index"]))
-    else:
-        # build name
-        clip_name_split = clip_item.GetName().split(".")
-        name = "_".join([
-            track["name"],
-            str(track["index"]),
-            clip_name_split[0],
-            str(clip["index"])]
-        )
-
-    # get metadata
     mp_item = clip_item.GetMediaPoolItem()
     mp_props = mp_item.GetClipProperty()
@@ -283,9 +447,9 @@ def create_compound_clip(clip_data, folder, rename=False, **kwargs):
     project.SetCurrentTimeline(sq_origin)
 
     # Add collected metadata and attributes to the compound clip:
-    if mp_item.GetMetadata(self.pype_metadata_key):
-        clip_attributes[self.pype_metadata_key] = mp_item.GetMetadata(
-            self.pype_metadata_key)[self.pype_metadata_key]
+    if mp_item.GetMetadata(self.pype_tag_name):
+        clip_attributes[self.pype_tag_name] = mp_item.GetMetadata(
+            self.pype_tag_name)[self.pype_tag_name]
 
     # stringify
     clip_attributes = json.dumps(clip_attributes)
@@ -295,7 +459,7 @@ def create_compound_clip(clip_data, folder, rename=False, **kwargs):
         cct.SetMetadata(k, v)
 
     # add metadata to cct
-    cct.SetMetadata(self.pype_metadata_key, clip_attributes)
+    cct.SetMetadata(self.pype_tag_name, clip_attributes)
 
     # reset start timecode of the compound clip
     cct.SetClipProperty("Start TC", mp_props["Start TC"])
@@ -314,7 +478,7 @@ def swap_clips(from_clip, to_clip, to_clip_name, to_in_frame, to_out_frame):
     It will add take and activate it to the frame range which is inputted
 
     Args:
-        from_clip (resolve.mediaPoolItem)
+        from_clip (resolve.TimelineItem)
         to_clip (resolve.mediaPoolItem)
         to_clip_name (str): name of to_clip
         to_in_frame (float): cut in frame, usually `GetLeftOffset()`
@@ -373,7 +537,7 @@ def get_pype_clip_metadata(clip):
     mp_item = clip.GetMediaPoolItem()
     metadata = mp_item.GetMetadata()
 
-    return metadata.get(self.pype_metadata_key)
+    return metadata.get(self.pype_tag_name)
 
 
 def get_clip_attributes(clip):
@@ -424,16 +588,16 @@ def set_project_manager_to_folder_name(folder_name):
     set_folder = False
 
     # go back to root folder
-    if self.pm.GotoRootFolder():
+    if self.project_manager.GotoRootFolder():
         log.info(f"Testing existing folder: {folder_name}")
         folders = convert_resolve_list_type(
-            self.pm.GetFoldersInCurrentFolder())
+            self.project_manager.GetFoldersInCurrentFolder())
         log.info(f"Testing existing folders: {folders}")
         # get me first available folder object
         # with the same name as in `folder_name` else return False
         if next((f for f in folders if f in folder_name), False):
             log.info(f"Found existing folder: {folder_name}")
-            set_folder = self.pm.OpenFolder(folder_name)
+            set_folder = self.project_manager.OpenFolder(folder_name)
 
     if set_folder:
         return True
@@ -441,11 +605,11 @@ def set_project_manager_to_folder_name(folder_name):
     # if folder by name is not existent then create one
     # go back to root folder
     log.info(f"Folder `{folder_name}` not found and will be created")
-    if self.pm.GotoRootFolder():
+    if self.project_manager.GotoRootFolder():
         try:
             # create folder by given name
-            self.pm.CreateFolder(folder_name)
-            self.pm.OpenFolder(folder_name)
+            self.project_manager.CreateFolder(folder_name)
+            self.project_manager.OpenFolder(folder_name)
             return True
         except NameError as e:
             log.error((f"Folder with name `{folder_name}` cannot be created!"
@@ -462,3 +626,80 @@ def convert_resolve_list_type(resolve_list):
             "Input argument should be dict() type")
 
     return [resolve_list[i] for i in sorted(resolve_list.keys())]
+
+
+def get_reformated_path(path, padded=True):
+    """
+    Return fixed python expression path
+
+    Args:
+        path (str): path url or simple file name
+
+    Returns:
+        type: string with reformatted path
+
+    Example:
+        get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr
+
+    """
+    num_pattern = "(\\[\\d+\\-\\d+\\])"
+    padding_pattern = "(\\d+)(?=-)"
+    if "[" in path:
+        padding = len(re.findall(padding_pattern, path).pop())
+        if padded:
+            path = re.sub(num_pattern, f"%0{padding}d", path)
+        else:
+            path = re.sub(num_pattern, "%d", path)
+    return path
+
+
+def create_otio_time_range_from_track_item_data(track_item_data):
+    track_item = track_item_data["clip"]["item"]
+    project = track_item_data["project"]
+    timeline = track_item_data["sequence"]
+    timeline_start = timeline.GetStartFrame()
+
+    frame_start = int(track_item.GetStart() - timeline_start)
+    frame_duration = int(track_item.GetDuration())
+    fps = project.GetSetting("timelineFrameRate")
+
+    return otio_export.create_otio_time_range(
+        frame_start, frame_duration, fps)
+
+
+def get_otio_clip_instance_data(otio_timeline, track_item_data):
+    """
+    Return otio objects for timeline, track and clip
+
+    Args:
+        track_item_data (dict): track_item_data from list returned by
+                                resolve.get_current_track_items()
+        otio_timeline (otio.schema.Timeline): otio object
+
+    Returns:
+        dict: otio clip object
+
+    """
+
+    track_item = track_item_data["clip"]["item"]
+    track_name = track_item_data["track"]["name"]
+    timeline_range = create_otio_time_range_from_track_item_data(
+        track_item_data)
+
+    for otio_clip in otio_timeline.each_clip():
+        # compare the otio clip's parent track name with the input track name
+        otio_track_name = otio_clip.parent().name
+        parent_range = otio_clip.range_in_parent()
+        if track_name not in otio_track_name:
+            continue
+        if otio_clip.name not in track_item.GetName():
+            continue
+        if pype.lib.is_overlapping_otio_ranges(
+                parent_range, timeline_range, strict=True):
+
+            # add pypedata marker to otio_clip metadata
+            for marker in otio_clip.markers:
+                if self.pype_marker_name in marker.name:
+                    otio_clip.metadata.update(marker.metadata)
+            return {"otioClip": otio_clip}
+
+    return None
diff --git a/pype/hosts/resolve/menu_style.qss b/pype/hosts/resolve/menu_style.qss
index ea11c4ca2e..5a1d39fe79 100644
--- a/pype/hosts/resolve/menu_style.qss
+++ b/pype/hosts/resolve/menu_style.qss
@@ -4,6 +4,17 @@ QWidget {
     font-size: 13px;
 }
 
+QComboBox {
+    border: 1px solid #090909;
+    background-color: #201f1f;
+    color: #ffffff;
+}
+
+QComboBox QAbstractItemView
+{
+    color: white;
+}
+
 QPushButton {
     border: 1px solid #090909;
     background-color: #201f1f;
diff --git a/pype/modules/logging/tray/__init__.py b/pype/hosts/resolve/otio/__init__.py
similarity index 100%
rename from pype/modules/logging/tray/__init__.py
rename to pype/hosts/resolve/otio/__init__.py
diff --git a/pype/hosts/resolve/otio/davinci_export.py b/pype/hosts/resolve/otio/davinci_export.py
new file mode 100644
index 0000000000..7912b1abd8
--- /dev/null
+++ b/pype/hosts/resolve/otio/davinci_export.py
@@ -0,0 +1,324 @@
+""" compatibility OpenTimelineIO 0.12.0 and older
+"""
+
+import os
+import re
+import sys
+import json
+import opentimelineio as otio
+from . import utils
+import clique
+
+self = sys.modules[__name__]
+self.track_types = {
+    "video": otio.schema.TrackKind.Video,
+    "audio": otio.schema.TrackKind.Audio
+}
+self.project_fps = None
+
+
+def create_otio_rational_time(frame, fps):
+    return otio.opentime.RationalTime(
+        float(frame),
+        float(fps)
+    )
+
+
+def create_otio_time_range(start_frame, frame_duration, fps):
+    return otio.opentime.TimeRange(
+        start_time=create_otio_rational_time(start_frame, fps),
+        duration=create_otio_rational_time(frame_duration, fps)
+    )
+
+
+def create_otio_reference(media_pool_item):
+    metadata = _get_metadata_media_pool_item(media_pool_item)
+    mp_clip_property = media_pool_item.GetClipProperty()
+    path = mp_clip_property["File Path"]
+    reformat_path = utils.get_reformated_path(path, padded=True)
+    padding = utils.get_padding_from_path(path)
+
+    if padding:
+        metadata.update({
+            "isSequence": True,
+            "padding": padding
+        })
+
+    # get clip property regarding to type
+    mp_clip_property = media_pool_item.GetClipProperty()
+    fps = float(mp_clip_property["FPS"])
+    if mp_clip_property["Type"] == "Video":
+        frame_start = int(mp_clip_property["Start"])
+        frame_duration = int(mp_clip_property["Frames"])
+    else:
+        audio_duration = str(mp_clip_property["Duration"])
+        frame_start = 0
+        frame_duration = int(utils.timecode_to_frames(
+            audio_duration, float(fps)))
+
+    otio_ex_ref_item = None
+
+    if padding:
+        # if it is file sequence try to create `ImageSequenceReference`
+        # the OTIO might not be compatible so return nothing and do it old way
+        try:
+            dirname, filename = os.path.split(path)
+            collection = clique.parse(filename, '{head}[{ranges}]{tail}')
+            padding_num = len(re.findall("(\\d+)(?=-)", filename).pop())
+            otio_ex_ref_item = otio.schema.ImageSequenceReference(
+                target_url_base=dirname + os.sep,
+                name_prefix=collection.format("{head}"),
+                name_suffix=collection.format("{tail}"),
+                start_frame=frame_start,
+                frame_zero_padding=padding_num,
+                rate=fps,
+                available_range=create_otio_time_range(
+                    frame_start,
+                    frame_duration,
+                    fps
+                )
+            )
+        except AttributeError:
+            pass
+
+    if not otio_ex_ref_item:
+        # in case old OTIO or video file create `ExternalReference`
+        otio_ex_ref_item = otio.schema.ExternalReference(
+            target_url=reformat_path,
+            available_range=create_otio_time_range(
+                frame_start,
+                frame_duration,
+                fps
+            )
+        )
+
+    # add metadata to otio item
+    add_otio_metadata(otio_ex_ref_item, media_pool_item, **metadata)
+
+    return otio_ex_ref_item
+
+
+def create_otio_markers(track_item, fps):
+    track_item_markers = track_item.GetMarkers()
+    markers = []
+    for marker_frame in track_item_markers:
+        note = track_item_markers[marker_frame]["note"]
+        if "{" in note and "}" in note:
+            metadata = json.loads(note)
+        else:
+            metadata = {"note": note}
+        markers.append(
+            otio.schema.Marker(
+                name=track_item_markers[marker_frame]["name"],
+                marked_range=create_otio_time_range(
+                    marker_frame,
+                    track_item_markers[marker_frame]["duration"],
+                    fps
+                ),
+                color=track_item_markers[marker_frame]["color"].upper(),
+                metadata=metadata
+            )
+        )
+    return markers
+
+
+def create_otio_clip(track_item):
+    media_pool_item = track_item.GetMediaPoolItem()
+    mp_clip_property = media_pool_item.GetClipProperty()
+
+    if not self.project_fps:
+        fps = mp_clip_property["FPS"]
+    else:
+        fps = self.project_fps
+
+    name = track_item.GetName()
+
+    media_reference = create_otio_reference(media_pool_item)
+    source_range = create_otio_time_range(
+        int(track_item.GetLeftOffset()),
+        int(track_item.GetDuration()),
+        fps
+    )
+
+    if mp_clip_property["Type"] == "Audio":
+        return_clips = list()
+        audio_chanels = mp_clip_property["Audio Ch"]
+        for channel in range(0, int(audio_chanels)):
+            clip = otio.schema.Clip(
+                name=f"{name}_{channel}",
+                source_range=source_range,
+                media_reference=media_reference
+            )
+            for marker in create_otio_markers(track_item, fps):
+                clip.markers.append(marker)
+            return_clips.append(clip)
+        return return_clips
+    else:
+        clip = otio.schema.Clip(
+            name=name,
+            source_range=source_range,
+            media_reference=media_reference
+        )
+        for marker in create_otio_markers(track_item, fps):
+            clip.markers.append(marker)
+
+        return clip
+
+
+def create_otio_gap(gap_start, clip_start, tl_start_frame, fps):
+    return otio.schema.Gap(
+        source_range=create_otio_time_range(
+            gap_start,
+            (clip_start - tl_start_frame) - gap_start,
+            fps
+        )
+    )
+
+
+def _create_otio_timeline(project, timeline, fps):
+    metadata = _get_timeline_metadata(project, timeline)
+    start_time = create_otio_rational_time(
+        timeline.GetStartFrame(), fps)
+    otio_timeline = otio.schema.Timeline(
+        name=timeline.GetName(),
+        global_start_time=start_time,
+        metadata=metadata
+    )
+    return otio_timeline
+
+
+def _get_timeline_metadata(project, timeline):
+    media_pool = project.GetMediaPool()
+    root_folder = media_pool.GetRootFolder()
+    ls_folder = root_folder.GetClipList()
+    timeline = project.GetCurrentTimeline()
+    timeline_name = timeline.GetName()
+    for tl in ls_folder:
+        if tl.GetName() not in timeline_name:
+            continue
+        return _get_metadata_media_pool_item(tl)
+
+
+def _get_metadata_media_pool_item(media_pool_item):
+    data = dict()
+    data.update({k: v for k, v in media_pool_item.GetMetadata().items()})
+    property = media_pool_item.GetClipProperty() or {}
+    for name, value in property.items():
+        if "Resolution" in name and "" != value:
+            width, height = value.split("x")
+            data.update({
+                "width": int(width),
+                "height": int(height)
+            })
+        if "PAR" in name and "" != value:
+            try:
+                data.update({"pixelAspect": float(value)})
+            except ValueError:
+                if "Square" in value:
+                    data.update({"pixelAspect": float(1)})
+                else:
+                    data.update({"pixelAspect": float(1)})
+
+    return data
+
+
+def create_otio_track(track_type, track_name):
+    return otio.schema.Track(
+        name=track_name,
+        kind=self.track_types[track_type]
+    )
+
+
+def add_otio_gap(clip_start, otio_track, track_item, timeline):
+    # if gap between track start and clip start
+    if clip_start > otio_track.available_range().duration.value:
+        # create gap and add it to track
+        otio_track.append(
+            create_otio_gap(
+                otio_track.available_range().duration.value,
+                track_item.GetStart(),
+                timeline.GetStartFrame(),
+                self.project_fps
+            )
+        )
+
+
+def add_otio_metadata(otio_item, media_pool_item, **kwargs):
+    mp_metadata = media_pool_item.GetMetadata()
+    # add additional metadata from kwargs
+    if kwargs:
+        mp_metadata.update(kwargs)
+
+    # add metadata to otio item metadata
+    for key, value in mp_metadata.items():
+        otio_item.metadata.update({key: value})
+
+
+def create_otio_timeline(resolve_project):
+
+    # get current timeline
+    self.project_fps = resolve_project.GetSetting("timelineFrameRate")
+    timeline = resolve_project.GetCurrentTimeline()
+
+    # convert timeline to otio
+    otio_timeline = _create_otio_timeline(
+        resolve_project, timeline, self.project_fps)
+
+    # loop all defined track types
+    for track_type in list(self.track_types.keys()):
+        # get total track count
+        track_count = timeline.GetTrackCount(track_type)
+
+        # loop all tracks by track indexes
+        for track_index in range(1, int(track_count) + 1):
+            # get current track name
+            track_name = timeline.GetTrackName(track_type, track_index)
+
+            # convert track to otio
+            otio_track = create_otio_track(
+                track_type, track_name)
+
+            # get all track items in current track
+            current_track_items = timeline.GetItemListInTrack(
+                track_type, track_index)
+
+            # loop available track items in current track items
+            for track_item in current_track_items:
+                # skip offline track items
+                if track_item.GetMediaPoolItem() is None:
+                    continue
+
+                # calculate real clip start
+                clip_start = track_item.GetStart() - timeline.GetStartFrame()
+
+                add_otio_gap(
+                    clip_start, otio_track, track_item, timeline)
+
+                # create otio clip and add it to track
+                otio_clip = create_otio_clip(track_item)
+
+                if not isinstance(otio_clip, list):
+                    otio_track.append(otio_clip)
+                else:
+                    for index, clip in enumerate(otio_clip):
+                        if index == 0:
+                            otio_track.append(clip)
+                        else:
+                            # add previous otio track to timeline
+                            otio_timeline.tracks.append(otio_track)
+                            # convert track to otio
+                            otio_track = create_otio_track(
+                                track_type, track_name)
+                            add_otio_gap(
+                                clip_start, otio_track,
+                                track_item, timeline)
+                            otio_track.append(clip)
+
+            # add track to otio timeline
+            otio_timeline.tracks.append(otio_track)
+
+    return otio_timeline
+
+
+def write_to_file(otio_timeline, path):
+    otio.adapters.write_to_file(otio_timeline, path)
diff --git a/pype/hosts/resolve/otio/davinci_import.py b/pype/hosts/resolve/otio/davinci_import.py
new file mode 100644
index 0000000000..3bbb007b25
--- /dev/null
+++ b/pype/hosts/resolve/otio/davinci_import.py
@@ -0,0 +1,108 @@
+import sys
+import json
+import DaVinciResolveScript
+import opentimelineio as otio
+
+
+self = sys.modules[__name__]
+self.resolve = DaVinciResolveScript.scriptapp('Resolve')
+self.fusion = DaVinciResolveScript.scriptapp('Fusion')
+self.project_manager = self.resolve.GetProjectManager()
+self.current_project = self.project_manager.GetCurrentProject()
+self.media_pool = self.current_project.GetMediaPool()
+self.track_types = {
+    "video": otio.schema.TrackKind.Video,
+    "audio": otio.schema.TrackKind.Audio
+}
+self.project_fps = None
+
+
+def build_timeline(otio_timeline):
+    # TODO: build timeline in mediapool `otioImport` folder
+    # TODO: loop otio tracks and build them in the new timeline
+    for clip in otio_timeline.each_clip():
+        # TODO: create track item
+        print(clip.name)
+        print(clip.parent().name)
+        print(clip.range_in_parent())
+
+
+def _build_track(otio_track):
+    # TODO: _build_track
+    pass
+
+
+def _build_media_pool_item(otio_media_reference):
+    # TODO: _build_media_pool_item
+    pass
+
+
+def _build_track_item(otio_clip):
+    # TODO: _build_track_item
+    pass
+
+
+def _build_gap(otio_clip):
+    # TODO: _build_gap
+    pass
+
+
+def _build_marker(track_item, otio_marker):
+    frame_start = otio_marker.marked_range.start_time.value
+    frame_duration = otio_marker.marked_range.duration.value
+
+    # marker attributes
+    frameId = (frame_start / 10) * 10
+    color = otio_marker.color
+    name = otio_marker.name
+    note = otio_marker.metadata.get("note") or json.dumps(otio_marker.metadata)
+    duration = (frame_duration / 10) * 10
+
+    track_item.AddMarker(
+        frameId,
+        color,
+        name,
+        note,
+        duration
+    )
+
+
+def _build_media_pool_folder(name):
+    """
+    Returns folder with input name and sets it as current folder.
+
+    It will create new media bin if none is found in root media bin
+
+    Args:
+        name (str): name of bin
+
+    Returns:
+        resolve.api.MediaPool.Folder: media pool folder object
+
+    """
+
+    root_folder = self.media_pool.GetRootFolder()
+    sub_folders = root_folder.GetSubFolderList()
+    testing_names = list()
+
+    for subfolder in sub_folders:
+        subf_name = subfolder.GetName()
+        if name in subf_name:
+            testing_names.append(subfolder)
+        else:
+            testing_names.append(False)
+
+    matching = next((f for f in testing_names if f is not False), None)
+
+    if not matching:
+        new_folder = self.media_pool.AddSubFolder(root_folder, name)
+        self.media_pool.SetCurrentFolder(new_folder)
+    else:
+        self.media_pool.SetCurrentFolder(matching)
+
+    return self.media_pool.GetCurrentFolder()
+
+
+def read_from_file(otio_file):
+    otio_timeline = otio.adapters.read_from_file(otio_file)
+    build_timeline(otio_timeline)
diff --git a/pype/hosts/resolve/otio/utils.py b/pype/hosts/resolve/otio/utils.py
new file mode 100644
index 0000000000..ec514289f5
--- /dev/null
+++ b/pype/hosts/resolve/otio/utils.py
@@ -0,0 +1,63 @@
+import re
+import opentimelineio as otio
+
+
+def timecode_to_frames(timecode, framerate):
+    rt = otio.opentime.from_timecode(timecode, framerate)
+    return int(otio.opentime.to_frames(rt))
+
+
+def frames_to_timecode(frames, framerate):
+    rt = otio.opentime.from_frames(frames, framerate)
+    return otio.opentime.to_timecode(rt)
+
+
+def frames_to_secons(frames, framerate):
+    rt = otio.opentime.from_frames(frames, framerate)
+    return otio.opentime.to_seconds(rt)
+
+
+def get_reformated_path(path, padded=True):
+    """
+    Return fixed python expression path
+
+    Args:
+        path (str): path url or simple file name
+
+    Returns:
+        type: string with reformatted path
+
+    Example:
+        get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr
+
+    """
+    num_pattern = "(\\[\\d+\\-\\d+\\])"
+    padding_pattern = "(\\d+)(?=-)"
+    if "[" in path:
+        padding = len(re.findall(padding_pattern, path).pop())
+        if padded:
+            path = re.sub(num_pattern, f"%0{padding}d", path)
+        else:
+            path = re.sub(num_pattern, "%d", path)
+    return path
+
+
+def get_padding_from_path(path):
+    """
+    Return padding number from DaVinci Resolve sequence path style
+
+    Args:
+        path (str): path url or simple file name
+
+    Returns:
+        int: padding number
+
+    Example:
+        get_padding_from_path("plate.[0001-1008].exr") > 4
+
+    """
+    padding_pattern = "(\\d+)(?=-)"
+    if "[" in path:
+        return len(re.findall(padding_pattern, path).pop())
+
+    return None
diff --git a/pype/hosts/resolve/pipeline.py b/pype/hosts/resolve/pipeline.py
index 92bef2e13b..2517c29426 100644
--- a/pype/hosts/resolve/pipeline.py
+++ b/pype/hosts/resolve/pipeline.py
@@ -3,13 +3,17 @@ Basic avalon integration
 """
 import os
 import contextlib
+from collections import OrderedDict
 from avalon.tools import workfiles
 from avalon import api as avalon
+from avalon import schema
+from avalon.pipeline import AVALON_CONTAINER_ID
 from pyblish import api as pyblish
 import pype
 from pype.api import Logger
+from . import lib
 
-log = Logger().get_logger(__name__, "resolve")
+log = Logger().get_logger(__name__)
 
 AVALON_CONFIG = os.environ["AVALON_CONFIG"]
@@ -57,6 +61,9 @@ def install():
     avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
     avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
 
+    # register callback for switching publishable
+    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
+
     get_resolve_module()
 
@@ -79,30 +86,50 @@ def uninstall():
     avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
     avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
 
+    # deregister callback for switching publishable
+    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
+
 
-def containerise(obj,
+def containerise(track_item,
                  name,
                  namespace,
                  context,
                  loader=None,
                  data=None):
-    """Bundle Resolve's object into an assembly and imprint it with metadata
+    """Bundle Resolve's track item into an assembly and imprint it with metadata
 
     Containerisation enables a tracking of version, author and origin
     for loaded assets.
 
     Arguments:
-        obj (obj): Resolve's object to imprint as container
+        track_item (resolve.TimelineItem): object to imprint as container
         name (str): Name of resulting assembly
        namespace (str): Namespace under which to host container
         context (dict): Asset information
         loader (str, optional): Name of node used to produce this container.
 
     Returns:
-        obj (obj): containerised object
+        track_item (resolve.TimelineItem): containerised object
 
     """
-    pass
+
+    data_imprint = OrderedDict({
+        "schema": "avalon-core:container-2.0",
+        "id": AVALON_CONTAINER_ID,
+        "name": str(name),
+        "namespace": str(namespace),
+        "loader": str(loader),
+        "representation": str(context["representation"]["_id"]),
+    })
+
+    if data:
+        for k, v in data.items():
+            data_imprint.update({k: v})
+
+    print("_ data_imprint: {}".format(data_imprint))
+    lib.set_track_item_pype_tag(track_item, data_imprint)
+
+    return track_item
 
 
 def ls():
@@ -115,20 +142,77 @@ def ls():
     See the `container.json` schema for details on how it should look,
     and the Maya equivalent, which is in `avalon.maya.pipeline`
     """
-    pass
+
+    # get all track items from current timeline
+    all_track_items = lib.get_current_track_items(filter=False)
+
+    for track_item_data in all_track_items:
+        track_item = track_item_data["clip"]["item"]
+        container = parse_container(track_item)
+        if container:
+            yield container
 
 
-def parse_container(container):
-    """Return the container node's full container data.
+def parse_container(track_item, validate=True):
+    """Return container data from track_item's pype tag.
 
     Args:
-        container (str): A container node name.
+        track_item (resolve.TimelineItem): A containerised track item.
+        validate (bool)[optional]: validating with avalon scheme
 
     Returns:
-        dict: The container schema data for this container node.
+        dict: The container schema data for input containerized track item.
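+
+    Example:
+        >>> # sketch only: track_item is assumed to carry a pype tag
+        >>> container = parse_container(track_item)
+        >>> container["schema"]
+        'avalon-core:container-2.0'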
""" - pass + # convert tag metadata to normal keys names + data = lib.get_track_item_pype_tag(track_item) + + if validate and data and data.get("schema"): + schema.validate(data) + + if not isinstance(data, dict): + return + + # If not all required data return the empty container + required = ['schema', 'id', 'name', + 'namespace', 'loader', 'representation'] + + if not all(key in data for key in required): + return + + container = {key: data[key] for key in required} + + container["objectName"] = track_item.name() + + # Store reference to the node object + container["_track_item"] = track_item + + return container + + +def update_container(track_item, data=None): + """Update container data to input track_item's pype tag. + + Args: + track_item (hiero.core.TrackItem): A containerised track item. + data (dict)[optional]: dictionery with data to be updated + + Returns: + bool: True if container was updated correctly + + """ + data = data or dict() + + container = lib.get_track_item_pype_tag(track_item) + + for _key, _value in container.items(): + try: + container[_key] = data[_key] + except KeyError: + pass + + log.info("Updating container: `{}`".format(track_item)) + return bool(lib.set_track_item_pype_tag(track_item, container)) def launch_workfiles_app(*args): @@ -163,3 +247,18 @@ def reset_selection(): """Deselect all selected nodes """ pass + + +def on_pyblish_instance_toggled(instance, old_value, new_value): + """Toggle node passthrough states on instance toggles.""" + + log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( + instance, old_value, new_value)) + + from pype.hosts.resolve import ( + set_publish_attribute + ) + + # Whether instances should be passthrough based on new value + track_item = instance.data["item"] + set_publish_attribute(track_item, new_value) diff --git a/pype/hosts/resolve/plugin.py b/pype/hosts/resolve/plugin.py index 72eec04896..fa4559efac 100644 --- a/pype/hosts/resolve/plugin.py +++ b/pype/hosts/resolve/plugin.py @@ -2,7 +2,7 @@ import re from avalon import api from pype.hosts import resolve from avalon.vendor import qargparse -from pype.api import config +from . 
import lib from Qt import QtWidgets, QtCore @@ -12,7 +12,7 @@ class CreatorWidget(QtWidgets.QDialog): # output items items = dict() - def __init__(self, name, info, presets, parent=None): + def __init__(self, name, info, ui_inputs, parent=None): super(CreatorWidget, self).__init__(parent) self.setObjectName(name) @@ -25,6 +25,7 @@ class CreatorWidget(QtWidgets.QDialog): | QtCore.Qt.WindowStaysOnTopHint ) self.setWindowTitle(name or "Pype Creator Input") + self.resize(500, 700) # Where inputs and labels are set self.content_widget = [QtWidgets.QWidget(self)] @@ -35,14 +36,25 @@ class CreatorWidget(QtWidgets.QDialog): # first add widget tag line top_layout.addWidget(QtWidgets.QLabel(info)) - top_layout.addWidget(Spacer(5, self)) - # main dynamic layout - self.content_widget.append(QtWidgets.QWidget(self)) - content_layout = QtWidgets.QFormLayout(self.content_widget[-1]) + self.scroll_area = QtWidgets.QScrollArea(self, widgetResizable=True) + self.scroll_area.setVerticalScrollBarPolicy( + QtCore.Qt.ScrollBarAsNeeded) + self.scroll_area.setVerticalScrollBarPolicy( + QtCore.Qt.ScrollBarAlwaysOn) + self.scroll_area.setHorizontalScrollBarPolicy( + QtCore.Qt.ScrollBarAlwaysOff) + self.scroll_area.setWidgetResizable(True) + + self.content_widget.append(self.scroll_area) + + scroll_widget = QtWidgets.QWidget(self) + in_scroll_area = QtWidgets.QVBoxLayout(scroll_widget) + self.content_layout = [in_scroll_area] # add preset data into input widget layout - self.items = self.add_presets_to_layout(content_layout, presets) + self.items = self.populate_widgets(ui_inputs) + self.scroll_area.setWidget(scroll_widget) # Confirmation buttons btns_widget = QtWidgets.QWidget(self) @@ -79,20 +91,33 @@ class CreatorWidget(QtWidgets.QDialog): self.result = None self.close() - def value(self, data): + def value(self, data, new_data=None): + new_data = new_data or dict() for k, v in data.items(): - if isinstance(v, dict): - print(f"nested: {k}") - data[k] = self.value(v) - elif getattr(v, "value", None): - print(f"normal int: {k}") - result = v.value() - data[k] = result() - else: - print(f"normal text: {k}") - result = v.text() - data[k] = result() - return data + new_data[k] = { + "target": None, + "value": None + } + if v["type"] == "dict": + new_data[k]["target"] = v["target"] + new_data[k]["value"] = self.value(v["value"]) + if v["type"] == "section": + new_data.pop(k) + new_data = self.value(v["value"], new_data) + elif getattr(v["value"], "currentText", None): + new_data[k]["target"] = v["target"] + new_data[k]["value"] = v["value"].currentText() + elif getattr(v["value"], "isChecked", None): + new_data[k]["target"] = v["target"] + new_data[k]["value"] = v["value"].isChecked() + elif getattr(v["value"], "value", None): + new_data[k]["target"] = v["target"] + new_data[k]["value"] = v["value"].value() + elif getattr(v["value"], "text", None): + new_data[k]["target"] = v["target"] + new_data[k]["value"] = v["value"].text() + + return new_data def camel_case_split(self, text): matches = re.finditer( @@ -124,41 +149,115 @@ class CreatorWidget(QtWidgets.QDialog): for func, val in kwargs.items(): if getattr(item, func): func_attr = getattr(item, func) - func_attr(val) + if isinstance(val, tuple): + func_attr(*val) + else: + func_attr(val) # add to layout layout.addRow(label, item) return item - def add_presets_to_layout(self, content_layout, data): + def populate_widgets(self, data, content_layout=None): + """ + Populate widget from input dict. 
+
+        Each plugin has its own set of widget rows defined in a dictionary;
+        each row's values should have the following keys: `type`, `target`,
+        `label`, `order`, `value` and optionally also `toolTip`.
+
+        Args:
+            data (dict): widget rows or organized groups defined
+                by types `dict` or `section`
+            content_layout (QtWidgets.QFormLayout)[optional]: used when
+                nesting
+
+        Returns:
+            dict: redefined data dict updated with created widgets
+
+        """
+
+        content_layout = content_layout or self.content_layout[-1]
+        # fix processing order by the defined order value
+        ordered_keys = list(data.keys())
         for k, v in data.items():
-            if isinstance(v, dict):
+            try:
+                # try to remove the key from the index position
+                # that its defined order should fill
+                ordered_keys.pop(v["order"])
+            except IndexError:
+                pass
+            # add key into correct order
+            ordered_keys.insert(v["order"], k)
+
+        # process ordered
+        for k in ordered_keys:
+            v = data[k]
+            tool_tip = v.get("toolTip", "")
+            if v["type"] == "dict":
                 # adding spacer between sections
-                self.content_widget.append(QtWidgets.QWidget(self))
-                devider = QtWidgets.QVBoxLayout(self.content_widget[-1])
-                devider.addWidget(Spacer(5, self))
-                devider.setObjectName("Devider")
+                self.content_layout.append(QtWidgets.QWidget(self))
+                content_layout.addWidget(self.content_layout[-1])
+                self.content_layout[-1].setObjectName("sectionHeadline")
+
+                headline = QtWidgets.QVBoxLayout(self.content_layout[-1])
+                headline.addWidget(Spacer(20, self))
+                headline.addWidget(QtWidgets.QLabel(v["label"]))

                 # adding nested layout with label
-                self.content_widget.append(QtWidgets.QWidget(self))
+                self.content_layout.append(QtWidgets.QWidget(self))
+                self.content_layout[-1].setObjectName("sectionContent")
+
                 nested_content_layout = QtWidgets.QFormLayout(
-                    self.content_widget[-1])
+                    self.content_layout[-1])
                 nested_content_layout.setObjectName("NestedContentLayout")
+                content_layout.addWidget(self.content_layout[-1])

                 # add nested key as label
-                self.create_row(nested_content_layout, "QLabel", k)
-                data[k] = self.add_presets_to_layout(nested_content_layout, v)
-            elif isinstance(v, str):
-                print(f"layout.str: {k}")
-                print(f"content_layout: {content_layout}")
-                data[k] = self.create_row(
-                    content_layout, "QLineEdit", k, setText=v)
-            elif isinstance(v, int):
-                print(f"layout.int: {k}")
-                print(f"content_layout: {content_layout}")
-                data[k] = self.create_row(
-                    content_layout, "QSpinBox", k, setValue=v)
+                data[k]["value"] = self.populate_widgets(
+                    v["value"], nested_content_layout)
+
+            if v["type"] == "section":
+                # adding spacer between sections
+                self.content_layout.append(QtWidgets.QWidget(self))
+                content_layout.addWidget(self.content_layout[-1])
+                self.content_layout[-1].setObjectName("sectionHeadline")
+
+                headline = QtWidgets.QVBoxLayout(self.content_layout[-1])
+                headline.addWidget(Spacer(20, self))
+                headline.addWidget(QtWidgets.QLabel(v["label"]))
+
+                # adding nested layout with label
+                self.content_layout.append(QtWidgets.QWidget(self))
+                self.content_layout[-1].setObjectName("sectionContent")
+
+                nested_content_layout = QtWidgets.QFormLayout(
+                    self.content_layout[-1])
+                nested_content_layout.setObjectName("NestedContentLayout")
+                content_layout.addWidget(self.content_layout[-1])
+
+                # recursively populate nested rows
+                data[k]["value"] = self.populate_widgets(
+                    v["value"], nested_content_layout)
+
+            elif v["type"] == "QLineEdit":
+                data[k]["value"] = self.create_row(
+                    content_layout, "QLineEdit", v["label"],
+                    setText=v["value"], setToolTip=tool_tip)
+            elif v["type"] == "QComboBox":
+                data[k]["value"] = self.create_row(
content_layout, "QComboBox", v["label"], + addItems=v["value"], setToolTip=tool_tip) + elif v["type"] == "QCheckBox": + data[k]["value"] = self.create_row( + content_layout, "QCheckBox", v["label"], + setChecked=v["value"], setToolTip=tool_tip) + elif v["type"] == "QSpinBox": + data[k]["value"] = self.create_row( + content_layout, "QSpinBox", v["label"], + setRange=(0, 99999), + setValue=v["value"], + setToolTip=tool_tip) return data @@ -179,20 +278,6 @@ class Spacer(QtWidgets.QWidget): self.setLayout(layout) -def get_reference_node_parents(ref): - """Return all parent reference nodes of reference node - - Args: - ref (str): reference node. - - Returns: - list: The upstream parent reference nodes. - - """ - parents = [] - return parents - - class SequenceLoader(api.Loader): """A basic SequenceLoader for Resolve @@ -258,8 +343,12 @@ class Creator(api.Creator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - self.presets = config.get_presets()['plugins']["resolve"][ - "create"].get(self.__class__.__name__, {}) + from pype.api import get_current_project_settings + resolve_p_settings = get_current_project_settings().get("resolve") + self.presets = dict() + if resolve_p_settings: + self.presets = resolve_p_settings["create"].get( + self.__class__.__name__, {}) # adding basic current context resolve objects self.project = resolve.get_current_project() @@ -271,3 +360,310 @@ class Creator(api.Creator): self.selected = resolve.get_current_track_items(filter=False) self.widget = CreatorWidget + + +class PublishClip: + """ + Convert a track item to publishable instance + + Args: + track_item (hiero.core.TrackItem): hiero track item object + kwargs (optional): additional data needed for rename=True (presets) + + Returns: + hiero.core.TrackItem: hiero track item object with pype tag + """ + vertical_clip_match = dict() + tag_data = dict() + types = { + "shot": "shot", + "folder": "folder", + "episode": "episode", + "sequence": "sequence", + "track": "sequence", + } + + # parents search patern + parents_search_patern = r"\{([a-z]*?)\}" + + # default templates for non-ui use + rename_default = False + hierarchy_default = "{_folder_}/{_sequence_}/{_track_}" + clip_name_default = "shot_{_trackIndex_:0>3}_{_clipIndex_:0>4}" + subset_name_default = "" + review_track_default = "< none >" + subset_family_default = "plate" + count_from_default = 10 + count_steps_default = 10 + vertical_sync_default = False + driving_layer_default = "" + + def __init__(self, cls, track_item_data, **kwargs): + # populate input cls attribute onto self.[attr] + self.__dict__.update(cls.__dict__) + + # get main parent objects + self.track_item_data = track_item_data + self.track_item = track_item_data["clip"]["item"] + sequence_name = track_item_data["sequence"].GetName() + self.sequence_name = str(sequence_name).replace(" ", "_") + + # track item (clip) main attributes + self.ti_name = self.track_item.GetName() + self.ti_index = int(track_item_data["clip"]["index"]) + + # get track name and index + track_name = track_item_data["track"]["name"] + self.track_name = str(track_name).replace(" ", "_") + self.track_index = int(track_item_data["track"]["index"]) + + # adding tag.family into tag + if kwargs.get("avalon"): + self.tag_data.update(kwargs["avalon"]) + + # adding ui inputs if any + self.ui_inputs = kwargs.get("ui_inputs", {}) + + # adding media pool folder if any + self.mp_folder = kwargs.get("mp_folder") + + # populate default data before we get other attributes + 
+        self._populate_track_item_default_data()
+
+        # use all populated default data to create all important attributes
+        self._populate_attributes()
+
+        # create parents with correct types
+        self._create_parents()
+
+    def convert(self):
+        # solve track item data and add them to tag data
+        self._convert_to_tag_data()
+
+        # if track name is in review track name and also if driving track name
+        # is not in review track name: skip tag creation
+        if (self.track_name in self.review_layer) and (
+                self.driving_layer not in self.review_layer):
+            return
+
+        # deal with clip name
+        new_name = self.tag_data.pop("newClipName")
+
+        if self.rename:
+            self.tag_data["asset"] = new_name
+        else:
+            self.tag_data["asset"] = self.ti_name
+
+        if not lib.pype_marker_workflow:
+            # create compound clip workflow
+            lib.create_compound_clip(
+                self.track_item_data,
+                self.tag_data["asset"],
+                self.mp_folder
+            )
+
+            # add track_item_data selection to tag
+            self.tag_data.update({
+                "track_data": self.track_item_data["track"]
+            })
+
+        # create pype tag on track_item and add data
+        lib.imprint(self.track_item, self.tag_data)
+
+        return self.track_item
+
+    def _populate_track_item_default_data(self):
+        """ Populate default formatting data from track item. """
+
+        self.track_item_default_data = {
+            "_folder_": "shots",
+            "_sequence_": self.sequence_name,
+            "_track_": self.track_name,
+            "_clip_": self.ti_name,
+            "_trackIndex_": self.track_index,
+            "_clipIndex_": self.ti_index
+        }
+
+    def _populate_attributes(self):
+        """ Populate main object attributes. """
+        # track item frame range and parent track name for vertical sync check
+        self.clip_in = int(self.track_item.GetStart())
+        self.clip_out = int(self.track_item.GetEnd())
+
+        # define ui inputs if non-gui mode was used
+        self.shot_num = self.ti_index
+        print(
+            "____ self.shot_num: {}".format(self.shot_num))
+
+        # ui_inputs data or default values if gui was not used
+        self.rename = self.ui_inputs.get(
+            "clipRename", {}).get("value") or self.rename_default
+        self.clip_name = self.ui_inputs.get(
+            "clipName", {}).get("value") or self.clip_name_default
+        self.hierarchy = self.ui_inputs.get(
+            "hierarchy", {}).get("value") or self.hierarchy_default
+        self.hierarchy_data = self.ui_inputs.get(
+            "hierarchyData", {}).get("value") or \
+            self.track_item_default_data.copy()
+        self.count_from = self.ui_inputs.get(
+            "countFrom", {}).get("value") or self.count_from_default
+        self.count_steps = self.ui_inputs.get(
+            "countSteps", {}).get("value") or self.count_steps_default
+        self.subset_name = self.ui_inputs.get(
+            "subsetName", {}).get("value") or self.subset_name_default
+        self.subset_family = self.ui_inputs.get(
+            "subsetFamily", {}).get("value") or self.subset_family_default
+        self.vertical_sync = self.ui_inputs.get(
+            "vSyncOn", {}).get("value") or self.vertical_sync_default
+        self.driving_layer = self.ui_inputs.get(
+            "vSyncTrack", {}).get("value") or self.driving_layer_default
+        self.review_track = self.ui_inputs.get(
+            "reviewTrack", {}).get("value") or self.review_track_default
+
+        # build subset name from layer name
+        if self.subset_name == "":
+            self.subset_name = self.track_name
+
+        # create subset for publishing
+        self.subset = self.subset_family + self.subset_name.capitalize()
+
+    def _replace_hash_to_expression(self, name, text):
+        """ Replace hash with number in correct padding.
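+
+        Example (illustrative): for name "shot" and text "shot####"
+        the result is "shot{shot:0>4}".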
""" + _spl = text.split("#") + _len = (len(_spl) - 1) + _repl = "{{{0}:0>{1}}}".format(name, _len) + new_text = text.replace(("#" * _len), _repl) + return new_text + + def _convert_to_tag_data(self): + """ Convert internal data to tag data. + + Populating the tag data into internal variable self.tag_data + """ + # define vertical sync attributes + master_layer = True + self.review_layer = "" + if self.vertical_sync: + # check if track name is not in driving layer + if self.track_name not in self.driving_layer: + # if it is not then define vertical sync as None + master_layer = False + + # increasing steps by index of rename iteration + self.count_steps *= self.rename_index + + hierarchy_formating_data = dict() + _data = self.track_item_default_data.copy() + if self.ui_inputs: + # adding tag metadata from ui + for _k, _v in self.ui_inputs.items(): + if _v["target"] == "tag": + self.tag_data[_k] = _v["value"] + + # driving layer is set as positive match + if master_layer or self.vertical_sync: + # mark review layer + if self.review_track and ( + self.review_track not in self.review_track_default): + # if review layer is defined and not the same as defalut + self.review_layer = self.review_track + # shot num calculate + if self.rename_index == 0: + self.shot_num = self.count_from + else: + self.shot_num = self.count_from + self.count_steps + + # clip name sequence number + _data.update({"shot": self.shot_num}) + + # solve # in test to pythonic expression + for _k, _v in self.hierarchy_data.items(): + if "#" not in _v["value"]: + continue + self.hierarchy_data[ + _k]["value"] = self._replace_hash_to_expression( + _k, _v["value"]) + + # fill up pythonic expresisons in hierarchy data + for k, _v in self.hierarchy_data.items(): + hierarchy_formating_data[k] = _v["value"].format(**_data) + else: + # if no gui mode then just pass default data + hierarchy_formating_data = self.hierarchy_data + + tag_hierarchy_data = self._solve_tag_hierarchy_data( + hierarchy_formating_data + ) + + tag_hierarchy_data.update({"masterLayer": True}) + if master_layer and self.vertical_sync: + # tag_hierarchy_data.update({"masterLayer": True}) + self.vertical_clip_match.update({ + (self.clip_in, self.clip_out): tag_hierarchy_data + }) + + if not master_layer and self.vertical_sync: + # driving layer is set as negative match + for (_in, _out), master_data in self.vertical_clip_match.items(): + master_data.update({"masterLayer": False}) + if _in == self.clip_in and _out == self.clip_out: + data_subset = master_data["subset"] + # add track index in case duplicity of names in master data + if self.subset in data_subset: + master_data["subset"] = self.subset + str( + self.track_index) + # in case track name and subset name is the same then add + if self.subset_name == self.track_name: + master_data["subset"] = self.subset + # assing data to return hierarchy data to tag + tag_hierarchy_data = master_data + + # add data to return data dict + self.tag_data.update(tag_hierarchy_data) + + if master_layer and self.review_layer: + self.tag_data.update({"reviewTrack": self.review_layer}) + + def _solve_tag_hierarchy_data(self, hierarchy_formating_data): + """ Solve tag data from hierarchy data and templates. 
""" + # fill up clip name and hierarchy keys + hierarchy_filled = self.hierarchy.format(**hierarchy_formating_data) + clip_name_filled = self.clip_name.format(**hierarchy_formating_data) + + return { + "newClipName": clip_name_filled, + "hierarchy": hierarchy_filled, + "parents": self.parents, + "hierarchyData": hierarchy_formating_data, + "subset": self.subset, + "family": self.subset_family, + "families": ["clip"] + } + + def _convert_to_entity(self, key): + """ Converting input key to key with type. """ + # convert to entity type + entity_type = self.types.get(key, None) + + assert entity_type, "Missing entity type for `{}`".format( + key + ) + + return { + "entity_type": entity_type, + "entity_name": self.hierarchy_data[key]["value"].format( + **self.track_item_default_data + ) + } + + def _create_parents(self): + """ Create parents and return it in list. """ + self.parents = list() + + patern = re.compile(self.parents_search_patern) + par_split = [patern.findall(t).pop() + for t in self.hierarchy.split("/")] + + for key in par_split: + parent = self._convert_to_entity(key) + self.parents.append(parent) diff --git a/pype/hosts/resolve/preload_console.py b/pype/hosts/resolve/preload_console.py index 58975777b8..de55c3673c 100644 --- a/pype/hosts/resolve/preload_console.py +++ b/pype/hosts/resolve/preload_console.py @@ -3,7 +3,7 @@ import time from pype.hosts.resolve.utils import get_resolve_module from pype.api import Logger -log = Logger().get_logger(__name__, "resolve") +log = Logger().get_logger(__name__) wait_delay = 2.5 wait = 0.00 diff --git a/pype/hosts/resolve/todo-rendering.py b/pype/hosts/resolve/todo-rendering.py new file mode 100644 index 0000000000..87b04dd98f --- /dev/null +++ b/pype/hosts/resolve/todo-rendering.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +# TODO: convert this script to be usable with PYPE +""" +Example DaVinci Resolve script: +Load a still from DRX file, apply the still to all clips in all timelines. +Set render format and codec, add render jobs for all timelines, render +to specified path and wait for rendering completion. 
+Once render is complete, delete all jobs
+"""
+# cloned from: https://github.com/survos/transcribe/blob/fe3cf51eb95b82dabcf21fbe5f89bfb3d8bb6ce2/python/3_grade_and_render_all_timelines.py # noqa

+from python_get_resolve import GetResolve
+import sys
+import time
+
+
+def AddTimelineToRender(project, timeline, presetName,
+                        targetDirectory, renderFormat, renderCodec):
+    project.SetCurrentTimeline(timeline)
+    project.LoadRenderPreset(presetName)
+
+    if not project.SetCurrentRenderFormatAndCodec(renderFormat, renderCodec):
+        return False
+
+    project.SetRenderSettings(
+        {"SelectAllFrames": 1, "TargetDir": targetDirectory})
+    return project.AddRenderJob()
+
+
+def RenderAllTimelines(resolve, presetName, targetDirectory,
+                       renderFormat, renderCodec):
+    projectManager = resolve.GetProjectManager()
+    project = projectManager.GetCurrentProject()
+    if not project:
+        return False
+
+    resolve.OpenPage("Deliver")
+    timelineCount = project.GetTimelineCount()
+
+    for index in range(0, int(timelineCount)):
+        if not AddTimelineToRender(
+                project,
+                project.GetTimelineByIndex(index + 1),
+                presetName,
+                targetDirectory,
+                renderFormat,
+                renderCodec):
+            return False
+    return project.StartRendering()
+
+
+def IsRenderingInProgress(resolve):
+    projectManager = resolve.GetProjectManager()
+    project = projectManager.GetCurrentProject()
+    if not project:
+        return False
+
+    return project.IsRenderingInProgress()
+
+
+def WaitForRenderingCompletion(resolve):
+    while IsRenderingInProgress(resolve):
+        time.sleep(1)
+    return
+
+
+def ApplyDRXToAllTimelineClips(timeline, path, gradeMode=0):
+    trackCount = timeline.GetTrackCount("video")
+
+    clips = {}
+    for index in range(1, int(trackCount) + 1):
+        clips.update(timeline.GetItemsInTrack("video", index))
+    return timeline.ApplyGradeFromDRX(path, int(gradeMode), clips)
+
+
+def ApplyDRXToAllTimelines(resolve, path, gradeMode=0):
+    projectManager = resolve.GetProjectManager()
+    project = projectManager.GetCurrentProject()
+    if not project:
+        return False
+    timelineCount = project.GetTimelineCount()
+
+    for index in range(0, int(timelineCount)):
+        timeline = project.GetTimelineByIndex(index + 1)
+        project.SetCurrentTimeline(timeline)
+        if not ApplyDRXToAllTimelineClips(timeline, path, gradeMode):
+            return False
+    return True
+
+
+def DeleteAllRenderJobs(resolve):
+    projectManager = resolve.GetProjectManager()
+    project = projectManager.GetCurrentProject()
+    project.DeleteAllRenderJobs()
+    return
+
+
+# Inputs:
+# - DRX file to import grade still and apply it for clips
+# - grade mode (0, 1 or 2)
+# - preset name for rendering
+# - render path
+# - render format
+# - render codec
+if len(sys.argv) < 7:
+    print(
+        "Input parameters for the script are [drx file path] [grade mode] "
+        "[render preset name] [render path] [render format] [render codec]")
+    sys.exit()
+
+drxPath = sys.argv[1]
+gradeMode = sys.argv[2]
+renderPresetName = sys.argv[3]
+renderPath = sys.argv[4]
+renderFormat = sys.argv[5]
+renderCodec = sys.argv[6]
+
+# Get currently open project
+resolve = GetResolve()
+
+if not ApplyDRXToAllTimelines(resolve, drxPath, gradeMode):
+    print("Unable to apply a still from drx file to all timelines")
+    sys.exit()
+
+if not RenderAllTimelines(resolve, renderPresetName, renderPath,
+                          renderFormat, renderCodec):
+    print("Unable to set all timelines for rendering")
+    sys.exit()
+
+WaitForRenderingCompletion(resolve)
+
+DeleteAllRenderJobs(resolve)
+
+print("Rendering is completed.")
diff --git a/pype/hosts/resolve/utility_scripts/OTIO_export.py
b/pype/hosts/resolve/utility_scripts/OTIO_export.py new file mode 100644 index 0000000000..a1142f56dd --- /dev/null +++ b/pype/hosts/resolve/utility_scripts/OTIO_export.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +import os +from pype.hosts.resolve.otio import davinci_export as otio_export + +resolve = bmd.scriptapp("Resolve") # noqa +fu = resolve.Fusion() + +ui = fu.UIManager +disp = bmd.UIDispatcher(fu.UIManager) # noqa + + +title_font = ui.Font({"PixelSize": 18}) +dlg = disp.AddWindow( + { + "WindowTitle": "Export OTIO", + "ID": "OTIOwin", + "Geometry": [250, 250, 250, 100], + "Spacing": 0, + "Margin": 10 + }, + [ + ui.VGroup( + { + "Spacing": 2 + }, + [ + ui.Button( + { + "ID": "exportfilebttn", + "Text": "Select Destination", + "Weight": 1.25, + "ToolTip": "Choose where to save the otio", + "Flat": False + } + ), + ui.VGap(), + ui.Button( + { + "ID": "exportbttn", + "Text": "Export", + "Weight": 2, + "ToolTip": "Export the current timeline", + "Flat": False + } + ) + ] + ) + ] +) + +itm = dlg.GetItems() + + +def _close_window(event): + disp.ExitLoop() + + +def _export_button(event): + pm = resolve.GetProjectManager() + project = pm.GetCurrentProject() + fps = project.GetSetting("timelineFrameRate") + timeline = project.GetCurrentTimeline() + otio_timeline = otio_export.create_otio_timeline(timeline, fps) + otio_path = os.path.join( + itm["exportfilebttn"].Text, + timeline.GetName() + ".otio") + print(otio_path) + otio_export.write_to_file( + otio_timeline, + otio_path) + _close_window(None) + + +def _export_file_pressed(event): + selectedPath = fu.RequestDir(os.path.expanduser("~/Documents")) + itm["exportfilebttn"].Text = selectedPath + + +dlg.On.OTIOwin.Close = _close_window +dlg.On.exportfilebttn.Clicked = _export_file_pressed +dlg.On.exportbttn.Clicked = _export_button +dlg.Show() +disp.RunLoop() +dlg.Hide() diff --git a/pype/hosts/resolve/utility_scripts/OTIO_import.py b/pype/hosts/resolve/utility_scripts/OTIO_import.py new file mode 100644 index 0000000000..5719ec3e3c --- /dev/null +++ b/pype/hosts/resolve/utility_scripts/OTIO_import.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +import os +from pype.hosts.resolve.otio import davinci_import as otio_import + +resolve = bmd.scriptapp("Resolve") # noqa +fu = resolve.Fusion() +ui = fu.UIManager +disp = bmd.UIDispatcher(fu.UIManager) # noqa + + +title_font = ui.Font({"PixelSize": 18}) +dlg = disp.AddWindow( + { + "WindowTitle": "Import OTIO", + "ID": "OTIOwin", + "Geometry": [250, 250, 250, 100], + "Spacing": 0, + "Margin": 10 + }, + [ + ui.VGroup( + { + "Spacing": 2 + }, + [ + ui.Button( + { + "ID": "importOTIOfileButton", + "Text": "Select OTIO File Path", + "Weight": 1.25, + "ToolTip": "Choose otio file to import from", + "Flat": False + } + ), + ui.VGap(), + ui.Button( + { + "ID": "importButton", + "Text": "Import", + "Weight": 2, + "ToolTip": "Import otio to new timeline", + "Flat": False + } + ) + ] + ) + ] +) + +itm = dlg.GetItems() + + +def _close_window(event): + disp.ExitLoop() + + +def _import_button(event): + otio_import.read_from_file(itm["importOTIOfileButton"].Text) + _close_window(None) + + +def _import_file_pressed(event): + selected_path = fu.RequestFile(os.path.expanduser("~/Documents")) + itm["importOTIOfileButton"].Text = selected_path + + +dlg.On.OTIOwin.Close = _close_window +dlg.On.importOTIOfileButton.Clicked = _import_file_pressed +dlg.On.importButton.Clicked = _import_button +dlg.Show() +disp.RunLoop() +dlg.Hide() diff --git a/pype/hosts/resolve/utility_scripts/PYPE_sync_util_scripts.py 
b/pype/hosts/resolve/utility_scripts/PYPE_sync_util_scripts.py new file mode 100644 index 0000000000..753bddc1da --- /dev/null +++ b/pype/hosts/resolve/utility_scripts/PYPE_sync_util_scripts.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +import os +import sys +import pype + + +def main(env): + import pype.hosts.resolve as bmdvr + # Registers pype's Global pyblish plugins + pype.install() + bmdvr.setup(env) + + +if __name__ == "__main__": + result = main(os.environ) + sys.exit(not bool(result)) diff --git a/pype/hosts/resolve/utility_scripts/Pype_menu.py b/pype/hosts/resolve/utility_scripts/__PYPE__MENU__.py similarity index 100% rename from pype/hosts/resolve/utility_scripts/Pype_menu.py rename to pype/hosts/resolve/utility_scripts/__PYPE__MENU__.py diff --git a/pype/hosts/resolve/utility_scripts/resolve_dev_scriping.py b/pype/hosts/resolve/utility_scripts/resolve_dev_scriping.py new file mode 100644 index 0000000000..bd9fe593e0 --- /dev/null +++ b/pype/hosts/resolve/utility_scripts/resolve_dev_scriping.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python + + +def main(): + import pype.hosts.resolve as bmdvr + bmdvr.utils.get_resolve_module() + + tracks = list() + track_type = "video" + sequence = bmdvr.get_current_sequence() + + # get all tracks count filtered by track type + selected_track_count = sequence.GetTrackCount(track_type) + + # loop all tracks and get items + for track_index in range(1, (int(selected_track_count) + 1)): + track_name = sequence.GetTrackName("video", track_index) + tracks.append(track_name) + + +if __name__ == "__main__": + main() diff --git a/pype/hosts/resolve/utility_scripts/test.py b/pype/hosts/resolve/utility_scripts/test.py index 69dc4768bd..a76e4dc501 100644 --- a/pype/hosts/resolve/utility_scripts/test.py +++ b/pype/hosts/resolve/utility_scripts/test.py @@ -1,19 +1,24 @@ #! python3 import sys -from pype.api import Logger import DaVinciResolveScript as bmdvr -log = Logger().get_logger(__name__) - - def main(): - import pype.hosts.resolve as bmdvr - bm = bmdvr.utils.get_resolve_module() - log.info(f"blackmagicmodule: {bm}") - - -print(f"_>> bmdvr.scriptapp(Resolve): {bmdvr.scriptapp('Resolve')}") + resolve = bmdvr.scriptapp('Resolve') + print(f"resolve: {resolve}") + project_manager = resolve.GetProjectManager() + project = project_manager.GetCurrentProject() + media_pool = project.GetMediaPool() + root_folder = media_pool.GetRootFolder() + ls_folder = root_folder.GetClipList() + timeline = project.GetCurrentTimeline() + timeline_name = timeline.GetName() + for tl in ls_folder: + if tl.GetName() not in timeline_name: + continue + print(tl.GetName()) + print(tl.GetMetadata()) + print(tl.GetClipProperty()) if __name__ == "__main__": diff --git a/pype/hosts/resolve/utils.py b/pype/hosts/resolve/utils.py index cb042a88e0..2a3f78a2df 100644 --- a/pype/hosts/resolve/utils.py +++ b/pype/hosts/resolve/utils.py @@ -9,7 +9,7 @@ import os import shutil from pype.api import Logger -log = Logger().get_logger(__name__, "resolve") +log = Logger().get_logger(__name__) def get_resolve_module(): diff --git a/pype/hosts/resolve/workio.py b/pype/hosts/resolve/workio.py index da74ebdeb1..18936df018 100644 --- a/pype/hosts/resolve/workio.py +++ b/pype/hosts/resolve/workio.py @@ -9,7 +9,7 @@ from . 
import ( ) -log = Logger().get_logger(__name__, "resolve") +log = Logger().get_logger(__name__) exported_projet_ext = ".drp" diff --git a/pype/hosts/tvpaint/hooks/pre_install_pywin.py b/pype/hosts/tvpaint/hooks/pre_install_pywin.py index ca9242c4c8..7abab33757 100644 --- a/pype/hosts/tvpaint/hooks/pre_install_pywin.py +++ b/pype/hosts/tvpaint/hooks/pre_install_pywin.py @@ -1,7 +1,7 @@ from pype.lib import ( PreLaunchHook, ApplicationLaunchFailed, - _subprocess + run_subprocess ) @@ -25,7 +25,7 @@ class PreInstallPyWin(PreLaunchHook): return try: - output = _subprocess( + output = run_subprocess( ["pip", "install", "pywin32==227"] ) self.log.debug("Pip install pywin32 output:\n{}'".format(output)) diff --git a/pype/lib/__init__.py b/pype/lib/__init__.py index 9444ef5195..691c105b76 100644 --- a/pype/lib/__init__.py +++ b/pype/lib/__init__.py @@ -2,7 +2,10 @@ """Pype module API.""" from .terminal import Terminal -from .execute import execute +from .execute import ( + execute, + run_subprocess +) from .log import PypeLogger, timeit from .mongo import ( decompose_url, @@ -10,17 +13,13 @@ from .mongo import ( get_default_components, PypeMongoConnection ) -from .anatomy import Anatomy - -from .config import ( - get_datetime_data, - load_json, - collect_json_from_path, - get_presets, - get_init_presets, - update_dict +from .anatomy import ( + merge_dict, + Anatomy ) +from .config import get_datetime_data + from .env_tools import ( env_value_to_bool, get_paths_from_environ @@ -39,6 +38,15 @@ from .avalon_context import ( get_hierarchy, get_linked_assets, get_latest_version, + + get_workdir_data, + get_workdir, + get_workdir_with_workdir_data, + + create_workfile_doc, + save_workfile_data_to_doc, + get_workfile_doc, + BuildWorkfile ) @@ -48,15 +56,18 @@ from .applications import ( ApplicationNotFound, ApplicationManager, PreLaunchHook, - PostLaunchHook, - _subprocess + PostLaunchHook ) from .plugin_tools import ( filter_pyblish_plugins, source_hash, get_unique_layer_name, - get_background_layers + get_background_layers, + oiio_supported, + decompress, + get_decompress_dir, + should_decompress ) from .user_settings import ( @@ -76,9 +87,23 @@ from .ffmpeg_utils import ( ffprobe_streams ) +from .editorial import ( + is_overlapping_otio_ranges, + otio_range_to_frame_range, + otio_range_with_handles, + convert_to_padded_path, + trim_media_range, + range_from_frames, + frames_to_secons, + make_sequence_collection +) + terminal = Terminal __all__ = [ + "execute", + "run_subprocess", + "env_value_to_bool", "get_paths_from_environ", @@ -92,10 +117,16 @@ __all__ = [ "get_hierarchy", "get_linked_assets", "get_latest_version", - "BuildWorkfile", - "PypeHook", - "execute_hook", + "get_workdir_data", + "get_workdir", + "get_workdir_with_workdir_data", + + "create_workfile_doc", + "save_workfile_data_to_doc", + "get_workfile_doc", + + "BuildWorkfile", "ApplicationLaunchFailed", "ApplictionExecutableNotFound", @@ -108,6 +139,10 @@ __all__ = [ "source_hash", "get_unique_layer_name", "get_background_layers", + "oiio_supported", + "decompress", + "get_decompress_dir", + "should_decompress", "version_up", "get_version_from_path", @@ -116,17 +151,13 @@ __all__ = [ "ffprobe_streams", "get_ffmpeg_tool_path", - "_subprocess", - "terminal", + + "merge_dict", "Anatomy", + "get_datetime_data", - "load_json", - "collect_json_from_path", - "get_presets", - "get_init_presets", - "update_dict", - "execute", + "PypeLogger", "decompose_url", "compose_url", @@ -136,5 +167,14 @@ __all__ = [ "IniSettingRegistry", 
"JSONSettingRegistry", "PypeSettingsRegistry", - "timeit" + "timeit", + + "is_overlapping_otio_ranges", + "otio_range_with_handles", + "convert_to_padded_path", + "otio_range_to_frame_range", + "trim_media_range", + "range_from_frames", + "frames_to_secons", + "make_sequence_collection" ] diff --git a/pype/lib/anatomy.py b/pype/lib/anatomy.py index cf907729c4..ad07851533 100644 --- a/pype/lib/anatomy.py +++ b/pype/lib/anatomy.py @@ -9,7 +9,6 @@ from pype.settings.lib import ( get_default_anatomy_settings, get_anatomy_settings ) -from . import config from .log import PypeLogger log = PypeLogger().get_logger(__name__) @@ -20,6 +19,32 @@ except NameError: StringType = str +def merge_dict(main_dict, enhance_dict): + """Merges dictionaries by keys. + + Function call itself if value on key is again dictionary. + + Args: + main_dict (dict): First dict to merge second one into. + enhance_dict (dict): Second dict to be merged. + + Returns: + dict: Merged result. + + .. note:: does not overrides whole value on first found key + but only values differences from enhance_dict + + """ + for key, value in enhance_dict.items(): + if key not in main_dict: + main_dict[key] = value + elif isinstance(value, dict) and isinstance(main_dict[key], dict): + main_dict[key] = merge_dict(main_dict[key], value) + else: + main_dict[key] = value + return main_dict + + class ProjectNotSet(Exception): """Exception raised when is created Anatomy without project name.""" @@ -395,9 +420,7 @@ class TemplatesDict(dict): if key in invalid_types: continue _invalid_types[key] = val - invalid_types = config.update_dict( - invalid_types, _invalid_types - ) + invalid_types = merge_dict(invalid_types, _invalid_types) return invalid_types @property @@ -405,7 +428,7 @@ class TemplatesDict(dict): """Return used values for all children templates.""" used_values = {} for value in self.values(): - used_values = config.update_dict(used_values, value.used_values) + used_values = merge_dict(used_values, value.used_values) return used_values def get_solved(self): @@ -840,7 +863,7 @@ class Templates: root_key = "{" + root_key + "}" - roots_dict = config.update_dict( + roots_dict = merge_dict( roots_dict, self._keys_to_dicts(used_root_keys, root_key) ) diff --git a/pype/lib/applications.py b/pype/lib/applications.py index cccc50d397..a7697a889c 100644 --- a/pype/lib/applications.py +++ b/pype/lib/applications.py @@ -1,5 +1,4 @@ import os -import copy import platform import inspect import subprocess @@ -16,8 +15,6 @@ from .python_module_tools import ( classes_from_module ) -log = PypeLogger().get_logger(__name__) - class ApplicationNotFound(Exception): """Application was not found in ApplicationManager by name.""" @@ -67,71 +64,6 @@ class ApplicationLaunchFailed(Exception): pass -# Special naming case for subprocess since its a built-in method. -def _subprocess(*args, **kwargs): - """Convenience method for getting output errors for subprocess. - - Entered arguments and keyword arguments are passed to subprocess Popen. - - Args: - *args: Variable length arument list passed to Popen. - **kwargs : Arbitary keyword arguments passed to Popen. Is possible to - pass `logging.Logger` object under "logger" if want to use - different than lib's logger. - - Returns: - str: Full output of subprocess concatenated stdout and stderr. - - Raises: - RuntimeError: Exception is raised if process finished with nonzero - return code. - """ - - # Get environents from kwarg or use current process environments if were - # not passed. 
-    env = kwargs.get("env") or os.environ
-    # Make sure environment contains only strings
-    filtered_env = {k: str(v) for k, v in env.items()}
-
-    # Use lib's logger if was not passed with kwargs.
-    logger = kwargs.pop("logger", log)
-
-    # set overrides
-    kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
-    kwargs['stderr'] = kwargs.get('stderr', subprocess.PIPE)
-    kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE)
-    kwargs['env'] = filtered_env
-
-    proc = subprocess.Popen(*args, **kwargs)
-
-    full_output = ""
-    _stdout, _stderr = proc.communicate()
-    if _stdout:
-        _stdout = _stdout.decode("utf-8")
-        full_output += _stdout
-        logger.debug(_stdout)
-
-    if _stderr:
-        _stderr = _stderr.decode("utf-8")
-        # Add additional line break if output already containt stdout
-        if full_output:
-            full_output += "\n"
-        full_output += _stderr
-        logger.warning(_stderr)
-
-    if proc.returncode != 0:
-        exc_msg = "Executing arguments was not successful: \"{}\"".format(args)
-        if _stdout:
-            exc_msg += "\n\nOutput:\n{}".format(_stdout)
-
-        if _stderr:
-            exc_msg += "Error:\n{}".format(_stderr)
-
-        raise RuntimeError(exc_msg)
-
-    return full_output
-
-
 class ApplicationManager:
     def __init__(self):
         self.log = PypeLogger().get_logger(self.__class__.__name__)
@@ -531,15 +463,23 @@ class ApplicationLaunchContext:
         self.launch_args = executable.as_args()

         # Handle launch environemtns
-        passed_env = self.data.pop("env", None)
-        if passed_env is None:
+        env = self.data.pop("env", None)
+        if env is not None and not isinstance(env, dict):
+            self.log.warning((
+                "Passed `env` kwarg has invalid type: {}. Expected: `dict`."
+                " Using `os.environ` instead."
+            ).format(str(type(env))))
+            env = None
+
+        if env is None:
             env = os.environ
-        else:
-            env = passed_env

         # subprocess.Popen keyword arguments
         self.kwargs = {
-            "env": copy.deepcopy(env)
+            "env": {
+                key: str(value)
+                for key, value in env.items()
+            }
         }

         if platform.system().lower() == "windows":
@@ -580,7 +520,6 @@
         paths = []

         # TODO load additional studio paths from settings
-        # TODO add paths based on used modules (like `ftrack`)
         import pype

         pype_dir = os.path.dirname(os.path.abspath(pype.__file__))
@@ -610,6 +549,13 @@
                 and path not in paths
             ):
                 paths.append(path)
+
+        # Load launch hook paths from modules
+        from pype.modules import ModulesManager
+
+        manager = ModulesManager()
+        paths.extend(manager.collect_launch_hook_paths())
+
         return paths

     def discover_launch_hooks(self, force=False):
diff --git a/pype/lib/avalon_context.py b/pype/lib/avalon_context.py
index 3a18e956d9..fd4155703e 100644
--- a/pype/lib/avalon_context.py
+++ b/pype/lib/avalon_context.py
@@ -1,11 +1,13 @@
 import os
 import json
 import re
+import copy
 import logging
 import collections
 import functools

 from pype.settings import get_project_settings
+from .anatomy import Anatomy

 # avalon module is not imported at the top
 # - may not be in path at the time of pype.lib initialization
@@ -246,6 +248,229 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
     return version_doc


+def get_workdir_data(project_doc, asset_doc, task_name, host_name):
+    """Prepare data for workdir template filling from entered information.
+
+    Args:
+        project_doc (dict): Mongo document of project from MongoDB.
+        asset_doc (dict): Mongo document of asset from MongoDB.
+        task_name (str): Task name for which the workdir data are prepared.
+        host_name (str): Host name used in the workdir. This is required
+            because the workdir template may contain the `{app}` key.
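+
+    Example of returned data (illustrative values):
+
+        {"project": {"name": "Proj", "code": "prj"}, "task": "modeling",
+         "asset": "char01", "app": "maya",
+         "hierarchy": "assets/characters"}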
+
+    Returns:
+        dict: Data prepared for filling workdir template.
+    """
+    hierarchy = "/".join(asset_doc["data"]["parents"])
+
+    data = {
+        "project": {
+            "name": project_doc["name"],
+            "code": project_doc["data"].get("code")
+        },
+        "task": task_name,
+        "asset": asset_doc["name"],
+        "app": host_name,
+        "hierarchy": hierarchy
+    }
+    return data
+
+
+def get_workdir_with_workdir_data(
+    workdir_data, anatomy=None, project_name=None, template_key=None
+):
+    """Fill workdir path from entered data and project's anatomy.
+
+    It is possible to pass only the project's name instead of its anatomy,
+    but one of them **must** be entered. It is preferred to enter anatomy if
+    available, as initialization of a new Anatomy object may be time
+    consuming.
+
+    Args:
+        workdir_data (dict): Data to fill workdir template.
+        anatomy (Anatomy): Anatomy object for specific project. Optional if
+            `project_name` is entered.
+        project_name (str): Project's name. Optional if `anatomy` is entered,
+            otherwise an Anatomy object is created using the project name.
+        template_key (str): Key of work templates in anatomy templates. By
+            default is set to `"work"`.
+
+    Returns:
+        TemplateResult: Workdir path.
+
+    Raises:
+        ValueError: When both `anatomy` and `project_name` are set to None.
+    """
+    if not anatomy and not project_name:
+        raise ValueError((
+            "Missing required arguments one of `project_name` or `anatomy`"
+            " must be entered."
+        ))
+
+    if not anatomy:
+        anatomy = Anatomy(project_name)
+
+    if not template_key:
+        template_key = "work"
+
+    anatomy_filled = anatomy.format(workdir_data)
+    # Output is TemplateResult object which contains useful data
+    return anatomy_filled[template_key]["folder"]
+
+
+def get_workdir(
+    project_doc,
+    asset_doc,
+    task_name,
+    host_name,
+    anatomy=None,
+    template_key=None
+):
+    """Fill workdir path from entered data and project's anatomy.
+
+    Args:
+        project_doc (dict): Mongo document of project from MongoDB.
+        asset_doc (dict): Mongo document of asset from MongoDB.
+        task_name (str): Task name for which the workdir data are prepared.
+        host_name (str): Host name used in the workdir. This is required
+            because the workdir template may contain the `{app}` key. In
+            `Session` it is stored under the `AVALON_APP` key.
+        anatomy (Anatomy): Optional argument. Anatomy object is created using
+            project name from `project_doc`. It is preferred to pass this
+            argument as initialization of a new Anatomy object may be time
+            consuming.
+        template_key (str): Key of work templates in anatomy templates.
+            Default value is defined in `get_workdir_with_workdir_data`.
+
+    Returns:
+        TemplateResult: Workdir path.
+    """
+    if not anatomy:
+        anatomy = Anatomy(project_doc["name"])
+
+    workdir_data = get_workdir_data(
+        project_doc, asset_doc, task_name, host_name
+    )
+    # Output is TemplateResult object which contains useful data
+    return get_workdir_with_workdir_data(
+        workdir_data, anatomy, template_key=template_key
+    )
+
+
+@with_avalon
+def get_workfile_doc(asset_id, task_name, filename, dbcon=None):
+    """Return workfile document for entered context.
+
+    Do not use this method to get more than one document. In such cases use
+    a custom query, as this returns documents from the database one by one.
+
+    Args:
+        asset_id (ObjectId): Mongo ID of an asset under which workfile
+            belongs.
+        task_name (str): Name of task under which the workfile belongs.
+        filename (str): Name of a workfile.
+        dbcon (AvalonMongoDB): Optionally enter an avalon AvalonMongoDB
+            object; `avalon.io` is used if not entered.
+
+    Returns:
+        dict: Workfile document or None.
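+
+    Example (hypothetical context values):
+        >>> doc = get_workfile_doc(
+        ...     asset_doc["_id"], "compositing", "sh010_comp_v001.nk"
+        ... )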
+ """ + # Use avalon.io if dbcon is not entered + if not dbcon: + dbcon = avalon.io + + return dbcon.find_one({ + "type": "workfile", + "parent": asset_id, + "task_name": task_name, + "filename": filename + }) + + +@with_avalon +def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): + """Creates or replace workfile document in mongo. + + Do not use this method to update data. This method will remove all + additional data from existing document. + + Args: + asset_doc (dict): Document of asset under which workfile belongs. + task_name (str): Name of task for which is workfile related to. + filename (str): Filename of workfile. + workdir (str): Path to directory where `filename` is located. + dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and + `avalon.io` is used if not entered. + """ + # Use avalon.io if dbcon is not entered + if not dbcon: + dbcon = avalon.io + + # Filter of workfile document + doc_filter = { + "type": "workfile", + "parent": asset_doc["_id"], + "task_name": task_name, + "filename": filename + } + # Document data are copy of filter + doc_data = copy.deepcopy(doc_filter) + + # Prepare project for workdir data + project_doc = dbcon.find_one({"type": "project"}) + workdir_data = get_workdir_data( + project_doc, asset_doc, task_name, dbcon.Session["AVALON_APP"] + ) + # Prepare anatomy + anatomy = Anatomy(project_doc["name"]) + # Get workdir path (result is anatomy.TemplateResult) + template_workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + template_workdir_path = str(template_workdir).replace("\\", "/") + + # Replace slashses in workdir path where workfile is located + mod_workdir = workdir.replace("\\", "/") + + # Replace workdir from templates with rootless workdir + rootles_workdir = mod_workdir.replace( + template_workdir_path, + template_workdir.rootless.replace("\\", "/") + ) + + doc_data["schema"] = "pype:workfile-1.0" + doc_data["files"] = ["/".join([rootles_workdir, filename])] + doc_data["data"] = {} + + dbcon.replace_one( + doc_filter, + doc_data, + upsert=True + ) + + +@with_avalon +def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): + if not workfile_doc: + # TODO add log message + return + + if not data: + return + + # Use avalon.io if dbcon is not entered + if not dbcon: + dbcon = avalon.io + + # Convert data to mongo modification keys/values + # - this is naive implementation which does not expect nested + # dictionaries + set_data = {} + for key, value in data.items(): + new_key = "data.{}".format(key) + set_data[new_key] = value + + # Update workfile document with data + dbcon.update_one( + {"_id": workfile_doc["_id"]}, + {"$set": set_data} + ) + + class BuildWorkfile: """Wrapper for build workfile process. diff --git a/pype/lib/config.py b/pype/lib/config.py index 6244d65d68..ba394cfd56 100644 --- a/pype/lib/config.py +++ b/pype/lib/config.py @@ -1,11 +1,6 @@ # -*- coding: utf-8 -*- """Get configuration data.""" -import os -import json import datetime -from .log import PypeLogger - -log = PypeLogger().get_logger(__name__) def get_datetime_data(datetime_obj=None): @@ -79,233 +74,3 @@ def get_datetime_data(datetime_obj=None): "S": str(int(seconds)), "SS": str(seconds), } - - -def load_json(fpath, first_run=False): - """Load JSON data. - - Args: - fpath (str): Path to JSON file. - first_run (bool): Flag to run checks if file is loaded for the first - time. - Returns: - dict: parsed JSON object. 
- - """ - # Load json data - with open(fpath, "r") as opened_file: - lines = opened_file.read().splitlines() - - # prepare json string - standard_json = "" - for line in lines: - # Remove all whitespace on both sides - line = line.strip() - - # Skip blank lines - if len(line) == 0: - continue - - standard_json += line - - # Check if has extra commas - extra_comma = False - if ",]" in standard_json or ",}" in standard_json: - extra_comma = True - standard_json = standard_json.replace(",]", "]") - standard_json = standard_json.replace(",}", "}") - - if extra_comma and first_run: - log.error("Extra comma in json file: \"{}\"".format(fpath)) - - # return empty dict if file is empty - if standard_json == "": - if first_run: - log.error("Empty json file: \"{}\"".format(fpath)) - return {} - - # Try to parse string - try: - return json.loads(standard_json) - - except json.decoder.JSONDecodeError: - # Return empty dict if it is first time that decode error happened - if not first_run: - return {} - - # Repreduce the exact same exception but traceback contains better - # information about position of error in the loaded json - try: - with open(fpath, "r") as opened_file: - json.load(opened_file) - - except json.decoder.JSONDecodeError: - log.warning( - "File has invalid json format \"{}\"".format(fpath), - exc_info=True - ) - - return {} - - -def collect_json_from_path(input_path, first_run=False): - """Collect JSON file from path. - - Iterate through all subfolders and JSON files in `input_path`. - - Args: - input_path (str): Path from JSONs will be collected. - first_run (bool): Flag to run checks if file is loaded for the first - time. - - Returns: - dict: Collected JSONs. - - Examples: - - Imagine path:: - `{input_path}/path/to/file.json` - - >>> collect_json_from_path(input_path) - {'path': - {'to': - {'file': {JSON} - } - } - - """ - output = None - if os.path.isdir(input_path): - output = {} - for file in os.listdir(input_path): - full_path = os.path.sep.join([input_path, file]) - if os.path.isdir(full_path): - loaded = collect_json_from_path(full_path, first_run) - if loaded: - output[file] = loaded - else: - basename, ext = os.path.splitext(os.path.basename(file)) - if ext == '.json': - output[basename] = load_json(full_path, first_run) - else: - basename, ext = os.path.splitext(os.path.basename(input_path)) - if ext == '.json': - output = load_json(input_path, first_run) - - return output - - -def get_presets(project=None, first_run=False): - """Loads preset files with usage of ``collect_json_from_path``. - - Default preset path is set to: `{PYPE_CONFIG}/presets` - Project preset path is set to: `{PYPE_PROJECT_CONFIGS}/project_name` - - Environment variable `PYPE_STUDIO_CONFIG` is required - `PYPE_STUDIO_CONFIGS` only if want to use overrides per project. - - Args: - project (str): Project name. - first_run (bool): Flag to run checks if file is loaded for the first - time. - - Returns: - None: If default path does not exist. - default presets (dict): If project_name is not set or - if project's presets folder does not exist. - project presets (dict): If project_name is set and include - override data. - - """ - # config_path should be set from environments? 
- config_path = os.path.normpath(os.environ['PYPE_CONFIG']) - preset_items = [config_path, 'presets'] - config_path = os.path.sep.join(preset_items) - if not os.path.isdir(config_path): - log.error('Preset path was not found: "{}"'.format(config_path)) - return None - default_data = collect_json_from_path(config_path, first_run) - - if not project: - project = os.environ.get('AVALON_PROJECT', None) - - if not project: - return default_data - - project_configs_path = os.environ.get('PYPE_PROJECT_CONFIGS') - if not project_configs_path: - return default_data - - project_configs_path = os.path.normpath(project_configs_path) - project_config_items = [project_configs_path, project, 'presets'] - project_config_path = os.path.sep.join(project_config_items) - - if not os.path.isdir(project_config_path): - log.warning('Preset path for project {} not found: "{}"'.format( - project, project_config_path - )) - return default_data - project_data = collect_json_from_path(project_config_path, first_run) - - return update_dict(default_data, project_data) - - -def get_init_presets(project=None): - """Loads content of presets. - - Like :func:`get_presets()`` but also evaluate `init.json` - pointer to default presets. - - Args: - project(str): Project name. - - Returns: - None: If default path does not exist - default presets (dict): If project_name is not set or if project's - presets folder does not exist. - project presets (dict): If project_name is set and include - override data. - """ - presets = get_presets(project) - - try: - # try if it is not in projects custom directory - # `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json` - # init.json define preset names to be used - p_init = presets["init"] - presets["colorspace"] = presets["colorspace"][p_init["colorspace"]] - presets["dataflow"] = presets["dataflow"][p_init["dataflow"]] - except KeyError: - log.warning("No projects custom preset available...") - presets["colorspace"] = presets["colorspace"]["default"] - presets["dataflow"] = presets["dataflow"]["default"] - log.info(("Presets `colorspace` and `dataflow` " - "loaded from `default`...")) - - return presets - - -def update_dict(main_dict, enhance_dict): - """Merges dictionaries by keys. - - Function call itself if value on key is again dictionary. - - Args: - main_dict (dict): First dict to merge second one into. - enhance_dict (dict): Second dict to be merged. - - Returns: - dict: Merged result. - - .. 
note:: does not overrides whole value on first found key
-        but only values differences from enhance_dict
-
-    """
-    for key, value in enhance_dict.items():
-        if key not in main_dict:
-            main_dict[key] = value
-        elif isinstance(value, dict) and isinstance(main_dict[key], dict):
-            main_dict[key] = update_dict(main_dict[key], value)
-        else:
-            main_dict[key] = value
-    return main_dict
diff --git a/pype/lib/editorial.py b/pype/lib/editorial.py
new file mode 100644
index 0000000000..7f29bf00bb
--- /dev/null
+++ b/pype/lib/editorial.py
@@ -0,0 +1,160 @@
+import os
+import re
+import clique
+from opentimelineio import opentime
+from opentimelineio.opentime import (
+    to_frames, RationalTime, TimeRange)
+
+
+def otio_range_to_frame_range(otio_range):
+    start = to_frames(
+        otio_range.start_time, otio_range.start_time.rate)
+    end = start + to_frames(
+        otio_range.duration, otio_range.duration.rate) - 1
+    return start, end
+
+
+def otio_range_with_handles(otio_range, instance):
+    handle_start = instance.data["handleStart"]
+    handle_end = instance.data["handleEnd"]
+    handles_duration = handle_start + handle_end
+    fps = float(otio_range.start_time.rate)
+    start = to_frames(otio_range.start_time, fps)
+    duration = to_frames(otio_range.duration, fps)
+
+    return TimeRange(
+        start_time=RationalTime((start - handle_start), fps),
+        duration=RationalTime((duration + handles_duration), fps)
+    )
+
+
+def is_overlapping_otio_ranges(test_otio_range, main_otio_range, strict=False):
+    test_start, test_end = otio_range_to_frame_range(test_otio_range)
+    main_start, main_end = otio_range_to_frame_range(main_otio_range)
+    covering_exp = bool(
+        (test_start <= main_start) and (test_end >= main_end)
+    )
+    inside_exp = bool(
+        (test_start >= main_start) and (test_end <= main_end)
+    )
+    overlaying_right_exp = bool(
+        (test_start <= main_end) and (test_end >= main_end)
+    )
+    overlaying_left_exp = bool(
+        (test_end >= main_start) and (test_start <= main_start)
+    )
+
+    if not strict:
+        return any((
+            covering_exp,
+            inside_exp,
+            overlaying_right_exp,
+            overlaying_left_exp
+        ))
+    else:
+        return covering_exp
+
+
+def convert_to_padded_path(path, padding):
+    """
+    Return correct padding in sequence string
+
+    Args:
+        path (str): path url or simple file name
+        padding (int): number of padding
+
+    Returns:
+        str: string with reformatted path
+
+    Example:
+        convert_to_padded_path("plate.%d.exr", 4) > plate.%04d.exr
+
+    """
+    if "%d" in path:
+        path = re.sub("%d", "%0{padding}d".format(padding=padding), path)
+    return path
+
+
+def trim_media_range(media_range, source_range):
+    """
+    Trim input media range with clip source range.
+
+    Args:
+        media_range (otio.opentime.TimeRange): available range of media
+        source_range (otio.opentime.TimeRange): clip required range
+
+    Returns:
+        otio.opentime.TimeRange: trimmed media range
+
+    """
+    rw_media_start = RationalTime(
+        media_range.start_time.value + source_range.start_time.value,
+        media_range.start_time.rate
+    )
+    rw_media_duration = RationalTime(
+        source_range.duration.value,
+        media_range.duration.rate
+    )
+    return TimeRange(
+        rw_media_start, rw_media_duration)
+
+
+def range_from_frames(start, duration, fps):
+    """
+    Returns otio time range.
+
+    Args:
+        start (int): frame start
+        duration (int): frame duration
+        fps (float): frame rate
+
+    Returns:
+        otio.opentime.TimeRange: created range
+
+    """
+    return TimeRange(
+        RationalTime(start, fps),
+        RationalTime(duration, fps)
+    )
+
+
+def frames_to_secons(frames, framerate):
+    """
+    Return seconds.
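+
+    Example:
+        >>> frames_to_secons(48, 24.0)
+        2.0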
+
+    Args:
+        frames (int): frame count
+        framerate (float): frame rate
+
+    Returns:
+        float: second value
+
+    """
+    rt = opentime.from_frames(frames, framerate)
+    return opentime.to_seconds(rt)
+
+
+def make_sequence_collection(path, otio_range, metadata):
+    """
+    Make a collection from path, otio range and otio metadata.
+
+    Args:
+        path (str): path to image sequence with `%d`
+        otio_range (otio.opentime.TimeRange): range to be used
+        metadata (dict): data where padding value can be found
+
+    Returns:
+        tuple: directory path (str) and clique.Collection object
+
+    """
+    if "%" not in path:
+        return None
+    file_name = os.path.basename(path)
+    dir_path = os.path.dirname(path)
+    head = file_name.split("%")[0]
+    tail = os.path.splitext(file_name)[-1]
+    first, last = otio_range_to_frame_range(otio_range)
+    collection = clique.Collection(
+        head=head, tail=tail, padding=metadata["padding"])
+    collection.indexes.update([i for i in range(first, (last + 1))])
+    return dir_path, collection
diff --git a/pype/lib/execute.py b/pype/lib/execute.py
index d7951df384..1f1adcdf23 100644
--- a/pype/lib/execute.py
+++ b/pype/lib/execute.py
@@ -69,42 +69,67 @@ def execute(args,
     return popen.returncode


-def _subprocess(*args, **kwargs):
+def run_subprocess(*args, **kwargs):
     """Convenience method for getting output errors for subprocess.

-    .. seealso:: :mod:`subprocess`
+    Output is logged when the process finishes.
+    Entered arguments and keyword arguments are passed to subprocess Popen.
+
+    Args:
+        *args: Variable length argument list passed to Popen.
+        **kwargs: Arbitrary keyword arguments passed to Popen. It is possible
+            to pass a `logging.Logger` object under "logger" to use a
+            different logger than lib's.
+
+    Returns:
+        str: Full output of subprocess concatenated stdout and stderr.
+
+    Raises:
+        RuntimeError: Exception is raised if process finished with nonzero
+            return code.
     """
-    # make sure environment contains only strings
-    if not kwargs.get("env"):
-        filtered_env = {k: str(v) for k, v in os.environ.items()}
-    else:
-        filtered_env = {k: str(v) for k, v in kwargs.get("env").items()}
+
+    # Get environments from kwargs or use current process environments if
+    # not passed.
+    env = kwargs.get("env") or os.environ
+    # Make sure environment contains only strings
+    filtered_env = {k: str(v) for k, v in env.items()}
+
+    # Use lib's logger if none was passed with kwargs.
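+    # ("logger" is popped so the extra kwarg is not forwarded to
+    #  subprocess.Popen)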
+    logger = kwargs.pop("logger", log)

     # set overrides
     kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
-    kwargs['stderr'] = kwargs.get('stderr', subprocess.STDOUT)
+    kwargs['stderr'] = kwargs.get('stderr', subprocess.PIPE)
     kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE)
     kwargs['env'] = filtered_env

     proc = subprocess.Popen(*args, **kwargs)

-    output, error = proc.communicate()
+    full_output = ""
+    _stdout, _stderr = proc.communicate()
+    if _stdout:
+        _stdout = _stdout.decode("utf-8")
+        full_output += _stdout
+        logger.debug(_stdout)

-    if output:
-        output = output.decode("utf-8")
-        output += "\n"
-        for line in output.strip().split("\n"):
-            log.info(line)
-
-    if error:
-        error = error.decode("utf-8")
-        error += "\n"
-        for line in error.strip().split("\n"):
-            log.error(line)
+    if _stderr:
+        _stderr = _stderr.decode("utf-8")
+        # Add additional line break if output already contains stdout
+        if full_output:
+            full_output += "\n"
+        full_output += _stderr
+        logger.warning(_stderr)

     if proc.returncode != 0:
-        raise ValueError(
-            "\"{}\" was not successful:\nOutput: {}\nError: {}".format(
-                args, output, error))
-    return output
+        exc_msg = "Executing arguments was not successful: \"{}\"".format(args)
+        if _stdout:
+            exc_msg += "\n\nOutput:\n{}".format(_stdout)
+
+        if _stderr:
+            exc_msg += "Error:\n{}".format(_stderr)
+
+        raise RuntimeError(exc_msg)
+
+    return full_output
diff --git a/pype/lib/log.py b/pype/lib/log.py
index 47f379d952..b6cbff5648 100644
--- a/pype/lib/log.py
+++ b/pype/lib/log.py
@@ -21,99 +21,24 @@ import socket
 import sys
 import time
 import traceback
-from logging.handlers import TimedRotatingFileHandler
+import threading
+import copy

 from . import Terminal
 from .mongo import (
     MongoEnvNotSet,
     decompose_url,
-    compose_url,
-    get_default_components
+    PypeMongoConnection
 )
-
 try:
     import log4mongo
     from log4mongo.handlers import MongoHandler
-    from bson.objectid import ObjectId
-    MONGO_PROCESS_ID = ObjectId()
 except ImportError:
-    _mongo_logging = False
-else:
-    _mongo_logging = True
+    log4mongo = None
+    MongoHandler = type("NOT_SET", (), {})

-try:
-    unicode
-    _unicode = True
-except NameError:
-    _unicode = False
-
-
-PYPE_DEBUG = int(os.getenv("PYPE_DEBUG", "0"))
-LOG_DATABASE_NAME = os.environ.get("PYPE_LOG_MONGO_DB") or "pype"
-LOG_COLLECTION_NAME = os.environ.get("PYPE_LOG_MONGO_COL") or "logs"
-
-system_name, pc_name = platform.uname()[:2]
-host_name = socket.gethostname()
-try:
-    ip = socket.gethostbyname(host_name)
-except socket.gaierror:
-    ip = "127.0.0.1"
-
-# Get process name
-if len(sys.argv) > 0 and os.path.basename(sys.argv[0]) == "tray.py":
-    process_name = "Tray"
-else:
-    try:
-        import psutil
-        process = psutil.Process(os.getpid())
-        process_name = process.name()
-
-    except ImportError:
-        process_name = os.environ.get("AVALON_APP_NAME")
-        if not process_name:
-            process_name = os.path.basename(sys.executable)
-
-
-def _log_mongo_components():
-    mongo_url = os.environ.get("PYPE_LOG_MONGO_URL")
-    if mongo_url is not None:
-        components = decompose_url(mongo_url)
-    else:
-        components = get_default_components()
-    return components
-
-
-def _bootstrap_mongo_log(components=None):
-    """
-    This will check if database and collection for logging exist on server.
- """ - import pymongo - - if components is None: - components = _log_mongo_components() - - if not components["host"]: - # fail silently - return - - timeout = int(os.environ.get("AVALON_TIMEOUT", 1000)) - kwargs = { - "host": compose_url(**components), - "serverSelectionTimeoutMS": timeout - } - - port = components.get("port") - if port is not None: - kwargs["port"] = int(port) - client = pymongo.MongoClient(**kwargs) - logdb = client[LOG_DATABASE_NAME] - - collist = logdb.list_collection_names() - if LOG_COLLECTION_NAME not in collist: - logdb.create_collection( - LOG_COLLECTION_NAME, capped=True, max=5000, size=1073741824 - ) - return logdb +# Check for `unicode` in builtins +USE_UNICODE = hasattr(__builtins__, "unicode") class PypeStreamHandler(logging.StreamHandler): @@ -148,7 +73,8 @@ class PypeStreamHandler(logging.StreamHandler): msg = Terminal.log(msg) stream = self.stream fs = "%s\n" - if not _unicode: # if no unicode support... + # if no unicode support... + if not USE_UNICODE: stream.write(fs % msg) else: try: @@ -225,23 +151,18 @@ class PypeMongoFormatter(logging.Formatter): 'fileName': record.pathname, 'module': record.module, 'method': record.funcName, - 'lineNumber': record.lineno, - 'process_id': MONGO_PROCESS_ID, - 'hostname': host_name, - 'hostip': ip, - 'username': getpass.getuser(), - 'system_name': system_name, - 'process_name': process_name + 'lineNumber': record.lineno } + document.update(PypeLogger.get_process_data()) + # Standard document decorated with exception info if record.exc_info is not None: - document.update({ - 'exception': { - 'message': str(record.exc_info[1]), - 'code': 0, - 'stackTrace': self.formatException(record.exc_info) - } - }) + document['exception'] = { + 'message': str(record.exc_info[1]), + 'code': 0, + 'stackTrace': self.formatException(record.exc_info) + } + # Standard document decorated with extra contextual information if len(self.DEFAULT_PROPERTIES) != len(record.__dict__): contextual_extra = set(record.__dict__).difference( @@ -253,9 +174,6 @@ class PypeMongoFormatter(logging.Formatter): class PypeLogger: - - PYPE_DEBUG = 0 - DFT = '%(levelname)s >>> { %(name)s }: [ %(message)s ] ' DBG = " - { %(name)s }: [ %(message)s ] " INF = ">>> [ %(message)s ] " @@ -271,55 +189,97 @@ class PypeLogger: logging.CRITICAL: CRI, } - def __init__(self): - self.PYPE_DEBUG = int(os.environ.get("PYPE_DEBUG", "0")) + # Is static class initialized + bootstraped = False + initialized = False + _init_lock = threading.Lock() - @staticmethod - def get_file_path(host='pype'): + # Defines if mongo logging should be used + use_mongo_logging = None + mongo_process_id = None - ts = time.time() - log_name = datetime.datetime.fromtimestamp(ts).strftime( - '%Y-%m-%d' # '%Y-%m-%d_%H-%M-%S' - ) + # Information about mongo url + log_mongo_url = None + log_mongo_url_components = None + log_database_name = None + log_collection_name = None - logger_file_root = os.path.join( - os.path.expanduser("~"), - ".pype-setup" - ) + # PYPE_DEBUG + pype_debug = 0 - logger_file_path = os.path.join( - logger_file_root, - "{}-{}.{}".format(host, log_name, 'log') - ) + # Data same for all record documents + process_data = None + # Cached process name or ability to set different process name + _process_name = None - if not os.path.exists(logger_file_root): - os.mkdir(logger_file_root) + @classmethod + def get_logger(cls, name=None, _host=None): + if not cls.initialized: + cls.initialize() - return logger_file_path + logger = logging.getLogger(name or "__main__") - def 
_get_file_handler(self, host): - logger_file_path = PypeLogger.get_file_path(host) + if cls.pype_debug > 1: + logger.setLevel(logging.DEBUG) + else: + logger.setLevel(logging.INFO) - formatter = PypeFormatter(self.FORMAT_FILE) + add_mongo_handler = cls.use_mongo_logging + add_console_handler = True - file_handler = TimedRotatingFileHandler( - logger_file_path, - when='midnight' - ) - file_handler.set_name("PypeFileHandler") - file_handler.setFormatter(formatter) - return file_handler + for handler in logger.handlers: + if isinstance(handler, MongoHandler): + add_mongo_handler = False + elif isinstance(handler, PypeStreamHandler): + add_console_handler = False - def _get_mongo_handler(self): - components = _log_mongo_components() - # Check existence of mongo connection before creating Mongo handler - if log4mongo.handlers._connection is None: - _bootstrap_mongo_log(components) + if add_console_handler: + logger.addHandler(cls._get_console_handler()) + if add_mongo_handler: + try: + handler = cls._get_mongo_handler() + if handler: + logger.addHandler(handler) + + except MongoEnvNotSet: + # Skip if mongo environments are not set yet + cls.use_mongo_logging = False + + except Exception: + lines = traceback.format_exception(*sys.exc_info()) + for line in lines: + if line.endswith("\n"): + line = line[:-1] + Terminal.echo(line) + cls.use_mongo_logging = False + + # Do not propagate logs to root logger + logger.propagate = False + + if _host is not None: + # Warn about deprecated argument + # TODO remove backwards compatibility of host argument which is + # not used for more than a year + logger.warning( + "Logger \"{}\" is using argument `host` on `get_logger`" + " which is deprecated. Please remove as backwards" + " compatibility will be removed soon." + ) + return logger + + @classmethod + def _get_mongo_handler(cls): + cls.bootstrap_mongo_log() + + if not cls.use_mongo_logging: + return + + components = cls.log_mongo_url_components kwargs = { - "host": compose_url(**components), - "database_name": LOG_DATABASE_NAME, - "collection": LOG_COLLECTION_NAME, + "host": cls.log_mongo_url, + "database_name": cls.log_database_name, + "collection": cls.log_collection_name, "username": components["username"], "password": components["password"], "capped": True, @@ -332,56 +292,193 @@ class PypeLogger: return MongoHandler(**kwargs) - def _get_console_handler(self): - - formatter = PypeFormatter(self.FORMAT_FILE) + @classmethod + def _get_console_handler(cls): + formatter = PypeFormatter(cls.FORMAT_FILE) console_handler = PypeStreamHandler() console_handler.set_name("PypeStreamHandler") console_handler.setFormatter(formatter) return console_handler - def get_logger(self, name=None, host=None): - logger = logging.getLogger(name or '__main__') - - if self.PYPE_DEBUG > 1: - logger.setLevel(logging.DEBUG) + @classmethod + def initialize(cls): + # TODO update already created loggers on re-initialization + if not cls._init_lock.locked(): + with cls._init_lock: + cls._initialize() else: - logger.setLevel(logging.INFO) + # If lock is locked wait until is finished + while cls._init_lock.locked(): + time.sleep(0.1) - global _mongo_logging - add_mongo_handler = _mongo_logging - add_console_handler = True + @classmethod + def _initialize(cls): + # Change initialization state to prevent runtime changes + # if is executed during runtime + cls.initialized = False - for handler in logger.handlers: - if isinstance(handler, MongoHandler): - add_mongo_handler = False - elif isinstance(handler, PypeStreamHandler): - 
add_console_handler = False
-
-        if add_console_handler:
-            logger.addHandler(self._get_console_handler())
-
-        if add_mongo_handler:
+        # Define if logging to mongo should be used
+        use_mongo_logging = bool(log4mongo is not None)
+        # Set mongo id for process (ONLY ONCE)
+        if use_mongo_logging and cls.mongo_process_id is None:
             try:
-                logger.addHandler(self._get_mongo_handler())
-
-            except MongoEnvNotSet:
-                # Skip if mongo environments are not set yet
-                _mongo_logging = False
-
+                from bson.objectid import ObjectId
             except Exception:
-                lines = traceback.format_exception(*sys.exc_info())
-                for line in lines:
-                    if line.endswith("\n"):
-                        line = line[:-1]
-                    Terminal.echo(line)
-                _mongo_logging = False
+                use_mongo_logging = False
 
-        # Do not propagate logs to root logger
-        logger.propagate = False
+        # Check if mongo id was passed with environments and pop it
+        # - This is for subprocesses that are part of another process,
+        #   e.g. Ftrack event server has 3 other subprocesses that should
+        #   use the same mongo id
+        if use_mongo_logging:
+            mongo_id = os.environ.pop("PYPE_PROCESS_MONGO_ID", None)
+            if not mongo_id:
+                # Create new object id
+                mongo_id = ObjectId()
+            else:
+                # Convert string to ObjectId object
+                mongo_id = ObjectId(mongo_id)
+            cls.mongo_process_id = mongo_id
 
-        return logger
+        # Store result to class definition
+        cls.use_mongo_logging = use_mongo_logging
+
+        # Define if PYPE_DEBUG mode is enabled
+        cls.pype_debug = int(os.getenv("PYPE_DEBUG") or "0")
+
+        # Mongo URL where logs will be stored
+        cls.log_mongo_url = (
+            os.environ.get("PYPE_LOG_MONGO_URL")
+            or os.environ.get("PYPE_MONGO")
+        )
+        if not cls.log_mongo_url:
+            cls.use_mongo_logging = False
+        else:
+            # Decompose url
+            cls.log_mongo_url_components = decompose_url(cls.log_mongo_url)
+
+        # Database name in Mongo
+        cls.log_database_name = (
+            os.environ.get("PYPE_LOG_MONGO_DB") or "pype"
+        )
+        # Collection name under database in Mongo
+        cls.log_collection_name = (
+            os.environ.get("PYPE_LOG_MONGO_COL") or "logs"
+        )
+
+        # Mark as initialized
+        cls.initialized = True
+
+    @classmethod
+    def get_process_data(cls):
+        """Data about current process which should be the same for all records.
+
+        Process data are used for each record sent to mongo database.
+        """
+        if cls.process_data is not None:
+            return copy.deepcopy(cls.process_data)
+
+        if not cls.initialized:
+            cls.initialize()
+
+        host_name = socket.gethostname()
+        try:
+            host_ip = socket.gethostbyname(host_name)
+        except socket.gaierror:
+            host_ip = "127.0.0.1"
+
+        process_name = cls.get_process_name()
+
+        cls.process_data = {
+            "process_id": cls.mongo_process_id,
+            "hostname": host_name,
+            "hostip": host_ip,
+            "username": getpass.getuser(),
+            "system_name": platform.system(),
+            "process_name": process_name
+        }
+        return copy.deepcopy(cls.process_data)
+
+    @classmethod
+    def set_process_name(cls, process_name):
+        """Set process name for mongo logs."""
+        # Just change the attribute
+        cls._process_name = process_name
+        # Update process data if they are already set
+        if cls.process_data is not None:
+            cls.process_data["process_name"] = process_name
+
+    @classmethod
+    def get_process_name(cls):
+        """Process name that acts like a "label" of a process.
+
+        Pype's logging can be used from pype itself or from hosts. Even in
+        Pype it's good to know if logs are from Pype tray or from pype's
+        event server. This should help to identify that information.
+ """ + if cls._process_name is not None: + return cls._process_name + + # Get process name + process_name = os.environ.get("AVALON_APP_NAME") + if not process_name: + try: + import psutil + process = psutil.Process(os.getpid()) + process_name = process.name() + + except ImportError: + pass + + if not process_name: + process_name = os.path.basename(sys.executable) + + cls._process_name = process_name + return cls._process_name + + @classmethod + def bootstrap_mongo_log(cls): + """Prepare mongo logging.""" + if cls.bootstraped: + return + + if not cls.initialized: + cls.initialize() + + if not cls.use_mongo_logging: + return + + client = log4mongo.handlers._connection + if not client: + client = cls.get_log_mongo_connection() + # Set the client inside log4mongo handlers to not create another + # mongo db connection. + log4mongo.handlers._connection = client + + logdb = client[cls.log_database_name] + + collist = logdb.list_collection_names() + if cls.log_collection_name not in collist: + logdb.create_collection( + cls.log_collection_name, + capped=True, + max=5000, + size=1073741824 + ) + cls.bootstraped = True + + @classmethod + def get_log_mongo_connection(cls): + """Mongo connection that allows to get to log collection. + + This is implemented to prevent multiple connections to mongo from same + process. + """ + if not cls.initialized: + cls.initialize() + + return PypeMongoConnection.get_mongo_client(cls.log_mongo_url) def timeit(method): diff --git a/pype/lib/plugin_tools.py b/pype/lib/plugin_tools.py index 13d311d96c..c39c9401c3 100644 --- a/pype/lib/plugin_tools.py +++ b/pype/lib/plugin_tools.py @@ -5,6 +5,9 @@ import inspect import logging import re import json +import tempfile + +from .execute import run_subprocess from pype.settings import get_project_settings @@ -134,3 +137,115 @@ def get_background_layers(file_url): layer.get("filename")). replace("\\", "/")) return layers + + +def oiio_supported(): + """ + Checks if oiiotool is configured for this platform. + + Expects full path to executable. + + 'should_decompress' will throw exception if configured, + but not present or not working. + Returns: + (bool) + """ + oiio_path = os.getenv("PYPE_OIIO_PATH", "") + if not oiio_path or not os.path.exists(oiio_path): + log.debug("OIIOTool is not configured or not present at {}". + format(oiio_path)) + return False + + return True + + +def decompress(target_dir, file_url, + input_frame_start=None, input_frame_end=None, log=None): + """ + Decompresses DWAA 'file_url' .exr to 'target_dir'. + + Creates uncompressed files in 'target_dir', they need to be cleaned. + + File url could be for single file or for a sequence, in that case + %0Xd will be as a placeholder for frame number AND input_frame* will + be filled. 
+    In that case a single oiio command with '--frames' will be triggered
+    for all frames; this should be faster than looping and running
+    sequentially.
+
+    Args:
+        target_dir (str): extended from stagingDir
+        file_url (str): full url to source file (with or without %0Xd)
+        input_frame_start (int) (optional): first frame
+        input_frame_end (int) (optional): last frame
+        log (Logger) (optional): pype logger
+    """
+    is_sequence = input_frame_start is not None and \
+        input_frame_end is not None and \
+        (int(input_frame_end) > int(input_frame_start))
+
+    oiio_cmd = []
+    oiio_cmd.append(os.getenv("PYPE_OIIO_PATH"))
+
+    oiio_cmd.append("--compression none")
+
+    base_file_name = os.path.basename(file_url)
+    oiio_cmd.append(file_url)
+
+    if is_sequence:
+        oiio_cmd.append("--frames {}-{}".format(input_frame_start,
+                                                input_frame_end))
+
+    oiio_cmd.append("-o")
+    oiio_cmd.append(os.path.join(target_dir, base_file_name))
+
+    subprocess_exr = " ".join(oiio_cmd)
+
+    if not log:
+        log = logging.getLogger(__name__)
+
+    log.debug("Decompressing {}".format(subprocess_exr))
+    run_subprocess(
+        subprocess_exr, shell=True, logger=log
+    )
+
+
+def get_decompress_dir():
+    """
+    Creates temporary folder for decompressing.
+
+    It's local; in case of farm it is 'local' to the farm machine.
+
+    Should be much faster, needs to be cleaned up later.
+    """
+    return os.path.normpath(
+        tempfile.mkdtemp(prefix="pyblish_tmp_")
+    )
+
+
+def should_decompress(file_url):
+    """
+    Tests that 'file_url' is compressed with DWAA.
+
+    Uses 'oiio_supported' to check that OIIO tool is available for this
+    platform.
+
+    Shouldn't throw exception as oiiotool is guarded by check function.
+    Currently implemented this way as there is no support for Mac and Linux.
+    In the future, it should be more strict and throw an exception on
+    misconfiguration.
+
+    Args:
+        file_url (str): path to rendered file (for a sequence it would be
+            the first file; if that is compressed it is expected that the
+            whole sequence will be too)
+    Returns:
+        (bool): 'file_url' is DWAA compressed and should be decompressed
+            and we can decompress (oiiotool supported)
+    """
+    if oiio_supported():
+        output = run_subprocess([
+            os.getenv("PYPE_OIIO_PATH"),
+            "--info", "-v", file_url])
+        return "compression: \"dwaa\"" in output or \
+               "compression: \"dwab\"" in output
+
+    return False
diff --git a/pype/lib/terminal.py b/pype/lib/terminal.py
index 043869130a..51b0bcebd6 100644
--- a/pype/lib/terminal.py
+++ b/pype/lib/terminal.py
@@ -11,6 +11,8 @@
 #     ..---===[[ PyP3 Setup ]]===---...
 #
 import re
+import time
+import threading
 
 
 class Terminal:
@@ -24,6 +26,8 @@ class Terminal:
 
     # Is Terminal initialized
     _initialized = False
+    # Thread lock for initialization to avoid race conditions
+    _init_lock = threading.Lock()
     # Use colorized output
     use_colors = True
     # Output message replacements mapping - set on initialization
@@ -39,16 +43,17 @@ class Terminal:
         Then tries to import python module that do the colors magic
         and create it's terminal object. Colorized output is not used if
         import of python module or terminal object creation fails.
-        """
-        # Mark that Terminal's initialization was already triggered
-        Terminal._initialized = True
 
-        from . import env_value_to_bool
+        Set `_initialized` attribute to `True` when it is done.
+ """ + + from pype.lib import env_value_to_bool use_colors = env_value_to_bool( "PYPE_LOG_NO_COLORS", default=Terminal.use_colors ) if not use_colors: Terminal.use_colors = use_colors + Terminal._initialized = True return try: @@ -59,10 +64,11 @@ class Terminal: except Exception: # Do not use colors if crashed Terminal.use_colors = False - Terminal.echo( + print( "Module `blessed` failed on import or terminal creation." " Pype terminal won't use colors." ) + Terminal._initialized = True return # shortcuts for blessed codes @@ -117,6 +123,8 @@ class Terminal: Terminal._Y = _Y Terminal._W = _W + Terminal._initialized = True + @staticmethod def _multiple_replace(text, adict): """Replace multiple tokens defined in dict. @@ -169,8 +177,18 @@ class Terminal: """ T = Terminal + # Initialize if not yet initialized and use thread lock to avoid race + # condition issues if not T._initialized: - T._initialize() + # Check if lock is already locked to be sure `_initialize` is not + # executed multiple times + if not T._init_lock.locked(): + with T._init_lock: + T._initialize() + else: + # If lock is locked wait until is finished + while T._init_lock.locked(): + time.sleep(0.1) # if we dont want colors, just print raw message if not T.use_colors: diff --git a/pype/modules/__init__.py b/pype/modules/__init__.py index 4f76dc2df0..93fc92f9d5 100644 --- a/pype/modules/__init__.py +++ b/pype/modules/__init__.py @@ -5,6 +5,7 @@ from .base import ( ITrayAction, ITrayService, IPluginPaths, + ILaunchHookPaths, ModulesManager, TrayModulesManager ) @@ -32,8 +33,9 @@ from .ftrack import ( IFtrackEventHandlerPaths ) from .clockify import ClockifyModule -from .logging import LoggingModule +from .log_viewer import LogViewModule from .muster import MusterModule +from .deadline import DeadlineModule from .standalonepublish_action import StandAlonePublishAction from .websocket_server import WebsocketModule from .sync_server import SyncServer @@ -45,6 +47,7 @@ __all__ = ( "ITrayAction", "ITrayService", "IPluginPaths", + "ILaunchHookPaths", "ModulesManager", "TrayModulesManager", @@ -70,8 +73,9 @@ __all__ = ( "ClockifyModule", "IdleManager", - "LoggingModule", + "LogViewModule", "MusterModule", + "DeadlineModule", "StandAlonePublishAction", "WebsocketModule", diff --git a/pype/modules/base.py b/pype/modules/base.py index 525320f1a7..ad0fecb8f7 100644 --- a/pype/modules/base.py +++ b/pype/modules/base.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- """Base class for Pype Modules.""" +import time import inspect import logging +import collections from uuid import uuid4 from abc import ABCMeta, abstractmethod import six @@ -84,6 +86,19 @@ class IPluginPaths: pass +@six.add_metaclass(ABCMeta) +class ILaunchHookPaths: + """Module has launch hook paths to return. + + Expected result is list of paths. + ["path/to/launch_hooks_dir"] + """ + + @abstractmethod + def get_launch_hook_paths(self): + pass + + @six.add_metaclass(ABCMeta) class ITrayModule: """Module has special procedures when used in Pype Tray. 
@@ -255,12 +270,17 @@ class ITrayService(ITrayModule):
 
 
 class ModulesManager:
+    # Helper attributes for report
+    _report_total_key = "Total"
+
     def __init__(self):
         self.log = logging.getLogger(self.__class__.__name__)
 
         self.modules = []
         self.modules_by_id = {}
         self.modules_by_name = {}
+        # For report of time consumption
+        self._report = {}
 
         self.initialize_modules()
         self.connect_modules()
@@ -270,6 +290,11 @@ class ModulesManager:
         self.log.debug("*** Pype modules initialization.")
         # Prepare settings for modules
         modules_settings = get_system_settings()["modules"]
+
+        report = {}
+        time_start = time.time()
+        prev_start_time = time_start
+
         # Go through globals in `pype.modules`
         for name in dir(pype.modules):
             modules_item = getattr(pype.modules, name, None)
@@ -308,17 +333,28 @@ class ModulesManager:
                     enabled_str = " "
                 self.log.debug("[{}] {}".format(enabled_str, name))
 
+                now = time.time()
+                report[module.__class__.__name__] = now - prev_start_time
+                prev_start_time = now
+
             except Exception:
                 self.log.warning(
                     "Initialization of module {} failed.".format(name),
                     exc_info=True
                 )
 
+        if self._report is not None:
+            report[self._report_total_key] = time.time() - time_start
+            self._report["Initialization"] = report
+
     def connect_modules(self):
         """Trigger connection with other enabled modules.
 
         Modules should handle their interfaces in `connect_with_modules`.
         """
+        report = {}
+        time_start = time.time()
+        prev_start_time = time_start
         enabled_modules = self.get_enabled_modules()
         self.log.debug("Has {} enabled modules.".format(len(enabled_modules)))
         for module in enabled_modules:
@@ -330,6 +366,14 @@ class ModulesManager:
                     exc_info=True
                 )
 
+            now = time.time()
+            report[module.__class__.__name__] = now - prev_start_time
+            prev_start_time = now
+
+        if self._report is not None:
+            report[self._report_total_key] = time.time() - time_start
+            self._report["Connect modules"] = report
+
     def get_enabled_modules(self):
         """Enabled modules initialized by the manager.
 
@@ -421,6 +465,156 @@ class ModulesManager:
             ).format(expected_keys, " | ".join(msg_items)))
         return output
 
+    def collect_launch_hook_paths(self):
+        """Helper to collect launch hook paths from modules inheriting
+        ILaunchHookPaths.
+
+        Returns:
+            list: Paths to launch hook directories.
+        """
+        str_type = type("")
+        expected_types = (list, tuple, set)
+
+        output = []
+        for module in self.get_enabled_modules():
+            # Skip modules that do not inherit from `ILaunchHookPaths`
+            if not isinstance(module, ILaunchHookPaths):
+                continue
+
+            hook_paths = module.get_launch_hook_paths()
+            if not hook_paths:
+                continue
+
+            # Convert string to list
+            if isinstance(hook_paths, str_type):
+                hook_paths = [hook_paths]
+
+            # Skip invalid types
+            if not isinstance(hook_paths, expected_types):
+                self.log.warning((
+                    "Result of `get_launch_hook_paths`"
+                    " has invalid type {}. Expected {}"
+                ).format(type(hook_paths), expected_types))
+                continue
+
+            output.extend(hook_paths)
+        return output
+
+    def print_report(self):
+        """Print out report of time spent on modules initialization parts.
+
+        Reporting is not automated; it must be implemented for each
+        initialization part separately. Reports must be stored to `_report`
+        attribute. Print is skipped if `_report` is empty.
+
+        Attribute `_report` is dictionary where key is "label" describing
+        the processed part and value is dictionary where key is module's
+        class name and value is time delta of its processing.
+
+        It is a good idea to add the total time delta of the processed part
+        under the key defined in attribute `_report_total_key`.
+        By default it has value `"Total"`, but please use the attribute.
+
+        ```javascript
+        {
+            "Initialization": {
+                "FtrackModule": 0.003,
+                ...
+                "Total": 1.003,
+            },
+            ...
+        }
+        ```
+        """
+        if not self._report:
+            return
+
+        available_col_names = set()
+        for module_names in self._report.values():
+            available_col_names |= set(module_names.keys())
+
+        # Prepare ordered dictionary for columns
+        cols = collections.OrderedDict()
+        # Add module names to first column
+        cols["Module name"] = list(sorted(
+            module.__class__.__name__
+            for module in self.modules
+            if module.__class__.__name__ in available_col_names
+        ))
+        # Add total key (as last module)
+        cols["Module name"].append(self._report_total_key)
+
+        # Add columns from report
+        for label in self._report.keys():
+            cols[label] = []
+
+        total_module_times = {}
+        for module_name in cols["Module name"]:
+            total_module_times[module_name] = 0
+
+        for label, reported in self._report.items():
+            for module_name in cols["Module name"]:
+                col_time = reported.get(module_name)
+                if col_time is None:
+                    cols[label].append("N/A")
+                    continue
+                cols[label].append("{:.3f}".format(col_time))
+                total_module_times[module_name] += col_time
+
+        # Also add total column that should sum the row
+        cols[self._report_total_key] = []
+        for module_name in cols["Module name"]:
+            cols[self._report_total_key].append(
+                "{:.3f}".format(total_module_times[module_name])
+            )
+
+        # Prepare column widths and total row count
+        # - column width is determined by longest value in the column
+        col_widths = {}
+        total_rows = None
+        for key, values in cols.items():
+            if total_rows is None:
+                total_rows = 1 + len(values)
+            max_width = len(key)
+            for value in values:
+                value_length = len(value)
+                if value_length > max_width:
+                    max_width = value_length
+            col_widths[key] = max_width
+
+        rows = []
+        for _idx in range(total_rows):
+            rows.append([])
+
+        for key, values in cols.items():
+            width = col_widths[key]
+            idx = 0
+            rows[idx].append(key.ljust(width))
+            for value in values:
+                idx += 1
+                rows[idx].append(value.ljust(width))
+
+        filler_parts = []
+        for width in col_widths.values():
+            filler_parts.append(width * "-")
+        filler = "+".join(filler_parts)
+
+        formatted_rows = [filler]
+        last_row_idx = len(rows) - 1
+        for idx, row in enumerate(rows):
+            # Add filler before last row
+            if idx == last_row_idx:
+                formatted_rows.append(filler)
+
+            formatted_rows.append("|".join(row))
+
+            # Add filler after first row
+            if idx == 0:
+                formatted_rows.append(filler)
+
+        # Join rows with newline char and add new line at the end
+        output = "\n".join(formatted_rows) + "\n"
+        print(output)
+
 
 class TrayModulesManager(ModulesManager):
     # Define order of modules in menu
@@ -442,6 +636,7 @@ class TrayModulesManager(ModulesManager):
         self.modules = []
         self.modules_by_id = {}
         self.modules_by_name = {}
+        self._report = {}
 
     def initialize(self, tray_menu):
         self.initialize_modules()
@@ -457,6 +652,9 @@ class TrayModulesManager(ModulesManager):
         return output
 
     def tray_init(self):
+        report = {}
+        time_start = time.time()
+        prev_start_time = time_start
         for module in self.get_enabled_tray_modules():
             try:
                 module.tray_init()
@@ -469,6 +667,14 @@ class TrayModulesManager(ModulesManager):
                     exc_info=True
                 )
 
+            now = time.time()
+            report[module.__class__.__name__] = now - prev_start_time
+            prev_start_time = now
+
+        if self._report is not None:
+            report[self._report_total_key] = time.time() - time_start
+            self._report["Tray init"] = report
+
     def tray_menu(self, tray_menu):
         ordered_modules = []
         enabled_by_name = {
@@ -482,6 +688,9 @@
ordered_modules.append(module_by_name) ordered_modules.extend(enabled_by_name.values()) + report = {} + time_start = time.time() + prev_start_time = time_start for module in ordered_modules: if not module.tray_initialized: continue @@ -497,8 +706,18 @@ class TrayModulesManager(ModulesManager): ), exc_info=True ) + now = time.time() + report[module.__class__.__name__] = now - prev_start_time + prev_start_time = now + + if self._report is not None: + report[self._report_total_key] = time.time() - time_start + self._report["Tray menu"] = report def start_modules(self): + report = {} + time_start = time.time() + prev_start_time = time_start for module in self.get_enabled_tray_modules(): if not module.tray_initialized: if isinstance(module, ITrayService): @@ -514,6 +733,13 @@ class TrayModulesManager(ModulesManager): ), exc_info=True ) + now = time.time() + report[module.__class__.__name__] = now - prev_start_time + prev_start_time = now + + if self._report is not None: + report[self._report_total_key] = time.time() - time_start + self._report["Modules start"] = report def on_exit(self): for module in self.get_enabled_tray_modules(): diff --git a/pype/modules/clockify/launcher_actions/ClockifyStart.py b/pype/modules/clockify/launcher_actions/ClockifyStart.py index f97360662f..d02005e2de 100644 --- a/pype/modules/clockify/launcher_actions/ClockifyStart.py +++ b/pype/modules/clockify/launcher_actions/ClockifyStart.py @@ -3,7 +3,7 @@ from pype.api import Logger from pype.modules.clockify.clockify_api import ClockifyAPI -log = Logger().get_logger(__name__, "clockify_start") +log = Logger().get_logger(__name__) class ClockifyStart(api.Action): diff --git a/pype/modules/clockify/launcher_actions/ClockifySync.py b/pype/modules/clockify/launcher_actions/ClockifySync.py index 422a346023..5f0e57b8c8 100644 --- a/pype/modules/clockify/launcher_actions/ClockifySync.py +++ b/pype/modules/clockify/launcher_actions/ClockifySync.py @@ -1,7 +1,7 @@ from avalon import api, io from pype.modules.clockify.clockify_api import ClockifyAPI from pype.api import Logger -log = Logger().get_logger(__name__, "clockify_sync") +log = Logger().get_logger(__name__) class ClockifySync(api.Action): diff --git a/pype/modules/deadline/deadline_module.py b/pype/modules/deadline/deadline_module.py index 6de68c390f..ba920f7f13 100644 --- a/pype/modules/deadline/deadline_module.py +++ b/pype/modules/deadline/deadline_module.py @@ -1,7 +1,9 @@ -from .. 
import PypeModule +import os +from pype.modules import ( + PypeModule, IPluginPaths) -class DeadlineModule(PypeModule): +class DeadlineModule(PypeModule, IPluginPaths): name = "deadline" def initialize(self, modules_settings): @@ -18,3 +20,10 @@ class DeadlineModule(PypeModule): def connect_with_modules(self, *_a, **_kw): return + + def get_plugin_paths(self): + """Deadline plugin paths.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + return { + "publish": [os.path.join(current_dir, "plugins", "publish")] + } diff --git a/pype/plugins/aftereffects/publish/submit_aftereffects_deadline.py b/pype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py similarity index 100% rename from pype/plugins/aftereffects/publish/submit_aftereffects_deadline.py rename to pype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/modules/deadline/plugins/publish/submit_maya_deadline.py similarity index 100% rename from pype/plugins/maya/publish/submit_maya_deadline.py rename to pype/modules/deadline/plugins/publish/submit_maya_deadline.py diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/modules/deadline/plugins/publish/submit_nuke_deadline.py similarity index 100% rename from pype/plugins/nuke/publish/submit_nuke_deadline.py rename to pype/modules/deadline/plugins/publish/submit_nuke_deadline.py diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/modules/deadline/plugins/publish/submit_publish_job.py similarity index 100% rename from pype/plugins/global/publish/submit_publish_job.py rename to pype/modules/deadline/plugins/publish/submit_publish_job.py diff --git a/pype/plugins/maya/publish/validate_deadline_connection.py b/pype/modules/deadline/plugins/publish/validate_deadline_connection.py similarity index 98% rename from pype/plugins/maya/publish/validate_deadline_connection.py rename to pype/modules/deadline/plugins/publish/validate_deadline_connection.py index 0733c3badf..1c49e68ee1 100644 --- a/pype/plugins/maya/publish/validate_deadline_connection.py +++ b/pype/modules/deadline/plugins/publish/validate_deadline_connection.py @@ -10,7 +10,7 @@ class ValidateDeadlineConnection(pyblish.api.ContextPlugin): label = "Validate Deadline Web Service" order = pyblish.api.ValidatorOrder - hosts = ["maya"] + hosts = ["maya", "nuke"] families = ["renderlayer"] def process(self, context): diff --git a/pype/modules/ftrack/__init__.py b/pype/modules/ftrack/__init__.py index c02b0fca19..4fb427f13a 100644 --- a/pype/modules/ftrack/__init__.py +++ b/pype/modules/ftrack/__init__.py @@ -1,6 +1,7 @@ from .ftrack_module import ( FtrackModule, - IFtrackEventHandlerPaths + IFtrackEventHandlerPaths, + FTRACK_MODULE_DIR ) from . 
import ftrack_server from .ftrack_server import FtrackServer, check_ftrack_url @@ -9,6 +10,7 @@ from .lib import BaseHandler, BaseEvent, BaseAction, ServerAction __all__ = ( "FtrackModule", "IFtrackEventHandlerPaths", + "FTRACK_MODULE_DIR", "ftrack_server", "FtrackServer", diff --git a/pype/modules/ftrack/actions/action_clean_hierarchical_attributes.py b/pype/modules/ftrack/actions/action_clean_hierarchical_attributes.py index dc3a638192..f9824ec8ea 100644 --- a/pype/modules/ftrack/actions/action_clean_hierarchical_attributes.py +++ b/pype/modules/ftrack/actions/action_clean_hierarchical_attributes.py @@ -9,7 +9,6 @@ class CleanHierarchicalAttrsAction(BaseAction): label = "Pype Admin" variant = "- Clean hierarchical custom attributes" description = "Unset empty hierarchical attribute values." - role_list = ["Pypeclub", "Administrator", "Project Manager"] icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg") all_project_entities_query = ( @@ -20,12 +19,17 @@ class CleanHierarchicalAttrsAction(BaseAction): "select value, entity_id from CustomAttributeValue " "where entity_id in ({}) and configuration_id is \"{}\"" ) + settings_key = "clean_hierarchical_attr" def discover(self, session, entities, event): """Show only on project entity.""" - if len(entities) == 1 and entities[0].entity_type.lower() == "project": - return True - return False + if ( + len(entities) != 1 + or entities[0].entity_type.lower() != "project" + ): + return False + + return self.valid_roles(session, entities, event) def launch(self, session, entities, event): project = entities[0] diff --git a/pype/modules/ftrack/actions/action_create_cust_attrs.py b/pype/modules/ftrack/actions/action_create_cust_attrs.py index 9d6c16b556..ae040fd630 100644 --- a/pype/modules/ftrack/actions/action_create_cust_attrs.py +++ b/pype/modules/ftrack/actions/action_create_cust_attrs.py @@ -94,8 +94,8 @@ Example: "avalon_auto_sync": { "label": "Avalon auto-sync", "type": "boolean", - "write_security_role": ["API", "Administrator"], - "read_security_role": ["API", "Administrator"] + "write_security_roles": ["API", "Administrator"], + "read_security_roles": ["API", "Administrator"] } }, "is_hierarchical": { @@ -131,13 +131,16 @@ class CustomAttributes(BaseAction): variant = '- Create/Update Avalon Attributes' #: Action description. 
description = 'Creates Avalon/Mongo ID for double check' - #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator'] icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg") + settings_key = "create_update_attributes" required_keys = ("key", "label", "type") - presetable_keys = ("default", "write_security_role", "read_security_role") + presetable_keys = ( + "default", + "write_security_roles", + "read_security_roles" + ) hierarchical_key = "is_hierarchical" type_posibilities = ( @@ -150,7 +153,7 @@ class CustomAttributes(BaseAction): Validation - action is only for Administrators ''' - return True + return self.valid_roles(session, entities, event) def launch(self, session, entities, event): # JOB SETTINGS @@ -212,17 +215,17 @@ class CustomAttributes(BaseAction): self.groups = {} self.ftrack_settings = get_system_settings()["modules"]["ftrack"] - self.attrs_presets = self.prepare_attribute_pressets() + self.attrs_settings = self.prepare_attribute_settings() - def prepare_attribute_pressets(self): + def prepare_attribute_settings(self): output = {} - attr_presets = self.ftrack_settings["custom_attributes"] - for entity_type, preset in attr_presets.items(): + attr_settings = self.ftrack_settings["custom_attributes"] + for entity_type, attr_data in attr_settings.items(): # Lower entity type entity_type = entity_type.lower() # Just store if entity type is not "task" if entity_type != "task": - output[entity_type] = preset + output[entity_type] = attr_data continue # Prepare empty dictionary for entity type if not set yet @@ -230,7 +233,7 @@ class CustomAttributes(BaseAction): output[entity_type] = {} # Store presets per lowered object type - for obj_type, _preset in preset.items(): + for obj_type, _preset in attr_data.items(): output[entity_type][obj_type.lower()] = _preset return output @@ -267,14 +270,11 @@ class CustomAttributes(BaseAction): def create_hierarchical_mongo_attr(self, session, event): # Set security roles for attribute - default_role_list = ("API", "Administrator", "Pypeclub") data = { "key": CUST_ATTR_ID_KEY, "label": "Avalon/Mongo ID", "type": "text", "default": "", - "write_security_roles": default_role_list, - "read_security_roles": default_role_list, "group": CUST_ATTR_GROUP, "is_hierarchical": True, "config": {"markdown": False} @@ -497,21 +497,20 @@ class CustomAttributes(BaseAction): else: entity_key = attr_data["entity_type"] - entity_presets = self.attrs_presets.get(entity_key) or {} + entity_settings = self.attrs_settings.get(entity_key) or {} if entity_key.lower() == "task": object_type = attr_data["object_type"] - entity_presets = entity_presets.get(object_type.lower()) or {} + entity_settings = entity_settings.get(object_type.lower()) or {} - key_presets = entity_presets.get(attr_key) or {} - - for key, value in key_presets.items(): + key_settings = entity_settings.get(attr_key) or {} + for key, value in key_settings.items(): if key in self.presetable_keys and value: output[key] = value return output def process_attr_data(self, cust_attr_data, event): - attr_presets = self.presets_for_attr_data(cust_attr_data) - cust_attr_data.update(attr_presets) + attr_settings = self.presets_for_attr_data(cust_attr_data) + cust_attr_data.update(attr_settings) try: data = {} @@ -779,9 +778,9 @@ class CustomAttributes(BaseAction): roles_read = attr["read_security_roles"] if "write_security_roles" in attr: roles_write = attr["write_security_roles"] - output['read_security_roles'] = self.get_security_roles(roles_read) - 
output['write_security_roles'] = self.get_security_roles(roles_write) + output["read_security_roles"] = self.get_security_roles(roles_read) + output["write_security_roles"] = self.get_security_roles(roles_write) return output def get_entity_type(self, attr): diff --git a/pype/modules/ftrack/actions/action_delete_asset.py b/pype/modules/ftrack/actions/action_delete_asset.py index 4720273c81..3bdbbe2470 100644 --- a/pype/modules/ftrack/actions/action_delete_asset.py +++ b/pype/modules/ftrack/actions/action_delete_asset.py @@ -18,8 +18,8 @@ class DeleteAssetSubset(BaseAction): #: Action description. description = "Removes from Avalon with all childs and asset from Ftrack" icon = statics_icon("ftrack", "action_icons", "DeleteAsset.svg") - #: roles that are allowed to register this action - role_list = ["Pypeclub", "Administrator", "Project Manager"] + + settings_key = "delete_asset_subset" #: Db connection dbcon = AvalonMongoDB() @@ -32,17 +32,21 @@ class DeleteAssetSubset(BaseAction): """ Validation """ task_ids = [] for ent_info in event["data"]["selection"]: - entType = ent_info.get("entityType", "") - if entType == "task": + if ent_info.get("entityType") == "task": task_ids.append(ent_info["entityId"]) + is_valid = False for entity in entities: - ftrack_id = entity["id"] - if ftrack_id not in task_ids: - continue - if entity.entity_type.lower() != "task": - return True - return False + if ( + entity["id"] in task_ids + and entity.entity_type.lower() != "task" + ): + is_valid = True + break + + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid def _launch(self, event): try: diff --git a/pype/modules/ftrack/actions/action_delete_old_versions.py b/pype/modules/ftrack/actions/action_delete_old_versions.py index 31d15da9e5..e1c1e173a3 100644 --- a/pype/modules/ftrack/actions/action_delete_old_versions.py +++ b/pype/modules/ftrack/actions/action_delete_old_versions.py @@ -21,7 +21,6 @@ class DeleteOldVersions(BaseAction): "Delete files from older publishes so project can be" " archived with only lates versions." ) - role_list = ["Pypeclub", "Project Manager", "Administrator"] icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg") dbcon = AvalonMongoDB() @@ -31,13 +30,16 @@ class DeleteOldVersions(BaseAction): sequence_splitter = "__sequence_splitter__" def discover(self, session, entities, event): - ''' Validation ''' - selection = event["data"].get("selection") or [] - for entity in selection: - entity_type = (entity.get("entityType") or "").lower() - if entity_type == "assetversion": - return True - return False + """ Validation. 
""" + is_valid = False + for entity in entities: + if entity.entity_type.lower() == "assetversion": + is_valid = True + break + + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid def interface(self, session, entities, event): # TODO Add roots existence validation diff --git a/pype/modules/ftrack/actions/action_delivery.py b/pype/modules/ftrack/actions/action_delivery.py index 853fe64ec7..e9e939bb47 100644 --- a/pype/modules/ftrack/actions/action_delivery.py +++ b/pype/modules/ftrack/actions/action_delivery.py @@ -23,6 +23,7 @@ class Delivery(BaseAction): description = "Deliver data to client" role_list = ["Pypeclub", "Administrator", "Project manager"] icon = statics_icon("ftrack", "action_icons", "Delivery.svg") + settings_key = "delivery_action" def __init__(self, *args, **kwargs): self.db_con = AvalonMongoDB() @@ -30,11 +31,15 @@ class Delivery(BaseAction): super(Delivery, self).__init__(*args, **kwargs) def discover(self, session, entities, event): + is_valid = False for entity in entities: if entity.entity_type.lower() == "assetversion": - return True + is_valid = True + break - return False + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid def interface(self, session, entities, event): if event["data"].get("values", {}): diff --git a/pype/modules/ftrack/actions/action_job_killer.py b/pype/modules/ftrack/actions/action_job_killer.py index cb193b88ce..1ddd1383a7 100644 --- a/pype/modules/ftrack/actions/action_job_killer.py +++ b/pype/modules/ftrack/actions/action_job_killer.py @@ -13,13 +13,12 @@ class JobKiller(BaseAction): #: Action description. description = 'Killing selected running jobs' #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator'] icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg") + settings_key = "job_killer" def discover(self, session, entities, event): ''' Validation ''' - - return True + return self.valid_roles(session, entities, event) def interface(self, session, entities, event): if not event['data'].get('values', {}): diff --git a/pype/modules/ftrack/actions/action_prepare_project.py b/pype/modules/ftrack/actions/action_prepare_project.py index 98493f65c7..3a955067d8 100644 --- a/pype/modules/ftrack/actions/action_prepare_project.py +++ b/pype/modules/ftrack/actions/action_prepare_project.py @@ -16,22 +16,23 @@ class PrepareProject(BaseAction): #: Action description. 
description = 'Set basic attributes on the project' #: roles that are allowed to register this action - role_list = ["Pypeclub", "Administrator", "Project manager"] icon = statics_icon("ftrack", "action_icons", "PrepareProject.svg") + settings_key = "prepare_project" + # Key to store info about trigerring create folder structure create_project_structure_key = "create_folder_structure" item_splitter = {'type': 'label', 'value': '---'} def discover(self, session, entities, event): ''' Validation ''' - if len(entities) != 1: + if ( + len(entities) != 1 + or entities[0].entity_type.lower() != "project" + ): return False - if entities[0].entity_type.lower() != "project": - return False - - return True + return self.valid_roles(session, entities, event) def interface(self, session, entities, event): if event['data'].get('values', {}): diff --git a/pype/modules/ftrack/actions/action_seed.py b/pype/modules/ftrack/actions/action_seed.py index 2610a25024..549afc660c 100644 --- a/pype/modules/ftrack/actions/action_seed.py +++ b/pype/modules/ftrack/actions/action_seed.py @@ -15,7 +15,6 @@ class SeedDebugProject(BaseAction): #: priority priority = 100 #: roles that are allowed to register this action - role_list = ["Pypeclub"] icon = statics_icon("ftrack", "action_icons", "SeedProject.svg") # Asset names which will be created in `Assets` entity @@ -58,9 +57,12 @@ class SeedDebugProject(BaseAction): existing_projects = None new_project_item = "< New Project >" current_project_item = "< Current Project >" + settings_key = "seed_project" def discover(self, session, entities, event): ''' Validation ''' + if not self.valid_roles(session, entities, event): + return False return True def interface(self, session, entities, event): diff --git a/pype/modules/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/modules/ftrack/actions/action_store_thumbnails_to_avalon.py index 6df8271381..84f857e37a 100644 --- a/pype/modules/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/modules/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -21,8 +21,8 @@ class StoreThumbnailsToAvalon(BaseAction): # Action description description = 'Test action' # roles that are allowed to register this action - role_list = ["Pypeclub", "Administrator", "Project Manager"] icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg") + settings_key = "store_thubmnail_to_avalon" thumbnail_key = "AVALON_THUMBNAIL_ROOT" @@ -31,10 +31,15 @@ class StoreThumbnailsToAvalon(BaseAction): super(StoreThumbnailsToAvalon, self).__init__(*args, **kwargs) def discover(self, session, entities, event): + is_valid = False for entity in entities: if entity.entity_type.lower() == "assetversion": - return True - return False + is_valid = True + break + + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid def launch(self, session, entities, event): user = session.query( diff --git a/pype/modules/ftrack/actions/action_sync_to_avalon.py b/pype/modules/ftrack/actions/action_sync_to_avalon.py index 6077511092..b86b469d1c 100644 --- a/pype/modules/ftrack/actions/action_sync_to_avalon.py +++ b/pype/modules/ftrack/actions/action_sync_to_avalon.py @@ -41,20 +41,26 @@ class SyncToAvalonLocal(BaseAction): #: priority priority = 200 #: roles that are allowed to register this action - role_list = ["Pypeclub"] icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg") + settings_key = "sync_to_avalon_local" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.entities_factory = 
SyncEntitiesFactory(self.log, self.session) def discover(self, session, entities, event): - ''' Validation ''' + """ Validate selection. """ + is_valid = False for ent in event["data"]["selection"]: # Ignore entities that are not tasks or projects if ent["entityType"].lower() in ["show", "task"]: - return True - return False + is_valid = True + break + + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid def launch(self, session, in_entities, event): time_start = time.time() diff --git a/pype/modules/ftrack/actions/action_thumbnail_to_childern.py b/pype/modules/ftrack/actions/action_thumbnail_to_childern.py index 604688d221..b90dfa027c 100644 --- a/pype/modules/ftrack/actions/action_thumbnail_to_childern.py +++ b/pype/modules/ftrack/actions/action_thumbnail_to_childern.py @@ -15,11 +15,9 @@ class ThumbToChildren(BaseAction): icon = statics_icon("ftrack", "action_icons", "Thumbnail.svg") def discover(self, session, entities, event): - ''' Validation ''' - - if (len(entities) != 1 or entities[0].entity_type in ['Project']): + """Show only on project.""" + if (len(entities) != 1 or entities[0].entity_type in ["Project"]): return False - return True def launch(self, session, entities, event): diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 87d9d5afe9..6df27682e0 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -59,18 +59,22 @@ class PushHierValuesToNonHier(ServerAction): ) # configurable - interest_entity_types = ["Shot"] - interest_attributes = ["frameStart", "frameEnd"] - role_list = ["Pypeclub", "Administrator", "Project Manager"] + settings_key = "sync_hier_entity_attributes" + settings_enabled_key = "action_enabled" def discover(self, session, entities, event): """ Validation """ # Check if selection is valid + is_valid = False for ent in event["data"]["selection"]: # Ignore entities that are not tasks or projects if ent["entityType"].lower() in ("task", "show"): - return True - return False + is_valid = True + break + + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid def launch(self, session, entities, event): self.log.debug("{}: Creating job".format(self.label)) @@ -88,7 +92,7 @@ class PushHierValuesToNonHier(ServerAction): session.commit() try: - result = self.propagate_values(session, entities) + result = self.propagate_values(session, event, entities) job["status"] = "done" session.commit() @@ -111,9 +115,9 @@ class PushHierValuesToNonHier(ServerAction): job["status"] = "failed" session.commit() - def attrs_configurations(self, session, object_ids): + def attrs_configurations(self, session, object_ids, interest_attributes): attrs = session.query(self.cust_attrs_query.format( - self.join_query_keys(self.interest_attributes), + self.join_query_keys(interest_attributes), self.join_query_keys(object_ids) )).all() @@ -129,7 +133,14 @@ class PushHierValuesToNonHier(ServerAction): output[obj_id].append(attr) return output, hiearchical - def propagate_values(self, session, selected_entities): + def propagate_values(self, session, event, selected_entities): + ftrack_settings = self.get_ftrack_settings( + session, event, selected_entities + ) + action_settings = ( + ftrack_settings[self.settings_frack_subkey][self.settings_key] + ) + project_entity = self.get_project_from_entity(selected_entities[0]) selected_ids = 
[entity["id"] for entity in selected_entities] @@ -138,7 +149,7 @@ class PushHierValuesToNonHier(ServerAction): )) interest_entity_types = tuple( ent_type.lower() - for ent_type in self.interest_entity_types + for ent_type in action_settings["interest_entity_types"] ) all_object_types = session.query("ObjectType").all() object_types_by_low_name = { @@ -158,9 +169,10 @@ class PushHierValuesToNonHier(ServerAction): for obj_type in destination_object_types ) + interest_attributes = action_settings["interest_attributes"] # Find custom attributes definitions attrs_by_obj_id, hier_attrs = self.attrs_configurations( - session, destination_object_type_ids + session, destination_object_type_ids, interest_attributes ) # Filter destination object types if they have any object specific # custom attribute diff --git a/pype/modules/ftrack/events/event_next_task_update.py b/pype/modules/ftrack/events/event_next_task_update.py index 025bac0d07..284cff886b 100644 --- a/pype/modules/ftrack/events/event_next_task_update.py +++ b/pype/modules/ftrack/events/event_next_task_update.py @@ -1,19 +1,79 @@ -import operator import collections from pype.modules.ftrack import BaseEvent class NextTaskUpdate(BaseEvent): - def filter_entities_info(self, session, event): + """Change status on following Task. + + Handler cares about changes of status id on Task entities. When new status + has state "Done" it will try to find following task and change it's status. + It is expected following task should be marked as "Ready to work on". + + By default all tasks with same task type must have state "Done" to do any + changes. And when all tasks with same task type are "done" it will change + statuses on all tasks with next task type. + + # Enable + Handler is based on settings, handler can be turned on/off with "enabled" + key. + ``` + "enabled": True + ``` + + # Status mappings + Must have set mappings of new statuses: + ``` + "mapping": { + # From -> To + "Not Ready": "Ready", + ... + } + ``` + + If current status name is not found then status change is skipped. + + # Ignored statuses + These status names are skipping as they would be in "Done" state. Best + example is status "Omitted" which in most of cases is "Blocked" state but + it will never change. + ``` + "ignored_statuses": [ + "Omitted", + ... + ] + ``` + + # Change statuses sorted by task type and by name + Change behaviour of task type batching. Statuses are not checked and set + by batches of tasks by Task type but one by one. Tasks are sorted by + Task type and then by name if all previous tasks are "Done" the following + will change status. 
+ ``` + "name_sorting": True + ``` + """ + settings_key = "next_task_update" + + def launch(self, session, event): + '''Propagates status from version to task when changed''' + + filtered_entities_info = self.filter_entities_info(event) + if not filtered_entities_info: + return + + for project_id, entities_info in filtered_entities_info.items(): + self.process_by_project(session, event, project_id, entities_info) + + def filter_entities_info(self, event): # Filter if event contain relevant data entities_info = event["data"].get("entities") if not entities_info: return - first_filtered_entities = [] + filtered_entities_info = collections.defaultdict(list) for entity_info in entities_info: - # Care only about tasks - if entity_info.get("entityType") != "task": + # Care only about Task `entity_type` + if entity_info.get("entity_type") != "Task": continue # Care only about changes of status @@ -25,204 +85,353 @@ class NextTaskUpdate(BaseEvent): ): continue - first_filtered_entities.append(entity_info) + project_id = None + for parent_info in reversed(entity_info["parents"]): + if parent_info["entityType"] == "show": + project_id = parent_info["entityId"] + break - if not first_filtered_entities: - return first_filtered_entities + if project_id: + filtered_entities_info[project_id].append(entity_info) + return filtered_entities_info - status_ids = [ - entity_info["changes"]["statusid"]["new"] - for entity_info in first_filtered_entities - ] - statuses_by_id = self.get_statuses_by_id( - session, status_ids=status_ids + def process_by_project(self, session, event, project_id, _entities_info): + project_name = self.get_project_name_from_event( + session, event, project_id ) - # Make sure `entity_type` is "Task" - task_object_type = session.query( - "select id, name from ObjectType where name is \"Task\"" - ).one() - - # Care only about tasks having status with state `Done` - filtered_entities = [] - for entity_info in first_filtered_entities: - if entity_info["objectTypeId"] != task_object_type["id"]: - continue - status_id = entity_info["changes"]["statusid"]["new"] - status_entity = statuses_by_id[status_id] - if status_entity["state"]["name"].lower() == "done": - filtered_entities.append(entity_info) - - return filtered_entities - - def get_parents_by_id(self, session, entities_info): - parent_ids = [ - "\"{}\"".format(entity_info["parentId"]) - for entity_info in entities_info - ] - parent_entities = session.query( - "TypedContext where id in ({})".format(", ".join(parent_ids)) - ).all() - - return { - entity["id"]: entity - for entity in parent_entities - } - - def get_tasks_by_id(self, session, parent_ids): - joined_parent_ids = ",".join([ - "\"{}\"".format(parent_id) - for parent_id in parent_ids - ]) - task_entities = session.query( - "Task where parent_id in ({})".format(joined_parent_ids) - ).all() - - return { - entity["id"]: entity - for entity in task_entities - } - - def get_statuses_by_id(self, session, task_entities=None, status_ids=None): - if task_entities is None and status_ids is None: - return {} - - if status_ids is None: - status_ids = [] - for task_entity in task_entities: - status_ids.append(task_entity["status_id"]) - - if not status_ids: - return {} - - status_entities = session.query( - "Status where id in ({})".format(", ".join(status_ids)) - ).all() - - return { - entity["id"]: entity - for entity in status_entities - } - - def get_sorted_task_types(self, session): - data = { - _type: _type.get("sort") - for _type in session.query("Type").all() - if _type.get("sort") 
is not None - } - - return [ - item[0] - for item in sorted(data.items(), key=operator.itemgetter(1)) - ] - - def launch(self, session, event): - '''Propagates status from version to task when changed''' - - entities_info = self.filter_entities_info(session, event) - if not entities_info: - return - - parents_by_id = self.get_parents_by_id(session, entities_info) - tasks_by_id = self.get_tasks_by_id( - session, tuple(parents_by_id.keys()) + # Load settings + project_settings = self.get_project_settings_from_event( + event, project_name ) - tasks_to_parent_id = collections.defaultdict(list) - for task_entity in tasks_by_id.values(): - tasks_to_parent_id[task_entity["parent_id"]].append(task_entity) - - statuses_by_id = self.get_statuses_by_id(session, tasks_by_id.values()) - - next_status_name = "Ready" - next_status = session.query( - "Status where name is \"{}\"".format(next_status_name) - ).first() - if not next_status: - self.log.warning("Couldn't find status with name \"{}\"".format( - next_status_name + # Load status mapping from presets + event_settings = ( + project_settings["ftrack"]["events"][self.settings_key] + ) + if not event_settings["enabled"]: + self.log.debug("Project \"{}\" has disabled {}.".format( + project_name, self.__class__.__name__ )) return + statuses = session.query("Status").all() + + entities_info = self.filter_by_status_state(_entities_info, statuses) + if not entities_info: + return + + parent_ids = set() + event_task_ids_by_parent_id = collections.defaultdict(list) for entity_info in entities_info: parent_id = entity_info["parentId"] - task_id = entity_info["entityId"] - task_entity = tasks_by_id[task_id] + entity_id = entity_info["entityId"] + parent_ids.add(parent_id) + event_task_ids_by_parent_id[parent_id].append(entity_id) - all_same_type_taks_done = True - for parents_task in tasks_to_parent_id[parent_id]: - if ( - parents_task["id"] == task_id - or parents_task["type_id"] != task_entity["type_id"] - ): - continue + # From now it doesn't matter what was in event data + task_entities = session.query( + ( + "select id, type_id, status_id, parent_id, link from Task" + " where parent_id in ({})" + ).format(self.join_query_keys(parent_ids)) + ).all() - parents_task_status = statuses_by_id[parents_task["status_id"]] - low_status_name = parents_task_status["name"].lower() - # Skip if task's status name "Omitted" - if low_status_name == "omitted": - continue + tasks_by_parent_id = collections.defaultdict(list) + for task_entity in task_entities: + tasks_by_parent_id[task_entity["parent_id"]].append(task_entity) - low_state_name = parents_task_status["state"]["name"].lower() - if low_state_name != "done": - all_same_type_taks_done = False - break + project_entity = session.get("Project", project_id) + self.set_next_task_statuses( + session, + tasks_by_parent_id, + event_task_ids_by_parent_id, + statuses, + project_entity, + event_settings + ) - if not all_same_type_taks_done: - continue + def filter_by_status_state(self, entities_info, statuses): + statuses_by_id = { + status["id"]: status + for status in statuses + } - # Prepare all task types - sorted_task_types = self.get_sorted_task_types(session) - sorted_task_types_len = len(sorted_task_types) + # Care only about tasks having status with state `Done` + filtered_entities_info = [] + for entity_info in entities_info: + status_id = entity_info["changes"]["statusid"]["new"] + status_entity = statuses_by_id[status_id] + if status_entity["state"]["name"].lower() == "done": + 
+                filtered_entities_info.append(entity_info)
+        return filtered_entities_info

-            from_idx = None
-            for idx, task_type in enumerate(sorted_task_types):
-                if task_type["id"] == task_entity["type_id"]:
-                    from_idx = idx + 1
-                    break
+    def set_next_task_statuses(
+        self,
+        session,
+        tasks_by_parent_id,
+        event_task_ids_by_parent_id,
+        statuses,
+        project_entity,
+        event_settings
+    ):
+        statuses_by_id = {
+            status["id"]: status
+            for status in statuses
+        }

-            # Current task type is last in order
-            if from_idx is None or from_idx >= sorted_task_types_len:
-                continue
+        # Lower ignored statuses
+        ignored_statuses = set(
+            status_name.lower()
+            for status_name in event_settings["ignored_statuses"]
+        )
+        # Lower both key and value of mapped statuses
+        mapping = {
+            status_from.lower(): status_to.lower()
+            for status_from, status_to in event_settings["mapping"].items()
+        }
+        # Should use name sorting or not
+        name_sorting = event_settings["name_sorting"]

-            next_task_type_id = None
-            next_task_type_tasks = []
-            for idx in range(from_idx, sorted_task_types_len):
-                next_task_type = sorted_task_types[idx]
-                for parents_task in tasks_to_parent_id[parent_id]:
-                    if next_task_type_id is None:
-                        if parents_task["type_id"] != next_task_type["id"]:
-                            continue
-                        next_task_type_id = next_task_type["id"]
+        # Collect task type ids from changed entities
+        task_type_ids = set()
+        for task_entities in tasks_by_parent_id.values():
+            for task_entity in task_entities:
+                task_type_ids.add(task_entity["type_id"])

-                    if parents_task["type_id"] == next_task_type_id:
-                        next_task_type_tasks.append(parents_task)
+        statuses_by_obj_id = self.statuses_for_tasks(
+            task_type_ids, project_entity
+        )

-                if next_task_type_id is not None:
-                    break
+        sorted_task_type_ids = self.get_sorted_task_type_ids(session)

-            for next_task_entity in next_task_type_tasks:
-                if next_task_entity["status"]["name"].lower() != "not ready":
-                    continue
+        for parent_id, _task_entities in tasks_by_parent_id.items():
+            task_entities_by_type_id = collections.defaultdict(list)
+            for _task_entity in _task_entities:
+                type_id = _task_entity["type_id"]
+                task_entities_by_type_id[type_id].append(_task_entity)

-                ent_path = "/".join(
-                    [ent["name"] for ent in next_task_entity["link"]]
+            event_ids = set(event_task_ids_by_parent_id[parent_id])
+            if name_sorting:
+                # Sort entities by name
+                self.sort_by_name_task_entities_by_type(
+                    task_entities_by_type_id
                 )
-                try:
-                    next_task_entity["status"] = next_status
-                    session.commit()
-                    self.log.info(
-                        "\"{}\" updated status to \"{}\"".format(
-                            ent_path, next_status_name
-                        )
+                # Sort entities by type id
+                sorted_task_entities = []
+                for type_id in sorted_task_type_ids:
+                    task_entities = task_entities_by_type_id.get(type_id)
+                    if task_entities:
+                        sorted_task_entities.extend(task_entities)
+
+                next_tasks = self.next_tasks_with_name_sorting(
+                    sorted_task_entities,
+                    event_ids,
+                    statuses_by_id,
+                    ignored_statuses
+                )
+
+            else:
+                next_tasks = self.next_tasks_with_type_sorting(
+                    task_entities_by_type_id,
+                    sorted_task_type_ids,
+                    event_ids,
+                    statuses_by_id,
+                    ignored_statuses
+                )
+
+            for task_entity in next_tasks:
+                if task_entity["status"]["state"]["name"].lower() == "done":
+                    continue
+
+                task_status = statuses_by_id[task_entity["status_id"]]
+                old_status_name = task_status["name"].lower()
+                if old_status_name in ignored_statuses:
+                    continue
+
+                new_task_name = mapping.get(old_status_name)
+                if not new_task_name:
+                    self.log.debug(
+                        "Didn't find mapping for status \"{}\".".format(
+                            task_status["name"]
                         )
-                except Exception:
-                    session.rollback()
-                    self.log.warning(
-                        "\"{}\" status couldnt be set to \"{}\"".format(
-                            ent_path, next_status_name
                         ),
-                        exc_info=True
+                    )
+                    continue
+
+                ent_path = "/".join(
+                    [ent["name"] for ent in task_entity["link"]]
                 )
+                type_id = task_entity["type_id"]
+                new_status = statuses_by_obj_id[type_id].get(new_task_name)
+                if new_status is None:
+                    self.log.warning((
+                        "\"{}\" does not have available status name \"{}\""
+                    ).format(ent_path, new_task_name))
+                    continue
+
+                try:
+                    task_entity["status_id"] = new_status["id"]
+                    session.commit()
+                    self.log.info(
+                        "\"{}\" updated status to \"{}\"".format(
+                            ent_path, new_status["name"]
                         )
+                    )
+                except Exception:
+                    session.rollback()
+                    self.log.warning(
+                        "\"{}\" status couldn't be set to \"{}\"".format(
+                            ent_path, new_status["name"]
                        ),
+                        exc_info=True
+                    )
+
+    def next_tasks_with_name_sorting(
+        self,
+        sorted_task_entities,
+        event_ids,
+        statuses_by_id,
+        ignored_statuses,
+    ):
+        # Task entities are already sorted by name at this point
+        use_next_task = False
+        next_tasks = []
+        for task_entity in sorted_task_entities:
+            if task_entity["id"] in event_ids:
+                event_ids.remove(task_entity["id"])
+                use_next_task = True
+                continue
+
+            if not use_next_task:
+                continue
+
+            task_status = statuses_by_id[task_entity["status_id"]]
+            low_status_name = task_status["name"].lower()
+            if low_status_name in ignored_statuses:
+                continue
+
+            next_tasks.append(task_entity)
+            use_next_task = False
+            if not event_ids:
+                break
+
+        return next_tasks
+
+    def check_statuses_done(
+        self, task_entities, ignored_statuses, statuses_by_id
+    ):
+        all_are_done = True
+        for task_entity in task_entities:
+            task_status = statuses_by_id[task_entity["status_id"]]
+            low_status_name = task_status["name"].lower()
+            if low_status_name in ignored_statuses:
+                continue

+            low_state_name = task_status["state"]["name"].lower()
+            if low_state_name != "done":
+                all_are_done = False
+                break
+        return all_are_done
+
+    def next_tasks_with_type_sorting(
+        self,
+        task_entities_by_type_id,
+        sorted_task_type_ids,
+        event_ids,
+        statuses_by_id,
+        ignored_statuses
+    ):
+        # `use_next_tasks` marks that tasks of a following type should be
+        #   collected as the next tasks
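+        # Once a task type that contained an event task is fully done, all
+        #   tasks of the next not-done type in order are collected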
+        next_tasks = []
+        use_next_tasks = False
+        for type_id in sorted_task_type_ids:
+            if type_id not in task_entities_by_type_id:
+                continue
+
+            task_entities = task_entities_by_type_id[type_id]
+
+            # Check if any task was in event
+            event_id_in_tasks = False
+            for task_entity in task_entities:
+                task_id = task_entity["id"]
+                if task_id in event_ids:
+                    event_ids.remove(task_id)
+                    event_id_in_tasks = True
+
+            if use_next_tasks:
+                # Check if next tasks are not done already
+                all_in_type_done = self.check_statuses_done(
+                    task_entities, ignored_statuses, statuses_by_id
+                )
+                if all_in_type_done:
+                    continue
+
+                next_tasks.extend(task_entities)
+                use_next_tasks = False
+                if not event_ids:
+                    break
+
+            if not event_id_in_tasks:
+                continue
+
+            all_in_type_done = self.check_statuses_done(
+                task_entities, ignored_statuses, statuses_by_id
+            )
+            use_next_tasks = all_in_type_done
+            if all_in_type_done:
+                continue
+
+            if not event_ids:
+                break
+
+            use_next_tasks = False
+
+        return next_tasks
+
+    def statuses_for_tasks(self, task_type_ids, project_entity):
+        project_schema = project_entity["project_schema"]
+        output = {}
+        for task_type_id in task_type_ids:
+            statuses = project_schema.get_statuses("Task", task_type_id)
+            output[task_type_id] = {
+                status["name"].lower(): status
+                for status in statuses
+            }
+
+        return output
+
+    def get_sorted_task_type_ids(self, session):
+        types_by_order = collections.defaultdict(list)
+        for _type in session.query("Type").all():
+            sort_order = _type.get("sort")
+            if sort_order is not None:
+                types_by_order[sort_order].append(_type["id"])
+
+        types = []
+        for sort_order in sorted(types_by_order.keys()):
+            types.extend(types_by_order[sort_order])
+        return types
+
+    @staticmethod
+    def sort_by_name_task_entities_by_type(task_entities_by_type_id):
+        _task_entities_by_type_id = {}
+        for type_id, task_entities in task_entities_by_type_id.items():
+            # Store tasks by name
+            task_entities_by_name = {}
+            for task_entity in task_entities:
+                task_name = task_entity["name"]
+                task_entities_by_name[task_name] = task_entity
+
+            # Store task entities by sorted names
+            sorted_task_entities = []
+            for task_name in sorted(task_entities_by_name.keys()):
+                task_entity = task_entities_by_name[task_name]
+                sorted_task_entities.append(task_entity)
+            # Store result to temp dictionary
+            _task_entities_by_type_id[type_id] = sorted_task_entities
+
+        # Override values in source object
+        for type_id, value in _task_entities_by_type_id.items():
+            task_entities_by_type_id[type_id] = value


 def register(session):
diff --git a/pype/modules/ftrack/events/event_push_frame_values_to_task.py b/pype/modules/ftrack/events/event_push_frame_values_to_task.py
index 061002c13f..8e277679bd 100644
--- a/pype/modules/ftrack/events/event_push_frame_values_to_task.py
+++ b/pype/modules/ftrack/events/event_push_frame_values_to_task.py
@@ -7,8 +7,6 @@ from pype.modules.ftrack import BaseEvent

 class PushFrameValuesToTaskEvent(BaseEvent):
     # Ignore event handler by default
-    ignore_me = True
-
     cust_attrs_query = (
         "select id, key, object_type_id, is_hierarchical, default"
         " from CustomAttributeConfiguration"
@@ -27,36 +25,7 @@ class PushFrameValuesToTaskEvent(BaseEvent):
     _cached_changes = []
     _max_delta = 30

-    # Configrable (lists)
-    interest_entity_types = {"Shot"}
-    interest_attributes = {"frameStart", "frameEnd"}
-
-    @staticmethod
-    def join_keys(keys):
-        return ",".join(["\"{}\"".format(key) for key in keys])
-
-    @classmethod
-    def task_object_id(cls, session):
-        if cls._cached_task_object_id is None:
-            task_object_type = session.query(
-                "ObjectType where name is \"Task\""
-            ).one()
-            cls._cached_task_object_id = task_object_type["id"]
-        return cls._cached_task_object_id
-
-    @classmethod
-    def interest_object_ids(cls, session):
-        if cls._cached_interest_object_ids is None:
-            object_types = session.query(
-                "ObjectType where name in ({})".format(
-                    cls.join_keys(cls.interest_entity_types)
-                )
-            ).all()
-            cls._cached_interest_object_ids = tuple(
-                object_type["id"]
-                for object_type in object_types
-            )
-        return cls._cached_interest_object_ids
+    settings_key = "sync_hier_entity_attributes"

     def session_user_id(self, session):
         if self._cached_user_id is None:
@@ -67,30 +36,146 @@ class PushFrameValuesToTaskEvent(BaseEvent):
         return self._cached_user_id

     def launch(self, session, event):
-        interesting_data, changed_keys_by_object_id = (
-            self.extract_interesting_data(session, event)
+        filtered_entities_info = self.filter_entities_info(event)
+        if not filtered_entities_info:
+            return
+
+        for project_id, entities_info in filtered_entities_info.items():
+            self.process_by_project(session, event, project_id, entities_info)
+
+    def filter_entities_info(self, event):
+        # Filter if event contains relevant data
+        entities_info = event["data"].get("entities")
+        if not entities_info:
+            return
+
+        entities_info_by_project_id = {}
+        for entity_info in entities_info:
+            # Care only about tasks
+            if entity_info.get("entityType") != "task":
+                continue
+
+            # Skip `Task` entity type
+            if entity_info["entity_type"].lower() == "task":
+                continue
+
+            # Care only about entities with changes
+            changes = entity_info.get("changes")
+            if not changes:
+                continue
+
+            # Get project id from entity info
+            project_id = None
+            for parent_item in reversed(entity_info["parents"]):
+                if parent_item["entityType"] == "show":
+                    project_id = parent_item["entityId"]
+                    break
+
+            if project_id is None:
+                continue
+
+            if project_id not in entities_info_by_project_id:
+                entities_info_by_project_id[project_id] = []
+            entities_info_by_project_id[project_id].append(entity_info)
+
+        return entities_info_by_project_id
+
+    def process_by_project(self, session, event, project_id, entities_info):
+        project_name = self.get_project_name_from_event(
+            session, event, project_id
+        )
+        # Load settings
+        project_settings = self.get_project_settings_from_event(
+            event, project_name
+        )
+        # Load status mapping from presets
+        event_settings = (
+            project_settings
+            ["ftrack"]
+            ["events"]
+            ["sync_hier_entity_attributes"]
+        )
+        # Skip if event is not enabled
+        if not event_settings["enabled"]:
+            self.log.debug("Project \"{}\" has disabled {}".format(
+                project_name, self.__class__.__name__
+            ))
+            return
+
+        interest_attributes = event_settings["interest_attributes"]
+        if not interest_attributes:
+            self.log.info((
+                "Project \"{}\" does not have filled 'interest_attributes',"
+                " skipping."
+            ).format(project_name))
+            return
+        interest_entity_types = event_settings["interest_entity_types"]
+        if not interest_entity_types:
+            self.log.info((
+                "Project \"{}\" does not have filled 'interest_entity_types',"
+                " skipping."
+            ).format(project_name))
+            return
+
+        # Filter entities info with changes
+        interesting_data, changed_keys_by_object_id = self.filter_changes(
+            session, event, entities_info, interest_attributes
         )
         if not interesting_data:
             return

-        entities = self.get_entities(session, interesting_data)
+        # Prepare object types
+        object_types = session.query("select id, name from ObjectType").all()
+        object_types_by_name = {}
+        for object_type in object_types:
+            name_low = object_type["name"].lower()
+            object_types_by_name[name_low] = object_type
+
+        # Prepare task object id
+        task_object_id = object_types_by_name["task"]["id"]
+
+        # Collect object type ids based on settings
+        interest_object_ids = []
+        for entity_type in interest_entity_types:
+            _entity_type = entity_type.lower()
+            object_type = object_types_by_name.get(_entity_type)
+            if not object_type:
+                self.log.warning("Couldn't find object type \"{}\"".format(
+                    entity_type
+                ))
+                continue
+
+            interest_object_ids.append(object_type["id"])
+
+        # Query entities by filtered data and object ids
+        entities = self.get_entities(
+            session, interesting_data, interest_object_ids
+        )
         if not entities:
             return

-        entities_by_id = {
-            entity["id"]: entity
+        # Pop not found entities from interesting data
+        entity_ids = set(
+            entity["id"]
             for entity in entities
-        }
+        )
         for entity_id in tuple(interesting_data.keys()):
-            if entity_id not in entities_by_id:
+            if entity_id not in entity_ids:
                 interesting_data.pop(entity_id)

-        attrs_by_obj_id, hier_attrs = self.attrs_configurations(session)
+        # Add task object type to list
+        attr_obj_ids = list(interest_object_ids)
+        attr_obj_ids.append(task_object_id)
+
+        attrs_by_obj_id, hier_attrs = self.attrs_configurations(
+            session, attr_obj_ids, interest_attributes
+        )

-        task_object_id = self.task_object_id(session)
         task_attrs = attrs_by_obj_id.get(task_object_id)
+
+        changed_keys = set()
         # Skip keys that are not both in hierachical and type specific
         for object_id, keys in changed_keys_by_object_id.items():
+            changed_keys |= set(keys)
             object_id_attrs = attrs_by_obj_id.get(object_id)
             for key in keys:
                 if key not in hier_attrs:
@@ -113,8 +198,8 @@ class PushFrameValuesToTaskEvent(BaseEvent):
                 "There is not created Custom Attributes {} "
                 " for entity types: {}"
             ).format(
-                self.join_keys(self.interest_attributes),
-                self.join_keys(self.interest_entity_types)
+                self.join_query_keys(interest_attributes),
+                self.join_query_keys(interest_entity_types)
             ))
             return

@@ -124,16 +209,24 @@ class PushFrameValuesToTaskEvent(BaseEvent):

         if task_attrs:
             task_entities = self.get_task_entities(session, interesting_data)

-        task_entities_by_id = {}
+        task_entity_ids = set()
         parent_id_by_task_id = {}
         for task_entity in task_entities:
-            task_entities_by_id[task_entity["id"]] = task_entity
-            parent_id_by_task_id[task_entity["id"]] = task_entity["parent_id"]
+            task_id = task_entity["id"]
+            task_entity_ids.add(task_id)
+            parent_id_by_task_id[task_id] = task_entity["parent_id"]

-        changed_keys = set()
-        for keys in changed_keys_by_object_id.values():
-            changed_keys |= set(keys)
+        self.finalize(
+            session, interesting_data,
+            changed_keys, attrs_by_obj_id, hier_attrs,
+            task_entity_ids, parent_id_by_task_id
+        )

+    def finalize(
+        self, session, interesting_data,
+        changed_keys, attrs_by_obj_id, hier_attrs,
+        task_entity_ids, parent_id_by_task_id
+    ):
         attr_id_to_key = {}
         for attr_confs in attrs_by_obj_id.values():
             for key in changed_keys:
@@ -147,12 +240,12 @@ class PushFrameValuesToTaskEvent(BaseEvent):
                 attr_id_to_key[custom_attr_id] = key

         entity_ids = (
-            set(interesting_data.keys()) | set(task_entities_by_id.keys())
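+            # Combine changed entity ids with ids of their task entities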
+            set(interesting_data.keys()) | task_entity_ids
         )
         attr_ids = set(attr_id_to_key.keys())

         current_values_by_id = self.current_values(
-            session, attr_ids, entity_ids, task_entities_by_id, hier_attrs
+            session, attr_ids, entity_ids, task_entity_ids, hier_attrs
         )

         for entity_id, current_values in current_values_by_id.items():
@@ -214,45 +307,9 @@ class PushFrameValuesToTaskEvent(BaseEvent):
                 session.rollback()
                 self.log.warning("Changing of values failed.", exc_info=True)

-    def current_values(
-        self, session, attr_ids, entity_ids, task_entities_by_id, hier_attrs
+    def filter_changes(
+        self, session, event, entities_info, interest_attributes
     ):
-        current_values_by_id = {}
-        if not attr_ids or not entity_ids:
-            return current_values_by_id
-        joined_conf_ids = self.join_keys(attr_ids)
-        joined_entity_ids = self.join_keys(entity_ids)
-
-        call_expr = [{
-            "action": "query",
-            "expression": self.cust_attr_query.format(
-                joined_entity_ids, joined_conf_ids
-            )
-        }]
-        if hasattr(session, "call"):
-            [values] = session.call(call_expr)
-        else:
-            [values] = session._call(call_expr)
-
-        for item in values["data"]:
-            entity_id = item["entity_id"]
-            attr_id = item["configuration_id"]
-            if entity_id in task_entities_by_id and attr_id in hier_attrs:
-                continue
-
-            if entity_id not in current_values_by_id:
-                current_values_by_id[entity_id] = {}
-            current_values_by_id[entity_id][attr_id] = item["value"]
-        return current_values_by_id
-
-    def extract_interesting_data(self, session, event):
-        # Filter if event contain relevant data
-        entities_info = event["data"].get("entities")
-        if not entities_info:
-            return
-
-        # for key, value in event["data"].items():
-        #     self.log.info("{}: {}".format(key, value))
         session_user_id = self.session_user_id(session)
         user_data = event["data"].get("user")
         changed_by_session = False
@@ -264,18 +321,10 @@ class PushFrameValuesToTaskEvent(BaseEvent):
         interesting_data = {}
         changed_keys_by_object_id = {}
         for entity_info in entities_info:
-            # Care only about tasks
-            if entity_info.get("entityType") != "task":
-                continue
-
-            # Care only about changes of status
-            changes = entity_info.get("changes") or {}
-            if not changes:
-                continue
-
             # Care only about changes if specific keys
             entity_changes = {}
-            for key in self.interest_attributes:
+            changes = entity_info["changes"]
+            for key in interest_attributes:
                 if key in changes:
                     entity_changes[key] = changes[key]["new"]

@@ -307,48 +356,66 @@ class PushFrameValuesToTaskEvent(BaseEvent):
             if not entity_changes:
                 continue

-            # Do not care about "Task" entity_type
-            task_object_id = self.task_object_id(session)
-            object_id = entity_info.get("objectTypeId")
-            if not object_id or object_id == task_object_id:
-                continue
-
+            entity_id = entity_info["entityId"]
+            object_id = entity_info["objectTypeId"]
             interesting_data[entity_id] = entity_changes
             if object_id not in changed_keys_by_object_id:
                 changed_keys_by_object_id[object_id] = set()
-
             changed_keys_by_object_id[object_id] |= set(entity_changes.keys())

         return interesting_data, changed_keys_by_object_id

-    def get_entities(self, session, interesting_data):
-        entities = session.query(
-            "TypedContext where id in ({})".format(
-                self.join_keys(interesting_data.keys())
-            )
-        ).all()
+    def current_values(
+        self, session, attr_ids, entity_ids, task_entity_ids, hier_attrs
+    ):
+        current_values_by_id = {}
+        if not attr_ids or not entity_ids:
+            return current_values_by_id
+        joined_conf_ids = self.join_query_keys(attr_ids)
+        joined_entity_ids = self.join_query_keys(entity_ids)
-        output = []
-        interest_object_ids = self.interest_object_ids(session)
-        for entity in entities:
-            if entity["object_type_id"] in interest_object_ids:
-                output.append(entity)
-        return output
+        call_expr = [{
+            "action": "query",
+            "expression": self.cust_attr_query.format(
+                joined_entity_ids, joined_conf_ids
+            )
+        }]
+        if hasattr(session, "call"):
+            [values] = session.call(call_expr)
+        else:
+            [values] = session._call(call_expr)
+
+        for item in values["data"]:
+            entity_id = item["entity_id"]
+            attr_id = item["configuration_id"]
+            if entity_id in task_entity_ids and attr_id in hier_attrs:
+                continue
+
+            if entity_id not in current_values_by_id:
+                current_values_by_id[entity_id] = {}
+            current_values_by_id[entity_id][attr_id] = item["value"]
+        return current_values_by_id
+
+    def get_entities(self, session, interesting_data, interest_object_ids):
+        return session.query((
+            "select id from TypedContext"
+            " where id in ({}) and object_type_id in ({})"
+        ).format(
+            self.join_query_keys(interesting_data.keys()),
+            self.join_query_keys(interest_object_ids)
+        )).all()

     def get_task_entities(self, session, interesting_data):
         return session.query(
-            "Task where parent_id in ({})".format(
-                self.join_keys(interesting_data.keys())
+            "select id, parent_id from Task where parent_id in ({})".format(
+                self.join_query_keys(interesting_data.keys())
             )
         ).all()

-    def attrs_configurations(self, session):
-        object_ids = list(self.interest_object_ids(session))
-        object_ids.append(self.task_object_id(session))
-
+    def attrs_configurations(self, session, object_ids, interest_attributes):
         attrs = session.query(self.cust_attrs_query.format(
-            self.join_keys(self.interest_attributes),
-            self.join_keys(object_ids)
+            self.join_query_keys(interest_attributes),
+            self.join_query_keys(object_ids)
         )).all()

         output = {}
diff --git a/pype/modules/ftrack/events/event_task_to_parent_status.py b/pype/modules/ftrack/events/event_task_to_parent_status.py
index 2bb7be1a26..9b1f61911e 100644
--- a/pype/modules/ftrack/events/event_task_to_parent_status.py
+++ b/pype/modules/ftrack/events/event_task_to_parent_status.py
@@ -56,17 +56,16 @@ class TaskStatusToParent(BaseEvent):
         return filtered_entity_info

     def process_by_project(self, session, event, project_id, entities_info):
-        # Get project entity
-        project_entity = self.get_project_entity_from_event(
+        # Get project name
+        project_name = self.get_project_name_from_event(
             session, event, project_id
         )
         # Load settings
-        project_settings = self.get_settings_for_project(
-            session, event, project_entity=project_entity
+        project_settings = self.get_project_settings_from_event(
+            event, project_name
         )
         # Prepare loaded settings and check if can be processed
-        project_name = project_entity["full_name"]
         result = self.prepare_settings(project_settings, project_name)
         if not result:
             return
@@ -133,6 +132,7 @@ class TaskStatusToParent(BaseEvent):
                 obj_id = object_type["id"]
                 object_type_name_by_id[obj_id] = types_mapping[mapping_name]

+        project_entity = session.get("Project", project_id)
         project_schema = project_entity["project_schema"]
         available_statuses_by_obj_id = {}
         for obj_id in obj_ids:
diff --git a/pype/modules/ftrack/events/event_task_to_version_status.py b/pype/modules/ftrack/events/event_task_to_version_status.py
index 8d226424c3..d27a7f9e98 100644
--- a/pype/modules/ftrack/events/event_task_to_version_status.py
+++ b/pype/modules/ftrack/events/event_task_to_version_status.py
@@ -99,14 +99,14 @@ class TaskToVersionStatus(BaseEvent):
         if not entities_info:
             return

-        project_entity = self.get_project_entity_from_event(
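+        # Project name is cached to event data so it is queried only once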
+        project_name = self.get_project_name_from_event(
             session, event, project_id
         )
-        project_settings = self.get_settings_for_project(
-            session, event, project_entity=project_entity
+        # Load settings
+        project_settings = self.get_project_settings_from_event(
+            event, project_name
         )
-        project_name = project_entity["full_name"]
         event_settings = (
             project_settings["ftrack"]["events"][self.settings_key]
         )
@@ -171,6 +171,7 @@ class TaskToVersionStatus(BaseEvent):
         }

         # Final process of changing statuses
+        project_entity = session.get("Project", project_id)
         av_statuses_by_low_name, av_statuses_by_id = (
             self.get_asset_version_statuses(project_entity)
         )
diff --git a/pype/modules/ftrack/events/event_thumbnail_updates.py b/pype/modules/ftrack/events/event_thumbnail_updates.py
index 09d992b8c4..b71322c894 100644
--- a/pype/modules/ftrack/events/event_thumbnail_updates.py
+++ b/pype/modules/ftrack/events/event_thumbnail_updates.py
@@ -19,14 +19,14 @@ class ThumbnailEvents(BaseEvent):
     def process_project_entities(
         self, session, event, project_id, entities_info
     ):
-        project_entity = self.get_project_entity_from_event(
+        project_name = self.get_project_name_from_event(
             session, event, project_id
         )
-        project_settings = self.get_settings_for_project(
-            session, event, project_entity=project_entity
+        # Load settings
+        project_settings = self.get_project_settings_from_event(
+            event, project_name
         )
-        project_name = project_entity["full_name"]
         event_settings = (
             project_settings
             ["ftrack"]
diff --git a/pype/modules/ftrack/events/event_version_to_task_statuses.py b/pype/modules/ftrack/events/event_version_to_task_statuses.py
index 03f873f2cd..4a42e27336 100644
--- a/pype/modules/ftrack/events/event_version_to_task_statuses.py
+++ b/pype/modules/ftrack/events/event_version_to_task_statuses.py
@@ -47,15 +47,14 @@ class VersionToTaskStatus(BaseEvent):

     def process_by_project(self, session, event, project_id, entities_info):
         # Check for project data if event is enabled for event handler
-        status_mapping = None
-        project_entity = self.get_project_entity_from_event(
+        project_name = self.get_project_name_from_event(
             session, event, project_id
         )
-        project_settings = self.get_settings_for_project(
-            session, event, project_entity=project_entity
+        # Load settings
+        project_settings = self.get_project_settings_from_event(
+            event, project_name
         )
-        project_name = project_entity["full_name"]
         # Load status mapping from presets
         event_settings = (
             project_settings["ftrack"]["events"]["status_version_to_task"]
@@ -147,7 +146,7 @@ class VersionToTaskStatus(BaseEvent):

         # Qeury statuses
         statusese_by_obj_id = self.statuses_for_tasks(
-            session, task_entities, project_entity
+            session, task_entities, project_id
         )
         # Prepare status names by their ids
         status_name_by_id = {
@@ -224,11 +223,12 @@ class VersionToTaskStatus(BaseEvent):
                 exc_info=True
             )

-    def statuses_for_tasks(self, session, task_entities, project_entity):
+    def statuses_for_tasks(self, session, task_entities, project_id):
         task_type_ids = set()
         for task_entity in task_entities:
             task_type_ids.add(task_entity["type_id"])

+        project_entity = session.get("Project", project_id)
         project_schema = project_entity["project_schema"]
         output = {}
         for task_type_id in task_type_ids:
diff --git a/pype/modules/ftrack/ftrack_module.py b/pype/modules/ftrack/ftrack_module.py
index 44607681ec..d2de27e1b9 100644
--- a/pype/modules/ftrack/ftrack_module.py
+++ b/pype/modules/ftrack/ftrack_module.py
@@ -3,9 +3,16 @@ from abc import ABCMeta, abstractmethod
 import six
 import pype
 from pype.modules import (
-    PypeModule, ITrayModule, IPluginPaths, ITimersManager, IUserModule
+    PypeModule,
+    ITrayModule,
+    IPluginPaths,
+    ITimersManager,
+    IUserModule,
+    ILaunchHookPaths
 )

+FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
+

 @six.add_metaclass(ABCMeta)
 class IFtrackEventHandlerPaths:
@@ -19,7 +26,12 @@ class IFtrackEventHandlerPaths:


 class FtrackModule(
-    PypeModule, ITrayModule, IPluginPaths, ITimersManager, IUserModule
+    PypeModule,
+    ITrayModule,
+    IPluginPaths,
+    ITimersManager,
+    IUserModule,
+    ILaunchHookPaths
 ):
     name = "ftrack"

@@ -54,6 +66,10 @@ class FtrackModule(
             "publish": [os.path.join(pype.PLUGINS_DIR, "ftrack", "publish")]
         }

+    def get_launch_hook_paths(self):
+        """Implementation of `ILaunchHookPaths`."""
+        return os.path.join(FTRACK_MODULE_DIR, "launch_hooks")
+
     def connect_with_modules(self, enabled_modules):
         for module in enabled_modules:
             if not isinstance(module, IFtrackEventHandlerPaths):
diff --git a/pype/modules/ftrack/ftrack_server/socket_thread.py b/pype/modules/ftrack/ftrack_server/socket_thread.py
index e66e8bc775..6a5fe2c9d6 100644
--- a/pype/modules/ftrack/ftrack_server/socket_thread.py
+++ b/pype/modules/ftrack/ftrack_server/socket_thread.py
@@ -55,6 +55,8 @@ class SocketThread(threading.Thread):
             "Running Socked thread on {}:{}".format(*server_address)
         )

+        env = os.environ.copy()
+        env["PYPE_PROCESS_MONGO_ID"] = str(Logger.mongo_process_id)
         self.subproc = subprocess.Popen(
             [
                 sys.executable,
@@ -62,6 +64,7 @@ class SocketThread(threading.Thread):
                 *self.additional_args,
                 str(self.port)
             ],
+            env=env,
             stdin=subprocess.PIPE
         )

diff --git a/pype/modules/ftrack/ftrack_server/sub_user_server.py b/pype/modules/ftrack/ftrack_server/sub_user_server.py
index 1dedbbc7d0..58d5982ac2 100644
--- a/pype/modules/ftrack/ftrack_server/sub_user_server.py
+++ b/pype/modules/ftrack/ftrack_server/sub_user_server.py
@@ -51,6 +51,8 @@ def main(args):


 if __name__ == "__main__":
+    Logger.set_process_name("Ftrack User server")
+
     # Register interupt signal
     def signal_handler(sig, frame):
         log.info(
diff --git a/pype/hooks/global/post_ftrack_changes.py b/pype/modules/ftrack/launch_hooks/post_ftrack_changes.py
similarity index 100%
rename from pype/hooks/global/post_ftrack_changes.py
rename to pype/modules/ftrack/launch_hooks/post_ftrack_changes.py
diff --git a/pype/modules/ftrack/launch_hooks/pre_python2_vendor.py b/pype/modules/ftrack/launch_hooks/pre_python2_vendor.py
new file mode 100644
index 0000000000..46b4009737
--- /dev/null
+++ b/pype/modules/ftrack/launch_hooks/pre_python2_vendor.py
@@ -0,0 +1,40 @@
+import os
+from pype.lib import PreLaunchHook
+from pype.modules.ftrack import FTRACK_MODULE_DIR
+
+
+class PrePython2Support(PreLaunchHook):
+    """Add Python 2 ftrack api module to PYTHONPATH.
+
+    Path to vendor modules is added to the beginning of PYTHONPATH.
+    """
+    # More granular filtering will be needed in the future
+    app_groups = ["maya", "nuke", "nukex", "hiero", "nukestudio"]
+
+    def execute(self):
+        # Prepare vendor dir path
+        python_2_vendor = os.path.join(FTRACK_MODULE_DIR, "python2_vendor")
+
+        # Add Python 2 modules
+        python_paths = [
+            # `ftrack-python-api`
+            os.path.join(python_2_vendor, "ftrack-python-api", "source"),
+            # `arrow`
+            os.path.join(python_2_vendor, "arrow"),
+            # `builtins` from `python-future`
+            # - `python-future` is a strict Python 2 module that causes
+            #   crashes of Python 3 scripts executed through pype
+            #   (burnin script etc.)
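+            #   so only its `builtins` package is vendored here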
+            os.path.join(python_2_vendor, "builtins"),
+            # `backports.functools_lru_cache`
+            os.path.join(
+                python_2_vendor, "backports.functools_lru_cache"
+            )
+        ]
+
+        # Load PYTHONPATH from current launch context
+        python_path = self.launch_context.env.get("PYTHONPATH")
+        if python_path:
+            python_paths.append(python_path)
+
+        # Set new PYTHONPATH to launch context environments
+        self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths)
diff --git a/pype/modules/ftrack/lib/avalon_sync.py b/pype/modules/ftrack/lib/avalon_sync.py
index cb34ce918d..2e9d8d68ed 100644
--- a/pype/modules/ftrack/lib/avalon_sync.py
+++ b/pype/modules/ftrack/lib/avalon_sync.py
@@ -284,7 +284,7 @@ class SyncEntitiesFactory:
         " from Project where full_name is \"{}\""
     )
     entities_query = (
-        "select id, name, parent_id, link"
+        "select id, name, type_id, parent_id, link"
         " from TypedContext where project_id is \"{}\""
     )
     ignore_custom_attr_key = "avalon_ignore_sync"
@@ -399,11 +399,6 @@ class SyncEntitiesFactory:
             "message": "Synchronization failed"
         }

-        # Find all entities in project
-        all_project_entities = self.session.query(
-            self.entities_query.format(ft_project_id)
-        ).all()
-
         # Store entities by `id` and `parent_id`
         entities_dict = collections.defaultdict(lambda: {
             "children": list(),
@@ -417,6 +412,15 @@ class SyncEntitiesFactory:
             "tasks": {}
         })

+        # Find all entities in project
+        all_project_entities = self.session.query(
+            self.entities_query.format(ft_project_id)
+        ).all()
+        task_types = self.session.query("select id, name from Type").all()
+        task_type_names_by_id = {
+            task_type["id"]: task_type["name"]
+            for task_type in task_types
+        }
         for entity in all_project_entities:
             parent_id = entity["parent_id"]
             entity_type = entity.entity_type
@@ -426,7 +430,8 @@ class SyncEntitiesFactory:

             elif entity_type_low == "task":
                 # enrich task info with additional metadata
-                task = {"type": entity["type"]["name"]}
+                task_type_name = task_type_names_by_id[entity["type_id"]]
+                task = {"type": task_type_name}
                 entities_dict[parent_id]["tasks"][entity["name"]] = task
                 continue

diff --git a/pype/modules/ftrack/lib/custom_attributes.json b/pype/modules/ftrack/lib/custom_attributes.json
index 17ff6691d3..3945dfaf6e 100644
--- a/pype/modules/ftrack/lib/custom_attributes.json
+++ b/pype/modules/ftrack/lib/custom_attributes.json
@@ -2,15 +2,11 @@
     "show": {
         "avalon_auto_sync": {
             "label": "Avalon auto-sync",
-            "type": "boolean",
-            "write_security_role": ["API", "Administrator"],
-            "read_security_role": ["API", "Administrator"]
+            "type": "boolean"
         },
         "library_project": {
             "label": "Library Project",
-            "type": "boolean",
-            "write_security_role": ["API", "Administrator"],
-            "read_security_role": ["API", "Administrator"]
+            "type": "boolean"
         }
     },
     "is_hierarchical": {
diff --git a/pype/modules/ftrack/lib/ftrack_action_handler.py b/pype/modules/ftrack/lib/ftrack_action_handler.py
index e04ed6b404..d95c81955e 100644
--- a/pype/modules/ftrack/lib/ftrack_action_handler.py
+++ b/pype/modules/ftrack/lib/ftrack_action_handler.py
@@ -29,6 +29,9 @@ class BaseAction(BaseHandler):
     icon = None
     type = 'Action'

+    settings_frack_subkey = "user_handlers"
+    settings_enabled_key = "enabled"
+
     def __init__(self, session):
         '''Expects a ftrack_api.Session instance'''
         if self.label is None:
@@ -67,6 +70,9 @@ class BaseAction(BaseHandler):

     def _discover(self, event):
         entities = self._translate_event(event)
+        if not entities:
+            return
+
         accepts = self.discover(self.session, entities, event)
         if not accepts:
             return
@@ -146,21 +152,18 @@
     def _launch(self, event):
         entities = self._translate_event(event)
+        if not entities:
+            return

         preactions_launched = self._handle_preactions(self.session, event)
         if preactions_launched is False:
             return

-        interface = self._interface(
-            self.session, entities, event
-        )
-
+        interface = self._interface(self.session, entities, event)
         if interface:
             return interface

-        response = self.launch(
-            self.session, entities, event
-        )
+        response = self.launch(self.session, entities, event)

         return self._handle_result(response)
@@ -196,50 +199,29 @@ class BaseAction(BaseHandler):

         return result

+    @staticmethod
+    def roles_check(settings_roles, user_roles, default=True):
+        """Compare role names from settings with user's roles.

-class ServerAction(BaseAction):
-    """Action class meant to be used on event server.
+        Args:
+            settings_roles(list): List of role names from settings.
+            user_roles(list): User's lowered role names.
+            default(bool): Value returned when `settings_roles` is empty.

-    Unlike the `BaseAction` roles are not checked on register but on discover.
-    For the same reason register is modified to not filter topics by username.
-    """
+        Returns:
+            bool: `True` if user has at least one role from settings or
+                default if `settings_roles` is empty.
+        """
+        if not settings_roles:
+            return default

-    def __init__(self, *args, **kwargs):
-        if not self.role_list:
-            self.role_list = set()
-        else:
-            self.role_list = set(
-                role_name.lower()
-                for role_name in self.role_list
-            )
-        super(ServerAction, self).__init__(*args, **kwargs)
-
-    def _register_role_check(self):
-        # Skip register role check.
-        return
-
-    def _discover(self, event):
-        """Check user discover availability."""
-        if not self._check_user_discover(event):
-            return
-        return super(ServerAction, self)._discover(event)
-
-    def _check_user_discover(self, event):
-        """Should be action discovered by user trying to show actions."""
-        if not self.role_list:
-            return True
-
-        user_entity = self._get_user_entity(event)
-        if not user_entity:
-            return False
-
-        for role in user_entity["user_security_roles"]:
-            lowered_role = role["security_role"]["name"].lower()
-            if lowered_role in self.role_list:
+        for role_name in settings_roles:
+            if role_name.lower() in user_roles:
                 return True
         return False

-    def _get_user_entity(self, event):
+    @classmethod
+    def get_user_entity_from_event(cls, session, event):
         """Query user entity from event."""

         not_set = object()
@@ -251,17 +233,91 @@ class ServerAction(BaseAction):
         user_id = user_info.get("id")
         username = user_info.get("username")
         if user_id:
-            user_entity = self.session.query(
+            user_entity = session.query(
                 "User where id is {}".format(user_id)
             ).first()
         if not user_entity and username:
-            user_entity = self.session.query(
+            user_entity = session.query(
                 "User where username is {}".format(username)
             ).first()
         event["data"]["user_entity"] = user_entity
         return user_entity

+    @classmethod
+    def get_user_roles_from_event(cls, session, event):
+        """Query user's security role names from event."""
+
+        not_set = object()
+
+        user_roles = event["data"].get("user_roles", not_set)
+        if user_roles is not_set:
+            user_roles = []
+            user_entity = cls.get_user_entity_from_event(session, event)
+            for role in user_entity["user_security_roles"]:
+                user_roles.append(role["security_role"]["name"].lower())
+            event["data"]["user_roles"] = user_roles
+        return user_roles
+
+    def get_project_name_from_event(self, session, event, entities):
+        """Load or query and fill project name from/to event data.
+
+        Project name is queried from entities' project entity and stored to
+        event data so it does not have to be queried again.
+
+        Args:
+            session (ftrack_api.Session): Current session.
+            event (ftrack_api.Event): Processed event by session.
+            entities (list): Ftrack entities of selection.
+        """
+
+        # Try to get project name from event
+        project_name = event["data"].get("project_name")
+        if not project_name:
+            project_entity = self.get_project_from_entity(
+                entities[0], session
+            )
+            project_name = project_entity["full_name"]
+
+            event["data"]["project_name"] = project_name
+        return project_name
+
+    def get_ftrack_settings(self, session, event, entities):
+        project_name = self.get_project_name_from_event(
+            session, event, entities
+        )
+        project_settings = self.get_project_settings_from_event(
+            event, project_name
+        )
+        return project_settings["ftrack"]
+
+    def valid_roles(self, session, entities, event):
+        """Validate user roles by settings.
+
+        Method requires the `settings_key` attribute to be set.
+        """
+        ftrack_settings = self.get_ftrack_settings(session, event, entities)
+        settings = (
+            ftrack_settings[self.settings_frack_subkey][self.settings_key]
+        )
+        if self.settings_enabled_key:
+            if not settings.get(self.settings_enabled_key, True):
+                return False
+
+        user_role_list = self.get_user_roles_from_event(session, event)
+        if not self.roles_check(settings.get("role_list"), user_role_list):
+            return False
+        return True
+
+
+class ServerAction(BaseAction):
+    """Action class meant to be used on event server.
+
+    Unlike the `BaseAction` roles are not checked on register but on discover.
+    For the same reason register is modified to not filter topics by username.
+    """
+
+    settings_frack_subkey = "events"
+
     def register(self):
         """Register subcription to Ftrack event hub."""

         self.session.event_hub.subscribe(
diff --git a/pype/modules/ftrack/lib/ftrack_base_handler.py b/pype/modules/ftrack/lib/ftrack_base_handler.py
index 022c4f0829..74c31d1c6f 100644
--- a/pype/modules/ftrack/lib/ftrack_base_handler.py
+++ b/pype/modules/ftrack/lib/ftrack_base_handler.py
@@ -37,7 +37,6 @@ class BaseHandler(object):
     type = 'No-type'
     ignore_me = False
     preactions = []
-    role_list = []

     @staticmethod
     def join_query_keys(keys):
@@ -142,28 +141,7 @@ class BaseHandler(object):
     def reset_session(self):
         self.session.reset()

-    def _register_role_check(self):
-        if not self.role_list or not isinstance(self.role_list, (list, tuple)):
-            return
-
-        user_entity = self.session.query(
-            "User where username is \"{}\"".format(self.session.api_user)
-        ).one()
-        available = False
-        lowercase_rolelist = [
-            role_name.lower()
-            for role_name in self.role_list
-        ]
-        for role in user_entity["user_security_roles"]:
-            if role["security_role"]["name"].lower() in lowercase_rolelist:
-                available = True
-                break
-        if available is False:
-            raise MissingPermision
-
     def _preregister(self):
-        self._register_role_check()
-
         # Custom validations
         result = self.preregister()
         if result is None:
@@ -550,7 +528,7 @@ class BaseHandler(object):
             "Publishing event: {}"
         ).format(str(event.__dict__)))

-    def get_project_from_entity(self, entity):
+    def get_project_from_entity(self, entity, session=None):
         low_entity_type = entity.entity_type.lower()
         if low_entity_type == "project":
             return entity
@@ -571,72 +549,32 @@ class BaseHandler(object):
             return parent["project"]

         project_data = entity["link"][0]
-        return self.session.query(
+
+        if session is None:
+            session = self.session
+        return session.query(
             "Project where id is {}".format(project_data["id"])
         ).one()

-    def get_project_entity_from_event(self, session, event, project_id):
-        """Load or query and fill project entity from/to event data.
-
-        Project data are stored by ftrack id because in most cases it is
-        easier to access project id than project name.
-
-        Args:
-            session (ftrack_api.Session): Current session.
-            event (ftrack_api.Event): Processed event by session.
-            project_id (str): Ftrack project id.
-        """
-        if not project_id:
-            raise ValueError(
-                "Entered `project_id` is not valid. {} ({})".format(
-                    str(project_id), str(type(project_id))
-                )
-            )
-
-        # Try to get project entity from event
-        project_entities = event["data"].get("project_entities")
-        if not project_entities:
-            project_entities = {}
-            event["data"]["project_entities"] = project_entities
-
-        project_entity = project_entities.get(project_id)
-        if not project_entity:
-            # Get project entity from task and store to event
-            project_entity = session.get("Project", project_id)
-            event["data"]["project_entities"][project_id] = project_entity
-        return project_entity
-
-    def get_settings_for_project(
-        self, session, event, project_id=None, project_entity=None
-    ):
+    def get_project_settings_from_event(self, event, project_name):
         """Load or fill pype's project settings from event data.

         Project data are stored by ftrack id because in most cases it is
         easier to access project id than project name.

         Args:
-            session (ftrack_api.Session): Current session.
             event (ftrack_api.Event): Processed event by session.
-            project_id (str): Ftrack project id. Must be entered if
-                project_entity is not.
-            project_entity (ftrack_api.Entity): Project entity. Must be entered
-                if project_id is not.
+            project_name (str): Project name.
         """
-        if not project_entity:
-            project_entity = self.get_project_entity_from_event(
-                session, event, project_id
-            )
-
-        project_name = project_entity["full_name"]
-
         project_settings_by_id = event["data"].get("project_settings")
         if not project_settings_by_id:
             project_settings_by_id = {}
             event["data"]["project_settings"] = project_settings_by_id

-        project_settings = project_settings_by_id.get(project_id)
+        project_settings = project_settings_by_id.get(project_name)
         if not project_settings:
             project_settings = get_project_settings(project_name)
-            event["data"]["project_settings"][project_id] = project_settings
+            event["data"]["project_settings"][project_name] = project_settings

         return project_settings

     @staticmethod
diff --git a/pype/modules/ftrack/lib/ftrack_event_handler.py b/pype/modules/ftrack/lib/ftrack_event_handler.py
index 53b78ccc17..af565c5421 100644
--- a/pype/modules/ftrack/lib/ftrack_event_handler.py
+++ b/pype/modules/ftrack/lib/ftrack_event_handler.py
@@ -46,3 +46,34 @@ class BaseEvent(BaseHandler):
             session,
             ignore=['socialfeed', 'socialnotification']
         )
+
+    def get_project_name_from_event(self, session, event, project_id):
+        """Load or query and fill project name from/to event data.
+
+        Project name is stored by ftrack project id because in most cases it
+        is easier to access project id than project name.
+
+        Args:
+            session (ftrack_api.Session): Current session.
+            event (ftrack_api.Event): Processed event by session.
+            project_id (str): Ftrack project id.
+        """
+        if not project_id:
+            raise ValueError(
{} ({})".format( + str(project_id), str(type(project_id)) + ) + ) + # Try to get project entity from event + project_data = event["data"].get("project_data") + if not project_data: + project_data = {} + event["data"]["project_data"] = project_data + + project_name = project_data.get(project_id) + if not project_name: + # Get project entity from task and store to event + project_entity = session.get("Project", project_id) + project_name = project_entity["full_name"] + event["data"]["project_data"][project_id] = project_name + return project_name diff --git a/pype/modules/ftrack/python2_vendor/arrow b/pype/modules/ftrack/python2_vendor/arrow new file mode 160000 index 0000000000..b746fedf72 --- /dev/null +++ b/pype/modules/ftrack/python2_vendor/arrow @@ -0,0 +1 @@ +Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/pype/vendor/backports/__init__.py b/pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py similarity index 100% rename from pype/vendor/backports/__init__.py rename to pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py diff --git a/pype/vendor/backports/configparser/__init__.py b/pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py similarity index 100% rename from pype/vendor/backports/configparser/__init__.py rename to pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py diff --git a/pype/vendor/backports/configparser/helpers.py b/pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py similarity index 100% rename from pype/vendor/backports/configparser/helpers.py rename to pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py diff --git a/pype/vendor/backports/functools_lru_cache.py b/pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py similarity index 100% rename from pype/vendor/backports/functools_lru_cache.py rename to pype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py diff --git a/pype/vendor/builtins/__init__.py b/pype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py similarity index 100% rename from pype/vendor/builtins/__init__.py rename to pype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py diff --git a/pype/modules/ftrack/python2_vendor/ftrack-python-api b/pype/modules/ftrack/python2_vendor/ftrack-python-api new file mode 160000 index 0000000000..d277f474ab --- /dev/null +++ b/pype/modules/ftrack/python2_vendor/ftrack-python-api @@ -0,0 +1 @@ +Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e diff --git a/pype/modules/ftrack/tray/ftrack_tray.py b/pype/modules/ftrack/tray/ftrack_tray.py index 56133208c2..3f6432e541 100644 --- a/pype/modules/ftrack/tray/ftrack_tray.py +++ b/pype/modules/ftrack/tray/ftrack_tray.py @@ -13,7 +13,7 @@ from . 
 from . import login_dialog

 from pype.api import Logger, resources

-log = Logger().get_logger("FtrackModule", "ftrack")
+log = Logger().get_logger("FtrackModule")


 class FtrackTrayWrapper:
diff --git a/pype/modules/log_viewer/__init__.py b/pype/modules/log_viewer/__init__.py
new file mode 100644
index 0000000000..672f47c015
--- /dev/null
+++ b/pype/modules/log_viewer/__init__.py
@@ -0,0 +1,6 @@
+from .log_view_module import LogViewModule
+
+
+__all__ = (
+    "LogViewModule",
+)
diff --git a/pype/modules/logging/logging_module.py b/pype/modules/log_viewer/log_view_module.py
similarity index 96%
rename from pype/modules/logging/logging_module.py
rename to pype/modules/log_viewer/log_view_module.py
index 06101b51a5..1252eaf888 100644
--- a/pype/modules/logging/logging_module.py
+++ b/pype/modules/log_viewer/log_view_module.py
@@ -2,7 +2,7 @@ from pype.api import Logger
 from .. import PypeModule, ITrayModule


-class LoggingModule(PypeModule, ITrayModule):
+class LogViewModule(PypeModule, ITrayModule):
     name = "log_viewer"

     def initialize(self, modules_settings):
diff --git a/vendor/README.md b/pype/modules/log_viewer/tray/__init__.py
similarity index 100%
rename from vendor/README.md
rename to pype/modules/log_viewer/tray/__init__.py
diff --git a/pype/modules/logging/tray/app.py b/pype/modules/log_viewer/tray/app.py
similarity index 100%
rename from pype/modules/logging/tray/app.py
rename to pype/modules/log_viewer/tray/app.py
diff --git a/pype/modules/logging/tray/models.py b/pype/modules/log_viewer/tray/models.py
similarity index 91%
rename from pype/modules/logging/tray/models.py
rename to pype/modules/log_viewer/tray/models.py
index 3591f3dde2..6820d5bcb9 100644
--- a/pype/modules/logging/tray/models.py
+++ b/pype/modules/log_viewer/tray/models.py
@@ -1,9 +1,6 @@
 import collections
 from Qt import QtCore, QtGui
-from pype.api import Logger
-from pype.lib.log import _bootstrap_mongo_log, LOG_COLLECTION_NAME
-
-log = Logger().get_logger("LogModel", "LoggingModule")
+from pype.lib.log import PypeLogger


 class LogModel(QtGui.QStandardItemModel):
@@ -44,9 +41,14 @@ class LogModel(QtGui.QStandardItemModel):
         self.dbcon = None

         # Crash if connection is not possible to skip this module
-        database = _bootstrap_mongo_log()
-        if LOG_COLLECTION_NAME in database.list_collection_names():
-            self.dbcon = database[LOG_COLLECTION_NAME]
+        if not PypeLogger.initialized:
+            PypeLogger.initialize()
+
+        connection = PypeLogger.get_log_mongo_connection()
+        if connection:
+            PypeLogger.bootstrap_mongo_log()
+            database = connection[PypeLogger.log_database_name]
+            self.dbcon = database[PypeLogger.log_collection_name]

     def headerData(self, section, orientation, role):
         if (
diff --git a/pype/modules/logging/tray/widgets.py b/pype/modules/log_viewer/tray/widgets.py
similarity index 100%
rename from pype/modules/logging/tray/widgets.py
rename to pype/modules/log_viewer/tray/widgets.py
diff --git a/pype/modules/logging/__init__.py b/pype/modules/logging/__init__.py
deleted file mode 100644
index c87d8b7f43..0000000000
--- a/pype/modules/logging/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from .logging_module import LoggingModule
-
-
-__all__ = (
-    "LoggingModule",
-)
diff --git a/pype/modules/rest_api/rest_api.py b/pype/modules/rest_api/rest_api.py
index 2a074fd97a..a30402b5fe 100644
--- a/pype/modules/rest_api/rest_api.py
+++ b/pype/modules/rest_api/rest_api.py
@@ -131,26 +131,61 @@ class RestApiModule(PypeModule, ITrayService):

         module.rest_api_initialization(self)

-    def find_port(self):
-        start_port = self.default_port
-        exclude_ports = self.exclude_ports
+    @staticmethod
+    def find_free_port(port_from, port_to=None, exclude_ports=None, host=None):
+        """Find available socket port from entered range.
+
+        It is also possible to only check if entered port is available.
+
+        Args:
+            port_from (int): Port number which is checked as first.
+            port_to (int): Last port that is checked in sequence from entered
+                `port_from`. Only `port_from` is checked if it is not entered.
+            exclude_ports (list, tuple, set): List of ports that won't be
+                checked from entered range.
+            host (str): Host on which free ports are checked. Set to
+                "localhost" by default.
+        """
+        # Check only entered port if `port_to` is not defined
+        if port_to is None:
+            port_to = port_from
+
+        # Excluded ports (e.g. reserved for other servers/clients)
+        if exclude_ports is None:
+            exclude_ports = []
+
+        # Default host is localhost but it is possible to look for other hosts
+        if host is None:
+            host = "localhost"
+
         found_port = None
-        # port check takes time so it's lowered to 100 ports
-        for port in range(start_port, start_port+100):
+        for port in range(port_from, port_to + 1):
             if port in exclude_ports:
                 continue
-            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
-                result = sock.connect_ex(("localhost", port))
-                if result != 0:
-                    found_port = port
+
+            sock = None
+            try:
+                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                sock.bind((host, port))
+                found_port = port
+
+            except socket.error:
+                continue
+
+            finally:
+                if sock:
+                    sock.close()
+
             if found_port is not None:
                 break

-        if found_port is None:
-            return None
+        return found_port

     def tray_init(self):
-        port = self.find_port()
+        port = self.find_free_port(
+            self.default_port, self.default_port + 100, self.exclude_ports
+        )
         self.rest_api_url = "http://localhost:{}".format(port)
         self.rest_api_thread = RestApiThread(self, port)
         self.register_statics("/res", resources.RESOURCES_DIR)
diff --git a/pype/modules/settings_action.py b/pype/modules/settings_action.py
index 0d56a6c5ae..c1fa8a68bc 100644
--- a/pype/modules/settings_action.py
+++ b/pype/modules/settings_action.py
@@ -45,8 +45,16 @@ class SettingsAction(PypeModule, ITrayAction):
         if not self.settings_window:
             raise AssertionError("Window is not initialized.")

+        # Store if was visible
+        was_visible = self.settings_window.isVisible()
+
+        # Show settings gui
         self.settings_window.show()

         # Pull window to the front.
         self.settings_window.raise_()
         self.settings_window.activateWindow()
+
+        # Reset content if was not visible
+        if not was_visible:
+            self.settings_window.reset()
diff --git a/pype/plugins/ftrack/publish/collect_ftrack_api.py b/pype/plugins/ftrack/publish/collect_ftrack_api.py
index 59839d7710..1683ec4bb7 100644
--- a/pype/plugins/ftrack/publish/collect_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/collect_ftrack_api.py
@@ -1,11 +1,6 @@
 import os
-import pyblish.api
 import logging
-
-try:
-    import ftrack_api_old as ftrack_api
-except Exception:
-    import ftrack_api
+import pyblish.api


 class CollectFtrackApi(pyblish.api.ContextPlugin):
@@ -22,12 +17,14 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
         ftrack_log.setLevel(logging.WARNING)

         # Collect session
+        # NOTE Import python module here to know if import was successful
+        import ftrack_api
+
         session = ftrack_api.Session(auto_connect_event_hub=True)
         self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
         context.data["ftrackSession"] = session

         # Collect task
-        project_name = os.environ.get('AVALON_PROJECT', '')
         asset_name = os.environ.get('AVALON_ASSET', '')
         task_name = os.environ.get('AVALON_TASK', None)

diff --git a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
index a1377cc771..c4f7726071 100644
--- a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
+++ b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
@@ -36,7 +36,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
     order = pyblish.api.IntegratorOrder - 0.04
     label = 'Integrate Hierarchy To Ftrack'
     families = ["shot"]
-    hosts = ["hiero"]
+    hosts = ["hiero", "resolve"]
     optional = False

     def process(self, context):
diff --git a/pype/plugins/global/publish/collect_hierarchy.py b/pype/plugins/global/publish/collect_hierarchy.py
new file mode 100644
index 0000000000..5c5dbf018c
--- /dev/null
+++ b/pype/plugins/global/publish/collect_hierarchy.py
@@ -0,0 +1,113 @@
+import pyblish.api
+import avalon.api as avalon
+
+
+class CollectHierarchy(pyblish.api.ContextPlugin):
+    """Collecting hierarchy from `parents`.
+
+    Hierarchy is collected from `clip` family instances coming from the
+    request json data file.
+
+    It will add `hierarchical_context` into each instance for integrate
+    plugins to be able to create needed parents for the context if they
+    don't exist yet.
+    """
+
+    label = "Collect Hierarchy"
+    order = pyblish.api.CollectorOrder - 0.57
+    families = ["shot"]
+    hosts = ["resolve"]
+
+    def process(self, context):
+        temp_context = {}
+        project_name = avalon.Session["AVALON_PROJECT"]
+        final_context = {}
+        final_context[project_name] = {}
+        final_context[project_name]['entity_type'] = 'Project'
+
+        for instance in context:
+            self.log.info("Processing instance: `{}` ...".format(instance))
+
+            # shot data dict
+            shot_data = {}
+            family = instance.data.get("family")
+
+            # filter out all inappropriate instances
+            if not instance.data["publish"]:
+                continue
+
+            # exclude other families than self.families with intersection
+            if not set(self.families).intersection([family]):
+                continue
+
+            # exclude if masterLayer is not set to True
+            if not instance.data.get("masterLayer"):
+                continue
+
+            # get asset build data if any available
+            shot_data["inputs"] = [
+                x["_id"] for x in instance.data.get("assetbuilds", [])
+            ]
+
+            # suppose that all instances are Shots
+            shot_data['entity_type'] = 'Shot'
+            shot_data['tasks'] = instance.data.get("tasks") or []
+            shot_data["comments"] = instance.data.get("comments", [])
+
+            shot_data['custom_attributes'] = {
+                "handleStart": instance.data["handleStart"],
+                "handleEnd": instance.data["handleEnd"],
+                "frameStart": instance.data["frameStart"],
+                "frameEnd": instance.data["frameEnd"],
+                "clipIn": instance.data["clipIn"],
+                "clipOut": instance.data["clipOut"],
+                'fps': instance.context.data["fps"],
+                "resolutionWidth": instance.data["resolutionWidth"],
+                "resolutionHeight": instance.data["resolutionHeight"],
+                "pixelAspect": instance.data["pixelAspect"]
+            }
+
+            actual = {instance.data["asset"]: shot_data}
+
+            for parent in reversed(instance.data["parents"]):
+                next_dict = {}
+                parent_name = parent["entity_name"]
+                next_dict[parent_name] = {}
+                next_dict[parent_name]["entity_type"] = parent[
+                    "entity_type"].capitalize()
+                next_dict[parent_name]["childs"] = actual
+                actual = next_dict
+
+            temp_context = self._update_dict(temp_context, actual)
+
+        # skip if nothing for hierarchy available
+        if not temp_context:
+            return
+
+        final_context[project_name]['childs'] = temp_context
+
+        # adding hierarchy context to context
+        context.data["hierarchyContext"] = final_context
+        self.log.debug("context.data[hierarchyContext] is: {}".format(
+            context.data["hierarchyContext"]))
+
+    def _update_dict(self, parent_dict, child_dict):
+        """
+        Nest each child dict into its parent.
diff --git a/pype/plugins/global/publish/collect_otio_frame_ranges.py b/pype/plugins/global/publish/collect_otio_frame_ranges.py
new file mode 100644
index 0000000000..849a2c2475
--- /dev/null
+++ b/pype/plugins/global/publish/collect_otio_frame_ranges.py
@@ -0,0 +1,70 @@
+"""
+Requires:
+    otioTimeline -> context data attribute
+    review -> instance data attribute
+    masterLayer -> instance data attribute
+    otioClip -> instance data attribute
+"""
+import opentimelineio as otio
+import pyblish.api
+import pype.lib
+from pprint import pformat
+
+
+class CollectOtioFrameRanges(pyblish.api.InstancePlugin):
+    """Get OTIO ranges from the otio clip.
+
+    Adds timeline and source ranges to instance data."""
+
+    label = "Collect OTIO Frame Ranges"
+    order = pyblish.api.CollectorOrder - 0.58
+    families = ["shot", "clip"]
+    hosts = ["resolve"]
+
+    def process(self, instance):
+        # get basic variables
+        otio_clip = instance.data["otioClip"]
+        workfile_start = instance.data["workfileFrameStart"]
+
+        # get ranges
+        otio_tl_range = otio_clip.range_in_parent()
+        otio_src_range = otio_clip.source_range
+        otio_available_range = otio_clip.available_range()
+        otio_tl_range_handles = pype.lib.otio_range_with_handles(
+            otio_tl_range, instance)
+        otio_src_range_handles = pype.lib.otio_range_with_handles(
+            otio_src_range, instance)
+
+        # get source available start frame
+        src_starting_from = otio.opentime.to_frames(
+            otio_available_range.start_time,
+            otio_available_range.start_time.rate)
+
+        # convert to frames
+        range_convert = pype.lib.otio_range_to_frame_range
+        tl_start, tl_end = range_convert(otio_tl_range)
+        tl_start_h, tl_end_h = range_convert(otio_tl_range_handles)
+        src_start, src_end = range_convert(otio_src_range)
+        src_start_h, src_end_h = range_convert(otio_src_range_handles)
+        frame_start = workfile_start
+        frame_end = frame_start + otio.opentime.to_frames(
+            otio_tl_range.duration, otio_tl_range.duration.rate) - 1
+
+        data = {
+            "frameStart": frame_start,
+            "frameEnd": frame_end,
+            "clipIn": tl_start,
+            "clipOut": tl_end,
+            "clipInH": tl_start_h,
+            "clipOutH": tl_end_h,
+            "sourceStart": src_starting_from + src_start,
+            "sourceEnd": src_starting_from + src_end,
+            "sourceStartH": src_starting_from + src_start_h,
+            "sourceEndH": src_starting_from + src_end_h,
+        }
+        instance.data.update(data)
+        self.log.debug(
+            "_ data: {}".format(pformat(data)))
+        self.log.debug(
+            "_ instance.data: {}".format(pformat(instance.data)))
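The `pype.lib` range helpers are used throughout these collectors; below is a rough stand-in for `otio_range_to_frame_range` (an assumption about its behaviour, not the actual implementation) built on the same `otio.opentime` calls used above:

```python
import opentimelineio as otio


def otio_range_to_frame_range(time_range):
    """Hypothetical equivalent: inclusive (start, end) frame numbers."""
    start = otio.opentime.to_frames(
        time_range.start_time, time_range.start_time.rate)
    duration = otio.opentime.to_frames(
        time_range.duration, time_range.duration.rate)
    return start, start + duration - 1


rng = otio.opentime.TimeRange(
    start_time=otio.opentime.RationalTime(86400, 24.0),  # frame 86400 @ 24fps
    duration=otio.opentime.RationalTime(48, 24.0),       # 48 frames long
)
print(otio_range_to_frame_range(rng))  # -> (86400, 86447)
```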
diff --git a/pype/plugins/global/publish/collect_otio_review.py b/pype/plugins/global/publish/collect_otio_review.py
new file mode 100644
index 0000000000..0c7eeaea44
--- /dev/null
+++ b/pype/plugins/global/publish/collect_otio_review.py
@@ -0,0 +1,99 @@
+"""
+Requires:
+    instance -> otioClip
+    context -> otioTimeline
+
+Optional:
+    otioClip.metadata -> masterLayer
+
+Provides:
+    instance -> otioReviewClips
+    instance -> families (adding ["review", "ftrack"])
+"""
+
+import opentimelineio as otio
+import pyblish.api
+from pprint import pformat
+
+
+class CollectOtioReview(pyblish.api.InstancePlugin):
+    """Get the matching otio track from the defined review layer"""
+
+    label = "Collect OTIO Review"
+    order = pyblish.api.CollectorOrder - 0.57
+    families = ["clip"]
+    hosts = ["resolve"]
+
+    def process(self, instance):
+        # get basic variables
+        otio_review_clips = list()
+        otio_timeline = instance.context.data["otioTimeline"]
+        otio_clip = instance.data["otioClip"]
+
+        # optionally get `reviewTrack`
+        review_track_name = otio_clip.metadata.get("reviewTrack")
+
+        # generate range in parent
+        otio_tl_range = otio_clip.range_in_parent()
+
+        # calculate real timeline end needed for the clip
+        clip_end_frame = int(
+            otio_tl_range.start_time.value + otio_tl_range.duration.value)
+
+        # skip if no review track is available
+        if not review_track_name:
+            return
+
+        # loop all tracks and match with name in `reviewTrack`
+        for track in otio_timeline.tracks:
+            if review_track_name not in track.name:
+                continue
+
+            # process correct track
+            # establish gap
+            otio_gap = None
+
+            # get track parent range
+            track_rip = track.range_in_parent()
+
+            # calculate real track end frame
+            track_end_frame = int(
+                track_rip.start_time.value + track_rip.duration.value)
+
+            # check that the end of track is not lower than the clip requires
+            if clip_end_frame > track_end_frame:
+                # calculate difference duration
+                gap_duration = clip_end_frame - track_end_frame
+                # create rational time range for gap
+                otio_gap_range = otio.opentime.TimeRange(
+                    start_time=otio.opentime.RationalTime(
+                        float(0),
+                        track_rip.start_time.rate
+                    ),
+                    duration=otio.opentime.RationalTime(
+                        float(gap_duration),
+                        track_rip.start_time.rate
+                    )
+                )
+                # create gap
+                otio_gap = otio.schema.Gap(source_range=otio_gap_range)
+
+            # trim available clips from the defined track as reviewable source
+            otio_review_clips = otio.algorithms.track_trimmed_to_range(
+                track,
+                otio_tl_range
+            )
+            # add gap at the end if track end is shorter than needed
+            if otio_gap:
+                otio_review_clips.append(otio_gap)
+
+        if otio_review_clips:
+            instance.data["families"] += ["review", "ftrack"]
+            instance.data["otioReviewClips"] = otio_review_clips
+            self.log.info(
+                "Creating review track: {}".format(otio_review_clips))
+
+        self.log.debug(
+            "_ instance.data: {}".format(pformat(instance.data)))
+        self.log.debug(
+            "_ families: {}".format(instance.data["families"]))
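Condensed from the branch above: when the review track ends before the clip does, the shortfall is padded with an OTIO `Gap` that is later rendered as black frames (illustrative numbers only):

```python
import opentimelineio as otio

rate = 24.0
clip_end_frame = 120   # hypothetical: clip needs review frames up to 120
track_end_frame = 100  # hypothetical: review track ends at frame 100

gap_duration = clip_end_frame - track_end_frame
otio_gap = otio.schema.Gap(
    source_range=otio.opentime.TimeRange(
        start_time=otio.opentime.RationalTime(0.0, rate),
        duration=otio.opentime.RationalTime(float(gap_duration), rate),
    )
)
print(otio_gap.duration())  # 20 frames at 24 fps
```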
diff --git a/pype/plugins/global/publish/collect_otio_subset_resources.py b/pype/plugins/global/publish/collect_otio_subset_resources.py
new file mode 100644
index 0000000000..d1fd47debd
--- /dev/null
+++ b/pype/plugins/global/publish/collect_otio_subset_resources.py
@@ -0,0 +1,182 @@
+# TODO: rewrite this module docstring
+"""
+Requires:
+    instance -> otioClip
+
+Provides:
+    instance -> representations
+    instance -> versionData
+"""
+import os
+import clique
+import opentimelineio as otio
+import pyblish.api
+import pype
+
+
+class CollectOtioSubsetResources(pyblish.api.InstancePlugin):
+    """Get resources for a subset version"""
+
+    label = "Collect OTIO Subset Resources"
+    order = pyblish.api.CollectorOrder - 0.57
+    families = ["clip"]
+    hosts = ["resolve"]
+
+    def process(self, instance):
+        if not instance.data.get("representations"):
+            instance.data["representations"] = list()
+        version_data = dict()
+
+        # get basic variables
+        otio_clip = instance.data["otioClip"]
+        frame_start = instance.data["frameStart"]
+        frame_end = instance.data["frameEnd"]
+
+        # generate range in parent
+        otio_src_range = otio_clip.source_range
+        otio_available_range = otio_clip.available_range()
+        trimmed_media_range = pype.lib.trim_media_range(
+            otio_available_range, otio_src_range)
+
+        # calculate with handles
+        otio_src_range_handles = pype.lib.otio_range_with_handles(
+            otio_src_range, instance)
+        trimmed_media_range_h = pype.lib.trim_media_range(
+            otio_available_range, otio_src_range_handles)
+
+        # frame start and end from media
+        s_frame_start, s_frame_end = pype.lib.otio_range_to_frame_range(
+            trimmed_media_range)
+        a_frame_start, a_frame_end = pype.lib.otio_range_to_frame_range(
+            otio_available_range)
+        a_frame_start_h, a_frame_end_h = pype.lib.otio_range_to_frame_range(
+            trimmed_media_range_h)
+
+        # fix frame_start and frame_end to be in range of media
+        if a_frame_start_h < a_frame_start:
+            a_frame_start_h = a_frame_start
+
+        if a_frame_end_h > a_frame_end:
+            a_frame_end_h = a_frame_end
+
+        # count the difference for frame_start and frame_end
+        diff_start = s_frame_start - a_frame_start_h
+        diff_end = a_frame_end_h - s_frame_end
+
+        # add start and end range data to version data
+        # for loader plugins to be correctly displayed and loaded
+        version_data.update({
+            "frameStart": frame_start,
+            "frameEnd": frame_end,
+            "handleStart": diff_start,
+            "handleEnd": diff_end,
+            "fps": otio_available_range.start_time.rate
+        })
+
+        # change frame_start and frame_end values
+        # for representation to be correctly renumbered in integrate_new
+        frame_start -= diff_start
+        frame_end += diff_end
+
+        media_ref = otio_clip.media_reference
+        metadata = media_ref.metadata
+        is_sequence = False
+
+        # check in two ways if it is a sequence
+        if hasattr(otio.schema, "ImageSequenceReference"):
+            # for OpenTimelineIO 0.13 and newer
+            if isinstance(media_ref,
+                          otio.schema.ImageSequenceReference):
+                is_sequence = True
+        else:
+            # for OpenTimelineIO 0.12 and older
+            if metadata.get("padding"):
+                is_sequence = True
+
+        self.log.info(
+            "frame_start-frame_end: {}-{}".format(frame_start, frame_end))
+
+        if is_sequence:
+            # file sequence way
+            if hasattr(media_ref, "target_url_base"):
+                self.staging_dir = media_ref.target_url_base
+                head = media_ref.name_prefix
+                tail = media_ref.name_suffix
+                collection = clique.Collection(
+                    head=head,
+                    tail=tail,
+                    padding=media_ref.frame_zero_padding
+                )
+                collection.indexes.update(
+                    [i for i in range(a_frame_start_h, (a_frame_end_h + 1))])
+
+                self.log.debug(collection)
+                repre = self._create_representation(
+                    frame_start, frame_end, collection=collection)
+            else:
+                # in case it is a file sequence but not the new OTIO schema
+                # `ImageSequenceReference`
+                path = media_ref.target_url
+                collection_data = pype.lib.make_sequence_collection(
+                    path, trimmed_media_range, metadata)
+                self.staging_dir, collection = collection_data
+
+                self.log.debug(collection)
+                repre = self._create_representation(
+                    frame_start, frame_end, collection=collection)
+        else:
+            dirname, filename = os.path.split(media_ref.target_url)
+            self.staging_dir = dirname
+
+            self.log.debug(filename)
+            repre = self._create_representation(
+                frame_start, frame_end, file=filename)
+
+        if repre:
+            instance.data["versionData"] = version_data
+            self.log.debug(">>>>>>>> version data {}".format(version_data))
+            # add representation to instance data
+            instance.data["representations"].append(repre)
+            self.log.debug(">>>>>>>> {}".format(repre))
+
+    def _create_representation(self, start, end, **kwargs):
+        """
+        Create representation data.
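+
+        Builds either a file-sequence or a single-file representation,
+        depending on whether a `collection` or a `file` kwarg is passed.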
+ + Args: + start (int): start frame + end (int): end frame + kwargs (dict): optional data + + Returns: + dict: representation data + """ + + # create default representation data + representation_data = { + "frameStart": start, + "frameEnd": end, + "stagingDir": self.staging_dir + } + + if kwargs.get("collection"): + collection = kwargs.get("collection") + files = [f for f in collection] + ext = collection.format("{tail}") + representation_data.update({ + "name": ext[1:], + "ext": ext[1:], + "files": files, + "frameStart": start, + "frameEnd": end, + }) + return representation_data + if kwargs.get("file"): + file = kwargs.get("file") + ext = os.path.splitext(file)[-1] + representation_data.update({ + "name": ext[1:], + "ext": ext[1:], + "files": file, + "frameStart": start, + "frameEnd": end, + }) + return representation_data diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 501162b6a6..c14dfba50a 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -6,6 +6,9 @@ import tempfile import pype.api import pyblish +from pype.lib import should_decompress, \ + get_decompress_dir, decompress +import shutil class ExtractBurnin(pype.api.Extractor): @@ -28,7 +31,9 @@ class ExtractBurnin(pype.api.Extractor): "premiere", "standalonepublisher", "harmony", - "fusion" + "fusion", + "aftereffects", + # "resolve" ] optional = True @@ -204,6 +209,26 @@ class ExtractBurnin(pype.api.Extractor): # Prepare paths and files for process. self.input_output_paths(new_repre, temp_data, filename_suffix) + decompressed_dir = '' + full_input_path = temp_data["full_input_path"] + do_decompress = should_decompress(full_input_path) + if do_decompress: + decompressed_dir = get_decompress_dir() + + decompress( + decompressed_dir, + full_input_path, + temp_data["frame_start"], + temp_data["frame_end"], + self.log + ) + + # input path changed, 'decompressed' added + input_file = os.path.basename(full_input_path) + temp_data["full_input_path"] = os.path.join( + decompressed_dir, + input_file) + # Data for burnin script script_data = { "input": temp_data["full_input_path"], @@ -240,7 +265,9 @@ class ExtractBurnin(pype.api.Extractor): self.log.debug("Executing: {}".format(subprcs_cmd)) # Run burnin script - pype.api.subprocess(subprcs_cmd, shell=True, logger=self.log) + pype.api.run_subprocess( + subprcs_cmd, shell=True, logger=self.log + ) # Remove the temporary json os.remove(temporary_json_filepath) @@ -263,6 +290,9 @@ class ExtractBurnin(pype.api.Extractor): os.remove(filepath) self.log.debug("Removed: \"{}\"".format(filepath)) + if do_decompress and os.path.exists(decompressed_dir): + shutil.rmtree(decompressed_dir) + def prepare_basic_data(self, instance): """Pick data from instance for processing and for burnin strings. 
diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
index 551e57796a..1c921a90d4 100644
--- a/pype/plugins/global/publish/extract_jpeg.py
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -3,15 +3,21 @@ import os
 import pyblish.api
 import pype.api
 import pype.lib
+from pype.lib import should_decompress, \
+    get_decompress_dir, decompress
+import shutil
 
 
 class ExtractJpegEXR(pyblish.api.InstancePlugin):
     """Create jpg thumbnail from sequence using ffmpeg"""
 
     label = "Extract Jpeg EXR"
-    hosts = ["shell", "fusion"]
     order = pyblish.api.ExtractorOrder
-    families = ["imagesequence", "render", "render2d", "source"]
+    families = [
+        "imagesequence", "render", "render2d",
+        "source", "plate", "take"
+    ]
+    hosts = ["shell", "fusion", "resolve"]
     enabled = False
 
     # presetable attribute
@@ -22,7 +28,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
         if 'crypto' in instance.data['subset']:
            return
 
-        # ffmpeg doesn't support multipart exrs
+        do_decompress = False
+        # ffmpeg doesn't support multipart exrs, use oiiotool if available
         if instance.data.get("multipartExr") is True:
             return
 
@@ -36,10 +43,6 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
         # filter out mov and img sequences
         representations_new = representations[:]
 
-        if instance.data.get("multipartExr"):
-            # ffmpeg doesn't support multipart exrs
-            return
-
         for repre in representations:
             tags = repre.get("tags", [])
             self.log.debug(repre)
@@ -50,7 +53,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
             if not isinstance(repre['files'], (list, tuple)):
                 input_file = repre['files']
             else:
-                input_file = repre['files'][0]
+                # pick the middle file of the sequence as thumbnail source
+                file_index = len(repre['files']) // 2
+                input_file = repre['files'][file_index]
 
             stagingdir = os.path.normpath(repre.get("stagingDir"))
 
@@ -60,6 +64,19 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
             full_input_path = os.path.join(stagingdir, input_file)
             self.log.info("input {}".format(full_input_path))
 
+            decompressed_dir = ''
+            do_decompress = should_decompress(full_input_path)
+            if do_decompress:
+                decompressed_dir = get_decompress_dir()
+
+                decompress(
+                    decompressed_dir,
+                    full_input_path)
+                # input path changed, 'decompressed' added
+                full_input_path = os.path.join(
+                    decompressed_dir,
+                    input_file)
+
             filename = os.path.splitext(input_file)[0]
             if not filename.endswith('.'):
                 filename += "."
@@ -93,7 +110,16 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
 
             # run subprocess
             self.log.debug("{}".format(subprocess_jpeg))
-            pype.api.subprocess(subprocess_jpeg, shell=True)
+            try:  # temporary until oiiotool is supported cross platform
+                pype.api.run_subprocess(
+                    subprocess_jpeg, shell=True, logger=self.log
+                )
+            except RuntimeError as exp:
+                if "Compression" in str(exp):
+                    self.log.debug(
+                        "Unsupported compression on input files. Skipping!!!")
+                    return
+                raise
 
             if "representations" not in instance.data:
                 instance.data["representations"] = []
 
@@ -111,4 +137,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
                 self.log.debug("Adding: {}".format(representation))
                 representations_new.append(representation)
 
+        if do_decompress and os.path.exists(decompressed_dir):
+            shutil.rmtree(decompressed_dir)
+
         instance.data["representations"] = representations_new
diff --git a/pype/plugins/global/publish/extract_otio_file.py b/pype/plugins/global/publish/extract_otio_file.py
new file mode 100644
index 0000000000..84932f07a8
--- /dev/null
+++ b/pype/plugins/global/publish/extract_otio_file.py
@@ -0,0 +1,41 @@
+import os
+import pyblish.api
+import pype.api
+import opentimelineio as otio
+
+
+class ExtractOTIOFile(pype.api.Extractor):
+    """
+    Extractor exporting the timeline as an OTIO file
+    """
+
+    label = "Extract OTIO file"
+    order = pyblish.api.ExtractorOrder - 0.45
+    families = ["workfile"]
+    hosts = ["resolve"]
+
+    def process(self, instance):
+        # create representation data
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        name = instance.data["name"]
+        staging_dir = self.staging_dir(instance)
+
+        otio_timeline = instance.context.data["otioTimeline"]
+        # create otio timeline representation
+        otio_file_name = name + ".otio"
+        otio_file_path = os.path.join(staging_dir, otio_file_name)
+        otio.adapters.write_to_file(otio_timeline, otio_file_path)
+
+        representation_otio = {
+            'name': "otio",
+            'ext': "otio",
+            'files': otio_file_name,
+            "stagingDir": staging_dir,
+        }
+
+        instance.data["representations"].append(representation_otio)
+
+        self.log.info("Added OTIO file representation: {}".format(
+            representation_otio))
diff --git a/pype/plugins/global/publish/extract_otio_review.py b/pype/plugins/global/publish/extract_otio_review.py
new file mode 100644
index 0000000000..396dcb08a8
--- /dev/null
+++ b/pype/plugins/global/publish/extract_otio_review.py
@@ -0,0 +1,426 @@
+"""
+Requires:
+    instance -> handleStart
+    instance -> handleEnd
+    instance -> otioClip
+    instance -> otioReviewClips
+
+Optional:
+    instance -> workfileFrameStart
+    instance -> resolutionWidth
+    instance -> resolutionHeight
+
+Provides:
+    instance -> representations
+"""
+
+import os
+import clique
+import opentimelineio as otio
+from pyblish import api
+import pype
+
+
+class ExtractOTIOReview(pype.api.Extractor):
+    """
+    Extract an OTIO timeline into one concatenated image sequence.
+
+    The `otioReviewClips` hold the trimmed ranges of clips relative to
+    the `otioClip`. Handles are added while looping over the available
+    list of Gaps and Clips in the track: the start handle (head) is added
+    before the first Gap or Clip and the end handle (tail) after the last
+    Clip or Gap. If source material is missing after the handles are
+    added, a Gap is inserted. Finally all Gaps are converted to black
+    frames, the available material is converted to image sequence frames,
+    and a representation is created and added to the instance.
+
+    At the moment only image sequence output is supported.
+
+    """
+
+    order = api.ExtractorOrder - 0.45
+    label = "Extract OTIO review"
+    hosts = ["resolve"]
+    families = ["review"]
+
+    # plugin default attributes
+    temp_file_head = "tempFile."
+    to_width = 1280
+    to_height = 720
+    output_ext = ".jpg"
+
+    def process(self, instance):
+        # TODO: convert resulting image sequence to mp4
+        # TODO: add audio output to the mp4 if audio in review is on.
+ + # get otio clip and other time info from instance clip + # TODO: what if handles are different in `versionData`? + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + otio_review_clips = instance.data["otioReviewClips"] + + # add plugin wide attributes + self.representation_files = list() + self.used_frames = list() + self.workfile_start = int(instance.data.get( + "workfileFrameStart", 1001)) - handle_start + self.padding = len(str(self.workfile_start)) + self.used_frames.append(self.workfile_start) + self.to_width = instance.data.get( + "resolutionWidth") or self.to_width + self.to_height = instance.data.get( + "resolutionHeight") or self.to_height + + # skip instance if no reviewable data available + if (not isinstance(otio_review_clips[0], otio.schema.Clip)) \ + and (len(otio_review_clips) == 1): + self.log.warning( + "Instance `{}` has nothing to process".format(instance)) + return + else: + self.staging_dir = self.staging_dir(instance) + if not instance.data.get("representations"): + instance.data["representations"] = list() + + # loop available clips in otio track + for index, r_otio_cl in enumerate(otio_review_clips): + # QUESTION: what if transition on clip? + + # get frame range values + src_range = r_otio_cl.source_range + start = src_range.start_time.value + duration = src_range.duration.value + available_range = None + self.actual_fps = src_range.duration.rate + + # add available range only if not gap + if isinstance(r_otio_cl, otio.schema.Clip): + available_range = r_otio_cl.available_range() + self.actual_fps = available_range.duration.rate + + # reframing handles conditions + if (len(otio_review_clips) > 1) and (index == 0): + # more clips | first clip reframing with handle + start -= handle_start + duration += handle_start + elif len(otio_review_clips) > 1 \ + and (index == len(otio_review_clips) - 1): + # more clips | last clip reframing with handle + duration += handle_end + elif len(otio_review_clips) == 1: + # one clip | add both handles + start -= handle_start + duration += (handle_start + handle_end) + + if available_range: + available_range = self._trim_available_range( + available_range, start, duration, self.actual_fps) + + # process all track items of the track + if isinstance(r_otio_cl, otio.schema.Clip): + # process Clip + media_ref = r_otio_cl.media_reference + metadata = media_ref.metadata + is_sequence = None + + # check in two way if it is sequence + if hasattr(otio.schema, "ImageSequenceReference"): + # for OpenTimelineIO 0.13 and newer + if isinstance(media_ref, + otio.schema.ImageSequenceReference): + is_sequence = True + else: + # for OpenTimelineIO 0.12 and older + if metadata.get("padding"): + is_sequence = True + + if is_sequence: + # file sequence way + if hasattr(media_ref, "target_url_base"): + dirname = media_ref.target_url_base + head = media_ref.name_prefix + tail = media_ref.name_suffix + first, last = pype.lib.otio_range_to_frame_range( + available_range) + collection = clique.Collection( + head=head, + tail=tail, + padding=media_ref.frame_zero_padding + ) + collection.indexes.update( + [i for i in range(first, (last + 1))]) + # render segment + self._render_seqment( + sequence=[dirname, collection]) + # generate used frames + self._generate_used_frames( + len(collection.indexes)) + else: + # in case it is file sequence but not new OTIO schema + # `ImageSequenceReference` + path = media_ref.target_url + collection_data = pype.lib.make_sequence_collection( + path, available_range, metadata) + dir_path, 
collection = collection_data
+
+                        # render segment
+                        self._render_seqment(
+                            sequence=[dir_path, collection])
+                        # generate used frames
+                        self._generate_used_frames(
+                            len(collection.indexes))
+                else:
+                    # single video file way
+                    path = media_ref.target_url
+                    # render video file to sequence
+                    self._render_seqment(
+                        video=[path, available_range])
+                    # generate used frames
+                    self._generate_used_frames(
+                        available_range.duration.value)
+            # QUESTION: what if nested track composition is in place?
+            else:
+                # at last process a Gap
+                self._render_seqment(gap=duration)
+                # generate used frames
+                self._generate_used_frames(duration)
+
+        # creating and registering representation
+        representation = self._create_representation(start, duration)
+        instance.data["representations"].append(representation)
+        self.log.info(f"Adding representation: {representation}")
+
+    def _create_representation(self, start, duration):
+        """
+        Create representation data.
+
+        Args:
+            start (int): start frame
+            duration (int): duration frames
+
+        Returns:
+            dict: representation data
+        """
+
+        end = start + duration
+
+        # create default representation data
+        representation_data = {
+            "frameStart": start,
+            "frameEnd": end,
+            "stagingDir": self.staging_dir,
+            "tags": ["review", "ftrackreview", "delete"]
+        }
+
+        collection = clique.Collection(
+            self.temp_file_head,
+            tail=self.output_ext,
+            padding=self.padding,
+            indexes=set(self.used_frames)
+        )
+        start = min(collection.indexes)
+        end = max(collection.indexes)
+
+        files = [f for f in collection]
+        ext = collection.format("{tail}")
+        representation_data.update({
+            "name": ext[1:],
+            "ext": ext[1:],
+            "files": files,
+            "frameStart": start,
+            "frameEnd": end,
+        })
+        return representation_data
+
+    def _trim_available_range(self, avl_range, start, duration, fps):
+        """
+        Trim available media range to source range.
+
+        If a missing media range is detected it is converted into
+        black-frame gaps.
+
+        Args:
+            avl_range (otio.time.TimeRange): media available time range
+            start (int): start frame
+            duration (int): duration frames
+            fps (float): frame rate
+
+        Returns:
+            otio.time.TimeRange: trimmed available range
+        """
+        avl_start = int(avl_range.start_time.value)
+        src_start = int(avl_start + start)
+        avl_duration = int(avl_range.duration.value)
+
+        # if media starts later than the clip requires
+        if src_start < avl_start:
+            # calculate gap
+            gap_duration = avl_start - src_start
+
+            # create gap data to disk
+            self._render_seqment(gap=gap_duration)
+            # generate used frames
+            self._generate_used_frames(gap_duration)
+
+            # fix start and end to correct values
+            start = 0
+            duration -= gap_duration
+
+        # if media duration is shorter than the clip requires
+        if duration > avl_duration:
+            # calculate gap
+            gap_start = int(src_start + avl_duration)
+            gap_end = int(src_start + duration)
+            gap_duration = gap_end - gap_start
+
+            # create gap data to disk
+            self._render_seqment(gap=gap_duration, end_offset=avl_duration)
+            # generate used frames
+            self._generate_used_frames(gap_duration, end_offset=avl_duration)
+
+            # fix duration length
+            duration = avl_duration
+
+        # return correct trimmed range
+        return pype.lib.trim_media_range(
+            avl_range, pype.lib.range_from_frames(start, duration, fps)
+        )
+
+    def _render_seqment(self, sequence=None,
+                        video=None, gap=None, end_offset=None):
+        """
+        Render a segment into image sequence frames.
+
+        Using ffmpeg to convert compatible video and image sources
+        to the defined image sequence format.
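+
+        Exactly one of `sequence`, `video` or `gap` is expected; it
+        selects which ffmpeg input branch below is built.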
+
+        Args:
+            sequence (list): input dir path string, collection object in list
+            video (list)[optional]: video_path string, otio_range in list
+            gap (int)[optional]: gap duration
+            end_offset (int)[optional]: offset gap frame start in frames
+        """
+        # get rendering app path
+        ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
+
+        # create path and frame start to destination
+        output_path, out_frame_start = self._get_ffmpeg_output()
+
+        if end_offset:
+            out_frame_start += end_offset
+
+        # start command list
+        command = [ffmpeg_path]
+
+        if sequence:
+            input_dir, collection = sequence
+            in_frame_start = min(collection.indexes)
+
+            # converting image sequence to image sequence
+            input_file = collection.format("{head}{padding}{tail}")
+            input_path = os.path.join(input_dir, input_file)
+
+            # form command for rendering sequence input
+            command.extend([
+                "-start_number {}".format(in_frame_start),
+                "-i {}".format(input_path)
+            ])
+
+        elif video:
+            video_path, otio_range = video
+            frame_start = otio_range.start_time.value
+            input_fps = otio_range.start_time.rate
+            frame_duration = otio_range.duration.value
+            sec_start = pype.lib.frames_to_secons(frame_start, input_fps)
+            sec_duration = pype.lib.frames_to_secons(frame_duration, input_fps)
+
+            # form command for rendering video input
+            command.extend([
+                "-ss {}".format(sec_start),
+                "-t {}".format(sec_duration),
+                "-i {}".format(video_path)
+            ])
+
+        elif gap:
+            sec_duration = pype.lib.frames_to_secons(
+                gap, self.actual_fps)
+
+            # form command for rendering gap files
+            command.extend([
+                "-t {} -r {}".format(sec_duration, self.actual_fps),
+                "-f lavfi",
+                "-i color=c=black:s={}x{}".format(self.to_width,
+                                                  self.to_height),
+                "-tune stillimage"
+            ])
+
+        # add output attributes
+        command.extend([
+            "-start_number {}".format(out_frame_start),
+            output_path
+        ])
+        # execute
+        self.log.debug("Executing: {}".format(" ".join(command)))
+        pype.api.run_subprocess(
+            " ".join(command), shell=True, logger=self.log
+        )
+
+    def _generate_used_frames(self, duration, end_offset=None):
+        """
+        Generate used frames into the plugin attribute `used_frames`.
+
+        The `used_frames` list is used for checking the next available
+        frame to start with while rendering sequence segments.
+
+        Args:
+            duration (int): duration of frames needed to be generated
+            end_offset (int)[optional]: in case frames need to be offset
+
+        """
+
+        padding = "{{:0{}d}}".format(self.padding)
+        if end_offset:
+            new_frames = list()
+            start_frame = self.used_frames[-1]
+            for index in range((end_offset + 1),
+                               (int(end_offset + duration) + 1)):
+                seq_number = padding.format(start_frame + index)
+                self.log.debug(
+                    f"index: `{index}` | seq_number: `{seq_number}`")
+                new_frames.append(int(seq_number))
+            new_frames += self.used_frames
+            self.used_frames = new_frames
+        else:
+            for _i in range(1, (int(duration) + 1)):
+                if self.used_frames[-1] == self.workfile_start:
+                    seq_number = padding.format(self.used_frames[-1])
+                    self.workfile_start -= 1
+                else:
+                    seq_number = padding.format(self.used_frames[-1] + 1)
+                self.used_frames.append(int(seq_number))
+
+    def _get_ffmpeg_output(self):
+        """
+        Return ffmpeg output command arguments.
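+
+        The start frame continues from the last entry in `used_frames`,
+        so consecutive segments render into one continuous sequence.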
+ + Returns: + str: output_path is path for image sequence output + int: out_frame_start is starting sequence frame + + """ + output_file = "{}{}{}".format( + self.temp_file_head, + "%0{}d".format(self.padding), + self.output_ext + ) + # create path to destination + output_path = os.path.join(self.staging_dir, output_file) + + # generate frame start + out_frame_start = self.used_frames[-1] + 1 + if self.used_frames[-1] == self.workfile_start: + out_frame_start = self.used_frames[-1] + + return output_path, out_frame_start diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index aa8d8accb5..5414ae5289 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -6,6 +6,8 @@ import pyblish.api import clique import pype.api import pype.lib +from pype.lib import should_decompress, \ + get_decompress_dir, decompress class ExtractReview(pyblish.api.InstancePlugin): @@ -14,7 +16,7 @@ class ExtractReview(pyblish.api.InstancePlugin): Compulsory attribute of representation is tags list with "review", otherwise the representation is ignored. - All new represetnations are created and encoded by ffmpeg following + All new representations are created and encoded by ffmpeg following presets found in `pype-config/presets/plugins/global/ publish.json:ExtractReview:outputs`. """ @@ -31,7 +33,8 @@ class ExtractReview(pyblish.api.InstancePlugin): "harmony", "standalonepublisher", "fusion", - "tvpaint" + "tvpaint", + "resolve" ] # Supported extensions @@ -188,15 +191,23 @@ class ExtractReview(pyblish.api.InstancePlugin): temp_data = self.prepare_temp_data(instance, repre, output_def) - ffmpeg_args = self._ffmpeg_arguments( - output_def, instance, new_repre, temp_data - ) + try: # temporary until oiiotool is supported cross platform + ffmpeg_args = self._ffmpeg_arguments( + output_def, instance, new_repre, temp_data + ) + except ZeroDivisionError: + if 'exr' in temp_data["origin_repre"]["ext"]: + self.log.debug("Unsupported compression on input " + + "files. Skipping!!!") + return + raise + subprcs_cmd = " ".join(ffmpeg_args) # run subprocess self.log.debug("Executing: {}".format(subprcs_cmd)) - pype.api.subprocess( + pype.api.run_subprocess( subprcs_cmd, shell=True, logger=self.log ) @@ -318,9 +329,9 @@ class ExtractReview(pyblish.api.InstancePlugin): Args: output_def (dict): Currently processed output definition. instance (Instance): Currently processed instance. - new_repre (dict): Reprensetation representing output of this + new_repre (dict): Representation representing output of this process. - temp_data (dict): Base data for successfull process. + temp_data (dict): Base data for successful process. 
""" # Get FFmpeg arguments from profile presets @@ -331,9 +342,35 @@ class ExtractReview(pyblish.api.InstancePlugin): ffmpeg_video_filters = out_def_ffmpeg_args.get("video_filters") or [] ffmpeg_audio_filters = out_def_ffmpeg_args.get("audio_filters") or [] + if isinstance(new_repre['files'], list): + input_files_urls = [os.path.join(new_repre["stagingDir"], f) for f + in new_repre['files']] + test_path = input_files_urls[0] + else: + test_path = os.path.join( + new_repre["stagingDir"], new_repre['files']) + do_decompress = should_decompress(test_path) + + if do_decompress: + # change stagingDir, decompress first + # calculate all paths with modified directory, used on too many + # places + # will be purged by cleanup.py automatically + orig_staging_dir = new_repre["stagingDir"] + new_repre["stagingDir"] = get_decompress_dir() + # Prepare input and output filepaths self.input_output_paths(new_repre, output_def, temp_data) + if do_decompress: + input_file = temp_data["full_input_path"].\ + replace(new_repre["stagingDir"], orig_staging_dir) + + decompress(new_repre["stagingDir"], input_file, + temp_data["frame_start"], + temp_data["frame_end"], + self.log) + # Set output frames len to 1 when ouput is single image if ( temp_data["output_ext_is_image"] @@ -930,7 +967,7 @@ class ExtractReview(pyblish.api.InstancePlugin): return regexes def validate_value_by_regexes(self, value, in_list): - """Validates in any regexe from list match entered value. + """Validates in any regex from list match entered value. Args: in_list (list): List with regexes. @@ -955,9 +992,9 @@ class ExtractReview(pyblish.api.InstancePlugin): def profile_exclusion(self, matching_profiles): """Find out most matching profile byt host, task and family match. - Profiles are selectivelly filtered. Each profile should have + Profiles are selectively filtered. Each profile should have "__value__" key with list of booleans. Each boolean represents - existence of filter for specific key (host, taks, family). + existence of filter for specific key (host, tasks, family). Profiles are looped in sequence. In each sequence are split into true_list and false_list. For next sequence loop are used profiles in true_list if there are any profiles else false_list is used. @@ -1036,7 +1073,7 @@ class ExtractReview(pyblish.api.InstancePlugin): highest_profile_points = -1 # Each profile get 1 point for each matching filter. Profile with most - # points is returnd. For cases when more than one profile will match + # points is returned. For cases when more than one profile will match # are also stored ordered lists of matching values. 
for profile in self.profiles: profile_points = 0 @@ -1592,8 +1629,9 @@ class ExtractReview(pyblish.api.InstancePlugin): # run subprocess self.log.debug("Executing: {}".format(subprcs_cmd)) - output = pype.api.subprocess(subprcs_cmd, shell=True) - self.log.debug("Output: {}".format(output)) + pype.api.run_subprocess( + subprcs_cmd, shell=True, logger=self.log + ) # create representation data repre_new.update({ @@ -1648,7 +1686,7 @@ class ExtractReview(pyblish.api.InstancePlugin): def add_video_filter_args(self, args, inserting_arg): """ - Fixing video filter argumets to be one long string + Fixing video filter arguments to be one long string Args: args (list): list of string arguments diff --git a/pype/plugins/global/publish/extract_review_slate.py b/pype/plugins/global/publish/extract_review_slate.py index 5cf632406c..65930ea8fa 100644 --- a/pype/plugins/global/publish/extract_review_slate.py +++ b/pype/plugins/global/publish/extract_review_slate.py @@ -186,8 +186,9 @@ class ExtractReviewSlate(pype.api.Extractor): # run slate generation subprocess self.log.debug("Slate Executing: {}".format(slate_subprcs_cmd)) - slate_output = pype.api.subprocess(slate_subprcs_cmd, shell=True) - self.log.debug("Slate Output: {}".format(slate_output)) + pype.api.run_subprocess( + slate_subprcs_cmd, shell=True, logger=self.log + ) # create ffmpeg concat text file path conc_text_file = input_file.replace(ext, "") + "_concat" + ".txt" @@ -221,8 +222,9 @@ class ExtractReviewSlate(pype.api.Extractor): # ffmpeg concat subprocess self.log.debug("Executing concat: {}".format(concat_subprcs_cmd)) - concat_output = pype.api.subprocess(concat_subprcs_cmd, shell=True) - self.log.debug("Output concat: {}".format(concat_output)) + pype.api.run_subprocess( + concat_subprcs_cmd, shell=True, logger=self.log + ) self.log.debug("__ repre[tags]: {}".format(repre["tags"])) repre_update = { diff --git a/pype/plugins/global/publish/extract_scanline_exr.py b/pype/plugins/global/publish/extract_scanline_exr.py index 9c3073d61d..a801baa17c 100644 --- a/pype/plugins/global/publish/extract_scanline_exr.py +++ b/pype/plugins/global/publish/extract_scanline_exr.py @@ -65,7 +65,7 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin): subprocess_exr = " ".join(oiio_cmd) self.log.info(f"running: {subprocess_exr}") - pype.api.subprocess(subprocess_exr) + pype.api.run_subprocess(subprocess_exr, logger=self.log) # raise error if there is no ouptput if not os.path.exists(os.path.join(stagingdir, original_name)): diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 26e5fff699..2e5a9e5b94 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -15,7 +15,7 @@ from avalon import io from avalon.vendor import filelink import pype.api from datetime import datetime -from pype.modules import ModulesManager +# from pype.modules import ModulesManager # this is needed until speedcopy for linux is fixed if sys.platform == "win32": @@ -329,6 +329,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if repre.get("outputName"): template_data["output"] = repre['outputName'] + template_data["representation"] = repre["name"] + + ext = repre["ext"] + if ext.startswith("."): + self.log.warning(( + "Implementaion warning: <\"{}\">" + " Representation's extension stored under \"ext\" key " + " started with dot (\"{}\")." 
+ ).format(repre["name"], ext)) + ext = ext[1:] + repre["ext"] = ext + template_data["ext"] = ext + template = os.path.normpath( anatomy.templates[template_name]["path"]) @@ -355,7 +368,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): test_dest_files = list() for i in [1, 2]: - template_data["representation"] = repre['ext'] template_data["frame"] = src_padding_exp % i anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled[template_name]["path"] @@ -376,6 +388,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): index_frame_start = None + # TODO use frame padding from right template group if repre.get("frameStart") is not None: frame_start_padding = int( anatomy.templates["render"].get( @@ -411,7 +424,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst = "{0}{1}{2}".format( dst_head, dst_padding, - dst_tail).replace("..", ".") + dst_tail + ) self.log.debug("destination: `{}`".format(dst)) src = os.path.join(stagingdir, src_file_name) @@ -431,7 +445,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst_head, dst_start_frame, dst_tail - ).replace("..", ".") + ) repre['published_path'] = dst else: @@ -449,13 +463,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "Given file name is a full path" ) - template_data["representation"] = repre['ext'] - src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled[template_name]["path"] repre_context = template_filled.used_values - dst = os.path.normpath(template_filled).replace("..", ".") + dst = os.path.normpath(template_filled) instance.data["transfers"].append([src, dst]) @@ -933,15 +945,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): remote_site = None sync_server_presets = None - manager = ModulesManager() - sync_server = manager.modules_by_name["sync_server"] - try: - if sync_server.enabled: - local_site, remote_site = sync_server.get_sites_for_project() - except ValueError: - log.debug(("There are not set presets for SyncServer." - " No credentials provided, no synching possible"). - format(str(sync_server_presets))) + # manager = ModulesManager() + # sync_server = manager.modules_by_name["sync_server"] + # try: + # if sync_server.enabled: + # local_site, remote_site = sync_server.get_sites_for_project() + # except ValueError: + # log.debug(("There are not set presets for SyncServer." + # " No credentials provided, no synching possible"). 
+ # format(str(sync_server_presets))) rec = { "_id": io.ObjectId(), diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index 97122d2c39..23d2da5f4b 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -95,7 +95,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): template_data.update({ "_id": str(thumbnail_id), "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), - "ext": file_extension, + "ext": file_extension[1:], "thumbnail_type": "thumbnail" }) diff --git a/pype/plugins/hiero/publish/extract_review_cutup.py b/pype/plugins/hiero/publish/extract_review_cutup.py index 87e584d0b0..ec3a9ec17e 100644 --- a/pype/plugins/hiero/publish/extract_review_cutup.py +++ b/pype/plugins/hiero/publish/extract_review_cutup.py @@ -142,7 +142,7 @@ class ExtractReviewCutUp(pype.api.Extractor): ).format(**locals()) self.log.debug("ffprob_cmd: {}".format(ffprob_cmd)) - audio_check_output = pype.api.subprocess(ffprob_cmd) + audio_check_output = pype.api.run_subprocess(ffprob_cmd) self.log.debug( "audio_check_output: {}".format(audio_check_output)) @@ -177,7 +177,7 @@ class ExtractReviewCutUp(pype.api.Extractor): # try to get video native resolution data try: - resolution_output = pype.api.subprocess(( + resolution_output = pype.api.run_subprocess(( "\"{ffprobe_path}\" -i \"{full_input_path}\"" " -v error " "-select_streams v:0 -show_entries " @@ -290,8 +290,7 @@ class ExtractReviewCutUp(pype.api.Extractor): # run subprocess self.log.debug("Executing: {}".format(subprcs_cmd)) - output = pype.api.subprocess(subprcs_cmd) - self.log.debug("Output: {}".format(output)) + pype.api.run_subprocess(subprcs_cmd, logger=self.log) repre_new = { "files": new_files, diff --git a/pype/plugins/houdini/publish/extract_alembic.py b/pype/plugins/houdini/publish/extract_alembic.py index 4910c45aca..a71b85c529 100644 --- a/pype/plugins/houdini/publish/extract_alembic.py +++ b/pype/plugins/houdini/publish/extract_alembic.py @@ -42,7 +42,7 @@ class ExtractAlembic(pype.api.Extractor): representation = { 'name': 'abc', - 'ext': '.abc', + 'ext': 'abc', 'files': file_name, "stagingDir": staging_dir, } diff --git a/pype/plugins/houdini/publish/extract_vdb_cache.py b/pype/plugins/houdini/publish/extract_vdb_cache.py index 29686ef2fd..c56a4dd73a 100644 --- a/pype/plugins/houdini/publish/extract_vdb_cache.py +++ b/pype/plugins/houdini/publish/extract_vdb_cache.py @@ -42,7 +42,7 @@ class ExtractVDBCache(pype.api.Extractor): representation = { 'name': 'mov', - 'ext': '.mov', + 'ext': 'mov', 'files': output, "stagingDir": staging_dir, } diff --git a/pype/plugins/launcher/actions/AssetCreator.py b/pype/plugins/launcher/actions/AssetCreator.py index 9787aae002..5e845fdb5e 100644 --- a/pype/plugins/launcher/actions/AssetCreator.py +++ b/pype/plugins/launcher/actions/AssetCreator.py @@ -2,7 +2,7 @@ from avalon import api, lib from pype.api import Logger -log = Logger().get_logger(__name__, "asset_creator") +log = Logger().get_logger(__name__) class AssetCreator(api.Action): diff --git a/pype/plugins/maya/create/create_render.py b/pype/plugins/maya/create/create_render.py index bdd237a54e..2b0b0e19f7 100644 --- a/pype/plugins/maya/create/create_render.py +++ b/pype/plugins/maya/create/create_render.py @@ -127,18 +127,18 @@ class CreateRender(avalon.maya.Creator): system_settings = get_system_settings()["modules"] + deadline_enabled = system_settings["deadline"]["enabled"] + muster_enabled = 
system_settings["muster"]["enabled"] deadline_url = system_settings["deadline"]["DEADLINE_REST_URL"] muster_url = system_settings["muster"]["MUSTER_REST_URL"] - if deadline_url and muster_url: + if deadline_enabled and muster_enabled: self.log.error( "Both Deadline and Muster are enabled. " "Cannot support both." ) raise RuntimeError("Both Deadline and Muster are enabled") - if deadline_url is None: - self.log.warning("Deadline REST API url not found.") - else: + if deadline_enabled: argument = "{}/api/pools?NamesOnly=true".format(deadline_url) try: response = self._requests_get(argument) @@ -155,9 +155,7 @@ class CreateRender(avalon.maya.Creator): # set any secondary pools self.data["secondaryPool"] = ["-"] + pools - if muster_url is None: - self.log.warning("Muster REST API URL not found.") - else: + if muster_enabled: self.log.info(">>> Loading Muster credentials ...") self._load_credentials() self.log.info(">>> Getting pools ...") @@ -193,6 +191,7 @@ class CreateRender(avalon.maya.Creator): self.data["tilesX"] = 2 self.data["tilesY"] = 2 self.data["convertToScanline"] = False + self.data["vrayUseReferencedAovs"] = False # Disable for now as this feature is not working yet # self.data["assScene"] = False diff --git a/pype/plugins/maya/publish/collect_render.py b/pype/plugins/maya/publish/collect_render.py index 3dde3b1592..0853473120 100644 --- a/pype/plugins/maya/publish/collect_render.py +++ b/pype/plugins/maya/publish/collect_render.py @@ -149,7 +149,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): # return all expected files for all cameras and aovs in given # frame range - ef = ExpectedFiles() + ef = ExpectedFiles(render_instance) exp_files = ef.get(renderer, layer_name) self.log.info("multipart: {}".format(ef.multipart)) assert exp_files, "no file names were generated, this is bug" @@ -248,7 +248,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "tilesX": render_instance.data.get("tilesX") or 2, "tilesY": render_instance.data.get("tilesY") or 2, "priority": render_instance.data.get("priority"), - "convertToScanline": render_instance.data.get("convertToScanline") or False # noqa: E501 + "convertToScanline": render_instance.data.get("convertToScanline") or False, # noqa: E501 + "vrayUseReferencedAovs": render_instance.data.get("vrayUseReferencedAovs") or False # noqa: E501 } if self.sync_workfile_version: diff --git a/pype/plugins/maya/publish/validate_vray_referenced_aovs.py b/pype/plugins/maya/publish/validate_vray_referenced_aovs.py new file mode 100644 index 0000000000..120677021d --- /dev/null +++ b/pype/plugins/maya/publish/validate_vray_referenced_aovs.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +"""Validate if there are AOVs pulled from references.""" +import pyblish.api +import types +from maya import cmds + +import pype.hosts.maya.action + + +class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): + """Validate whether the V-Ray Render Elements (AOVs) include references. + + This will check if there are AOVs pulled from references. If + `Vray Use Referenced Aovs` is checked on render instance, u must add those + manually to Render Elements as Pype will expect them to be rendered. + + """ + + order = pyblish.api.ValidatorOrder + label = 'VRay Referenced AOVs' + hosts = ['maya'] + families = ['renderlayer'] + actions = [pype.api.RepairContextAction] + + def process(self, instance): + """Plugin main entry point.""" + if instance.data.get("renderer") != "vray": + # If not V-Ray ignore.. 
+ return + + ref_aovs = cmds.ls( + type=["VRayRenderElement", "VRayRenderElementSet"], + referencedNodes=True) + ref_aovs_enabled = ValidateVrayReferencedAOVs.maya_is_true( + cmds.getAttr("vraySettings.relements_usereferenced")) + + if not instance.data.get("vrayUseReferencedAovs"): + if ref_aovs_enabled and ref_aovs: + self.log.warning(( + "Referenced AOVs are enabled in Vray " + "Render Settings and are detected in scene, but " + "Pype render instance option for referenced AOVs is " + "disabled. Those AOVs will be rendered but not published " + "by Pype." + )) + self.log.warning(", ".join(ref_aovs)) + else: + if not ref_aovs: + self.log.warning(( + "Use of referenced AOVs enabled but there are none " + "in the scene." + )) + if not ref_aovs_enabled: + self.log.error(( + "'Use referenced' not enabled in Vray Render Settings." + )) + raise AssertionError("Invalid render settings") + + @classmethod + def repair(cls, context): + """Repair action.""" + vray_settings = cmds.ls(type="VRaySettingsNode") + if not vray_settings: + node = cmds.createNode("VRaySettingsNode") + else: + node = vray_settings[0] + + cmds.setAttr("{}.relements_usereferenced".format(node), True) + + @staticmethod + def maya_is_true(attr_val): + """Whether a Maya attr evaluates to True. + + When querying an attribute value from an ambiguous object the + Maya API will return a list of values, which need to be properly + handled to evaluate properly. + + Args: + attr_val (mixed): Maya attribute to be evaluated as bool. + + Returns: + bool: cast Maya attribute to Pythons boolean value. + + """ + if isinstance(attr_val, types.BooleanType): + return attr_val + elif isinstance(attr_val, (types.ListType, types.GeneratorType)): + return any(attr_val) + else: + return bool(attr_val) diff --git a/pype/plugins/nuke/load/actions.py b/pype/plugins/nuke/load/actions.py index 96f5f3b8c4..d0c95b6b16 100644 --- a/pype/plugins/nuke/load/actions.py +++ b/pype/plugins/nuke/load/actions.py @@ -5,7 +5,7 @@ from avalon import api from pype.api import Logger -log = Logger().get_logger(__name__, "nuke") +log = Logger().get_logger(__name__) class SetFrameRangeLoader(api.Loader): diff --git a/pype/plugins/photoshop/publish/extract_review.py b/pype/plugins/photoshop/publish/extract_review.py index 2753cbf366..aa9151bf6d 100644 --- a/pype/plugins/photoshop/publish/extract_review.py +++ b/pype/plugins/photoshop/publish/extract_review.py @@ -60,7 +60,7 @@ class ExtractReview(pype.api.Extractor): "-vframes", "1", thumbnail_path ] - output = pype.lib._subprocess(args) + output = pype.lib.run_subprocess(args) instance.data["representations"].append({ "name": "thumbnail", @@ -78,7 +78,7 @@ class ExtractReview(pype.api.Extractor): "-vframes", "1", mov_path ] - output = pype.lib._subprocess(args) + output = pype.lib.run_subprocess(args) self.log.debug(output) instance.data["representations"].append({ "name": "mov", diff --git a/pype/plugins/resolve/_publish/collect_clip_resolution.py b/pype/plugins/resolve/_publish/collect_clip_resolution.py new file mode 100644 index 0000000000..3bea68c677 --- /dev/null +++ b/pype/plugins/resolve/_publish/collect_clip_resolution.py @@ -0,0 +1,38 @@ +import pyblish.api + + +class CollectClipResolution(pyblish.api.InstancePlugin): + """Collect clip geometry resolution""" + + order = pyblish.api.CollectorOrder - 0.1 + label = "Collect Clip Resoluton" + hosts = ["resolve"] + families = ["clip"] + + def process(self, instance): + sequence = instance.context.data['activeSequence'] + item = instance.data["item"] + source_resolution 
= instance.data.get("sourceResolution", None)
+
+        resolution_width = int(sequence.format().width())
+        resolution_height = int(sequence.format().height())
+        pixel_aspect = sequence.format().pixelAspect()
+
+        # source exception
+        if source_resolution:
+            resolution_width = int(item.source().mediaSource().width())
+            resolution_height = int(item.source().mediaSource().height())
+            pixel_aspect = item.source().mediaSource().pixelAspect()
+
+        resolution_data = {
+            "resolutionWidth": resolution_width,
+            "resolutionHeight": resolution_height,
+            "pixelAspect": pixel_aspect
+        }
+        # add to instance data
+        instance.data.update(resolution_data)
+
+        self.log.info("Resolution of instance '{}' is: {}".format(
+            instance,
+            resolution_data
+        ))
diff --git a/pype/plugins/resolve/publish/collect_clips.py b/pype/plugins/resolve/_publish/collect_clips.py
similarity index 100%
rename from pype/plugins/resolve/publish/collect_clips.py
rename to pype/plugins/resolve/_publish/collect_clips.py
diff --git a/pype/plugins/resolve/create/create_shot_clip.py b/pype/plugins/resolve/create/create_shot_clip.py
index bd2e013fac..19e613ee7a 100644
--- a/pype/plugins/resolve/create/create_shot_clip.py
+++ b/pype/plugins/resolve/create/create_shot_clip.py
@@ -1,4 +1,4 @@
-from pprint import pformat
+# from pprint import pformat
 from pype.hosts import resolve
 from pype.hosts.resolve import lib
 
@@ -6,45 +6,216 @@ class CreateShotClip(resolve.Creator):
     """Publishable clip"""
 
-    label = "Shot"
+    label = "Create Publishable Clip"
     family = "clip"
     icon = "film"
     defaults = ["Main"]
 
-    gui_name = "Pype sequencial rename with hirerarchy"
-    gui_info = "Define sequencial rename and fill hierarchy data."
+    gui_tracks = resolve.get_video_track_names()
+    gui_name = "Pype publish attributes creator"
+    gui_info = "Define sequential rename and fill hierarchy data."
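+    # NOTE: every entry below is a widget definition dict: `type` names a
+    # Qt widget (or a "section"/"dict" grouping), `target` decides whether
+    # the value stays in the dialog ("ui") or is presumably written to the
+    # clip tag data ("tag"), and `order` drives the layout.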
    gui_inputs = {
-        "clipName": "{episode}{sequence}{shot}",
-        "hierarchy": "{folder}/{sequence}/{shot}",
-        "countFrom": 10,
-        "steps": 10,
+        "renameHierarchy": {
+            "type": "section",
+            "label": "Shot Hierarchy And Rename Settings",
+            "target": "ui",
+            "order": 0,
+            "value": {
+                "hierarchy": {
+                    "value": "{folder}/{sequence}",
+                    "type": "QLineEdit",
+                    "label": "Shot Parent Hierarchy",
+                    "target": "tag",
+                    "toolTip": "Parents folder for shot root folder, template filled from the `Hierarchy Data` section",  # noqa
+                    "order": 0},
+                "clipRename": {
+                    "value": False,
+                    "type": "QCheckBox",
+                    "label": "Rename clips",
+                    "target": "ui",
+                    "toolTip": "Rename selected clips on the fly",  # noqa
+                    "order": 1},
+                "clipName": {
+                    "value": "{sequence}{shot}",
+                    "type": "QLineEdit",
+                    "label": "Clip Name Template",
+                    "target": "ui",
+                    "toolTip": "Template for creating shot names used for renaming (use rename: on)",  # noqa
+                    "order": 2},
+                "countFrom": {
+                    "value": 10,
+                    "type": "QSpinBox",
+                    "label": "Count sequence from",
+                    "target": "ui",
+                    "toolTip": "Set the number the sequence starts counting from",  # noqa
+                    "order": 3},
+                "countSteps": {
+                    "value": 10,
+                    "type": "QSpinBox",
+                    "label": "Stepping number",
+                    "target": "ui",
+                    "toolTip": "What number is added with every new step",  # noqa
+                    "order": 4},
+            }
+        },
         "hierarchyData": {
-            "folder": "shots",
-            "shot": "sh####",
-            "track": "{track}",
-            "sequence": "sc010",
-            "episode": "ep01"
+            "type": "dict",
+            "label": "Shot Template Keywords",
+            "target": "tag",
+            "order": 1,
+            "value": {
+                "folder": {
+                    "value": "shots",
+                    "type": "QLineEdit",
+                    "label": "{folder}",
+                    "target": "tag",
+                    "toolTip": "Name of folder used as root of generated shots.\nUsable tokens:\n\t{_clip_}: name of used clip\n\t{_track_}: name of parent track layer\n\t{_sequence_}: name of parent sequence (timeline)",  # noqa
+                    "order": 0},
+                "episode": {
+                    "value": "ep01",
+                    "type": "QLineEdit",
+                    "label": "{episode}",
+                    "target": "tag",
+                    "toolTip": "Name of episode.\nUsable tokens:\n\t{_clip_}: name of used clip\n\t{_track_}: name of parent track layer\n\t{_sequence_}: name of parent sequence (timeline)",  # noqa
+                    "order": 1},
+                "sequence": {
+                    "value": "sq01",
+                    "type": "QLineEdit",
+                    "label": "{sequence}",
+                    "target": "tag",
+                    "toolTip": "Name of sequence of shots.\nUsable tokens:\n\t{_clip_}: name of used clip\n\t{_track_}: name of parent track layer\n\t{_sequence_}: name of parent sequence (timeline)",  # noqa
+                    "order": 2},
+                "track": {
+                    "value": "{_track_}",
+                    "type": "QLineEdit",
+                    "label": "{track}",
+                    "target": "tag",
+                    "toolTip": "Name of track.\nUsable tokens:\n\t{_clip_}: name of used clip\n\t{_track_}: name of parent track layer\n\t{_sequence_}: name of parent sequence (timeline)",  # noqa
+                    "order": 3},
+                "shot": {
+                    "value": "sh###",
+                    "type": "QLineEdit",
+                    "label": "{shot}",
+                    "target": "tag",
+                    "toolTip": "Name of shot. `#` is converted to a padded number.
\nAlso could be used with usable tokens:\n\t{_clip_}: name of used clip\n\t{_track_}: name of parent track layer\n\t{_sequence_}: name of parent sequence (timeline)",  # noqa
+                    "order": 4}
+            }
+        },
+        "verticalSync": {
+            "type": "section",
+            "label": "Vertical Synchronization Of Attributes",
+            "target": "ui",
+            "order": 2,
+            "value": {
+                "vSyncOn": {
+                    "value": True,
+                    "type": "QCheckBox",
+                    "label": "Enable Vertical Sync",
+                    "target": "ui",
+                    "toolTip": "Switch on if you want clips above each other to share their attributes",  # noqa
+                    "order": 0},
+                "vSyncTrack": {
+                    "value": gui_tracks,  # noqa
+                    "type": "QComboBox",
+                    "label": "Master track",
+                    "target": "ui",
+                    "toolTip": "Select the driving track which should master all others",  # noqa
+                    "order": 1}
+            }
+        },
+        "publishSettings": {
+            "type": "section",
+            "label": "Publish Settings",
+            "target": "ui",
+            "order": 3,
+            "value": {
+                "subsetName": {
+                    "value": ["", "main", "bg", "fg", "bg",
+                              "animatic"],
+                    "type": "QComboBox",
+                    "label": "Subset Name",
+                    "target": "ui",
+                    "toolTip": "Choose a subset name pattern; if the empty item is selected, the name of the track layer will be used",  # noqa
+                    "order": 0},
+                "subsetFamily": {
+                    "value": ["plate", "take"],
+                    "type": "QComboBox",
+                    "label": "Subset Family",
+                    "target": "ui",
+                    "toolTip": "What this subset will be used for",  # noqa
+                    "order": 1},
+                "reviewTrack": {
+                    "value": ["< none >"] + gui_tracks,
+                    "type": "QComboBox",
+                    "label": "Use Review Track",
+                    "target": "ui",
+                    "toolTip": "Generate preview videos on the fly; if `< none >` is defined nothing will be generated.",  # noqa
+                    "order": 2},
+                "audio": {
+                    "value": False,
+                    "type": "QCheckBox",
+                    "label": "Include audio",
+                    "target": "tag",
+                    "toolTip": "Process subsets with corresponding audio",  # noqa
+                    "order": 3},
+                "sourceResolution": {
+                    "value": False,
+                    "type": "QCheckBox",
+                    "label": "Source resolution",
+                    "target": "tag",
+                    "toolTip": "Is the resolution taken from the timeline or the source?",  # noqa
+                    "order": 4},
+            }
+        },
+        "shotAttr": {
+            "type": "section",
+            "label": "Shot Attributes",
+            "target": "ui",
+            "order": 4,
+            "value": {
+                "workfileFrameStart": {
+                    "value": 1001,
+                    "type": "QSpinBox",
+                    "label": "Workfiles Start Frame",
+                    "target": "tag",
+                    "toolTip": "Set the workfile starting frame number",  # noqa
+                    "order": 0},
+                "handleStart": {
+                    "value": 0,
+                    "type": "QSpinBox",
+                    "label": "Handle start (head)",
+                    "target": "tag",
+                    "toolTip": "Handle at start of clip",  # noqa
+                    "order": 1},
+                "handleEnd": {
+                    "value": 0,
+                    "type": "QSpinBox",
+                    "label": "Handle end (tail)",
+                    "target": "tag",
+                    "toolTip": "Handle at end of clip",  # noqa
+                    "order": 2},
+            }
+        }
     }
+    presets = None
 
     def process(self):
-        # solve gui inputs overwrites from presets
-        # overwrite gui inputs from presets
+        # get key pairs from presets and match them against ui inputs
         for k, v in self.gui_inputs.items():
-            if isinstance(v, dict):
-                # nested dictionary (only one level allowed)
-                for _k, _v in v.items():
-                    if self.presets.get(_k):
-                        self.gui_inputs[k][_k] = self.presets[_k]
+            if v["type"] in ("dict", "section"):
+                # nested dictionary (only one level allowed
+                # for sections and dict)
+                for _k, _v in v["value"].items():
+                    if self.presets.get(_k) is not None:
+                        self.gui_inputs[k][
+                            "value"][_k]["value"] = self.presets[_k]
             if self.presets.get(k):
-                self.gui_inputs[k] = self.presets[k]
+                self.gui_inputs[k]["value"] = self.presets[k]
 
         # open widget for plugins inputs
         widget = self.widget(self.gui_name, self.gui_info, self.gui_inputs)
         widget.exec_()
 
-        print(f"__ selected_clips: {self.selected}")
         if len(self.selected)
< 1: return @@ -52,28 +223,41 @@ class CreateShotClip(resolve.Creator): print("Operation aborted") return + self.rename_add = 0 + + # get ui output for track name for vertical sync + v_sync_track = widget.result["vSyncTrack"]["value"] + + # sort selected track items by the vertical sync track + sorted_selected_track_items = list() + unsorted_selected_track_items = list() + for track_item_data in self.selected: + if track_item_data["track"]["name"] in v_sync_track: + sorted_selected_track_items.append(track_item_data) + else: + unsorted_selected_track_items.append(track_item_data) + + sorted_selected_track_items.extend(unsorted_selected_track_items) + # sequence attrs sq_frame_start = self.sequence.GetStartFrame() sq_markers = self.sequence.GetMarkers() - print(f"__ sq_frame_start: {pformat(sq_frame_start)}") - print(f"__ seq_markers: {pformat(sq_markers)}") # create media bin for compound clips (trackItems) mp_folder = resolve.create_current_sequence_media_bin(self.sequence) - print(f"_ mp_folder: {mp_folder.GetName()}") - lib.rename_add = 0 - for i, t_data in enumerate(self.selected): - lib.rename_index = i + kwargs = { + "ui_inputs": widget.result, + "avalon": self.data, + "mp_folder": mp_folder, + "sq_frame_start": sq_frame_start, + "sq_markers": sq_markers } - # clear color after it is done - t_data["clip"]["item"].ClearClipColor() + for i, track_item_data in enumerate(sorted_selected_track_items): + self.rename_index = i # convert track item to timeline media pool item - resolve.create_compound_clip( - t_data, - mp_folder, - rename=True, - **dict( - {"presets": widget.result}) - ) + track_item = resolve.PublishClip( + self, track_item_data, **kwargs).convert() + track_item.SetClipColor(lib.publish_clip_color) diff --git a/pype/plugins/resolve/publish/collect_instances.py b/pype/plugins/resolve/publish/collect_instances.py new file mode 100644 index 0000000000..76332b03c2 --- /dev/null +++ b/pype/plugins/resolve/publish/collect_instances.py @@ -0,0 +1,129 @@ +import pyblish +from pype.hosts import resolve + +# # developer reload modules +from pprint import pformat + + +class CollectInstances(pyblish.api.ContextPlugin): + """Collect all selected track items.""" + + order = pyblish.api.CollectorOrder - 0.59 + label = "Collect Instances" + hosts = ["resolve"] + + def process(self, context): + otio_timeline = context.data["otioTimeline"] + selected_track_items = resolve.get_current_track_items( + filter=True, selecting_color=resolve.publish_clip_color) + + self.log.info( + "Processing enabled track items: {}".format( + len(selected_track_items))) + + for track_item_data in selected_track_items: + + data = dict() + track_item = track_item_data["clip"]["item"] + + # get pype tag data + tag_data = resolve.get_track_item_pype_tag(track_item) + self.log.debug(f"__ tag_data: {pformat(tag_data)}") + + if not tag_data: + continue + + if tag_data.get("id") != "pyblish.avalon.instance": + continue + + media_pool_item = track_item.GetMediaPoolItem() + clip_property = media_pool_item.GetClipProperty() + self.log.debug(f"clip_property: {clip_property}") + + # add tag data to instance data + data.update({ + k: v for k, v in tag_data.items() + if k not in ("id", "applieswhole", "label") + }) + + asset = tag_data["asset"] + subset = tag_data["subset"] + + # insert family into families + family = tag_data["family"] + families = [str(f) for f in tag_data["families"]] + families.insert(0, str(family)) + + data.update({ + "name": "{} {} {}".format(asset, subset, families), + "asset": asset, + "item": track_item, + "families":
families, + "publish": resolve.get_publish_attribute(track_item), + "fps": context.data["fps"] + }) + + # otio clip data + otio_data = resolve.get_otio_clip_instance_data( + otio_timeline, track_item_data) or {} + data.update(otio_data) + + # add resolution + self.get_resolution_to_data(data, context) + + # create instance + instance = context.create_instance(**data) + + # create shot instance for shot attributes create/update + self.create_shot_instance(context, track_item, **data) + + self.log.info("Creating instance: {}".format(instance)) + self.log.debug( + "_ instance.data: {}".format(pformat(instance.data))) + + def get_resolution_to_data(self, data, context): + assert data.get("otioClip"), "Missing `otioClip` data" + + # solve source resolution option + if data.get("sourceResolution", None): + otio_clip_metadata = data[ + "otioClip"].media_reference.metadata + data.update({ + "resolutionWidth": otio_clip_metadata["width"], + "resolutionHeight": otio_clip_metadata["height"], + "pixelAspect": otio_clip_metadata["pixelAspect"] + }) + else: + otio_tl_metadata = context.data["otioTimeline"].metadata + data.update({ + "resolutionWidth": otio_tl_metadata["width"], + "resolutionHeight": otio_tl_metadata["height"], + "pixelAspect": otio_tl_metadata["pixelAspect"] + }) + + def create_shot_instance(self, context, track_item, **data): + master_layer = data.get("masterLayer") + hierarchy_data = data.get("hierarchyData") + + if not master_layer: + return + + if not hierarchy_data: + return + + asset = data["asset"] + subset = "shotMain" + + # insert family into families + family = "shot" + + data.update({ + "name": "{} {} {}".format(asset, subset, family), + "subset": subset, + "asset": asset, + "family": family, + "families": [], + "publish": resolve.get_publish_attribute(track_item) + }) + + context.create_instance(**data) diff --git a/pype/plugins/resolve/publish/collect_project.py b/pype/plugins/resolve/publish/collect_project.py deleted file mode 100644 index aa57f93619..0000000000 --- a/pype/plugins/resolve/publish/collect_project.py +++ /dev/null @@ -1,29 +0,0 @@ -import os -import pyblish.api -from pype.hosts.resolve.utils import get_resolve_module - - -class CollectProject(pyblish.api.ContextPlugin): - """Collect Project object""" - - order = pyblish.api.CollectorOrder - 0.1 - label = "Collect Project" - hosts = ["resolve"] - - def process(self, context): - exported_projet_ext = ".drp" - current_dir = os.getenv("AVALON_WORKDIR") - resolve = get_resolve_module() - PM = resolve.GetProjectManager() - P = PM.GetCurrentProject() - name = P.GetName() - - fname = name + exported_projet_ext - current_file = os.path.join(current_dir, fname) - normalised = os.path.normpath(current_file) - - context.data["project"] = P - context.data["currentFile"] = normalised - - self.log.info(name) - self.log.debug(normalised) diff --git a/pype/plugins/resolve/publish/collect_workfile.py b/pype/plugins/resolve/publish/collect_workfile.py new file mode 100644 index 0000000000..8c8e2b66c8 --- /dev/null +++ b/pype/plugins/resolve/publish/collect_workfile.py @@ -0,0 +1,55 @@ +import pyblish.api +from pype.hosts import resolve +from avalon import api as avalon +from pprint import pformat + +# dev +from importlib import reload +from pype.hosts.resolve.otio import davinci_export +reload(davinci_export) + + +class CollectWorkfile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + label = "Collect Workfile" + order = pyblish.api.CollectorOrder - 0.6 + + def process(self, context): + + 
asset = avalon.Session["AVALON_ASSET"] + subset = "workfile" + project = resolve.get_current_project() + fps = project.GetSetting("timelineFrameRate") + + active_sequence = resolve.get_current_sequence() + video_tracks = resolve.get_video_track_names() + + # adding otio timeline to context + otio_timeline = davinci_export.create_otio_timeline(project) + + instance_data = { + "name": "{}_{}".format(asset, subset), + "asset": asset, + "subset": "{}{}".format(asset, subset.capitalize()), + "item": project, + "family": "workfile" + } + + # create instance with workfile + instance = context.create_instance(**instance_data) + + # update context with main project attributes + context_data = { + "activeProject": project, + "activeSequence": active_sequence, + "otioTimeline": otio_timeline, + "videoTracks": video_tracks, + "currentFile": project.GetName(), + "fps": fps, + } + context.data.update(context_data) + + self.log.info("Creating instance: {}".format(instance)) + self.log.debug("__ instance.data: {}".format(pformat(instance.data))) + self.log.debug("__ context_data: {}".format(pformat(context_data))) diff --git a/pype/plugins/resolve/publish/extract_workfile.py b/pype/plugins/resolve/publish/extract_workfile.py new file mode 100644 index 0000000000..e52e829ee4 --- /dev/null +++ b/pype/plugins/resolve/publish/extract_workfile.py @@ -0,0 +1,50 @@ +import os +import pyblish.api +import pype.api +from pype.hosts import resolve + + +class ExtractWorkfile(pype.api.Extractor): + """ + Extractor exporting a DRP workfile representation + """ + + label = "Extract Workfile" + order = pyblish.api.ExtractorOrder + families = ["workfile"] + hosts = ["resolve"] + + def process(self, instance): + # create representation data + if "representations" not in instance.data: + instance.data["representations"] = [] + + name = instance.data["name"] + project = instance.context.data["activeProject"] + staging_dir = self.staging_dir(instance) + + resolve_workfile_ext = ".drp" + drp_file_name = name + resolve_workfile_ext + drp_file_path = os.path.normpath( + os.path.join(staging_dir, drp_file_name)) + + # write out the drp workfile + resolve.get_project_manager().ExportProject( + project.GetName(), drp_file_path) + + # create drp workfile representation + representation_drp = { + 'name': resolve_workfile_ext[1:], + 'ext': resolve_workfile_ext[1:], + 'files': drp_file_name, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation_drp) + + # add sourcePath attribute to instance + if not instance.data.get("sourcePath"): + instance.data["sourcePath"] = drp_file_path + + self.log.info("Added Resolve file representation: {}".format( + representation_drp)) diff --git a/pype/plugins/standalonepublisher/publish/collect_context.py b/pype/plugins/standalonepublisher/publish/collect_context.py index 9dbeec93fb..cd7246c0a2 100644 --- a/pype/plugins/standalonepublisher/publish/collect_context.py +++ b/pype/plugins/standalonepublisher/publish/collect_context.py @@ -66,23 +66,23 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): in_data_list = list() representations = in_data.pop("representations") - for repr in representations: + for repre in representations: in_data_copy = copy.deepcopy(in_data) - ext = repr["ext"][1:] + ext = repre["ext"][1:] subset = in_data_copy["subset"] # filter out non editorial files if ext not in self.batch_extensions: - in_data_copy["representations"] = [repr] + in_data_copy["representations"] = [repre] in_data_copy["subset"] = f"{ext}{subset}"
in_data_list.append(in_data_copy) - files = repr.get("files") + files = repre.get("files") # delete unneeded keys delete_repr_keys = ["frameStart", "frameEnd"] for k in delete_repr_keys: - if repr.get(k): - repr.pop(k) + if repre.get(k): + repre.pop(k) # convert files to list if it isnt if not isinstance(files, (tuple, list)): @@ -145,12 +145,16 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): component["stagingDir"] = component["stagingDir"] if isinstance(component["files"], list): - collections, remainder = clique.assemble(component["files"]) + collections, _remainder = clique.assemble(component["files"]) self.log.debug("collecting sequence: {}".format(collections)) instance.data["frameStart"] = int(component["frameStart"]) instance.data["frameEnd"] = int(component["frameEnd"]) instance.data["fps"] = int(component["fps"]) + ext = component["ext"] + if ext.startswith("."): + component["ext"] = ext[1:] + if component["preview"]: instance.data["families"].append("review") instance.data["repreProfiles"] = ["h264"] diff --git a/pype/plugins/standalonepublisher/publish/collect_editorial.py b/pype/plugins/standalonepublisher/publish/collect_editorial.py index 7e532c3741..afbdd87b2d 100644 --- a/pype/plugins/standalonepublisher/publish/collect_editorial.py +++ b/pype/plugins/standalonepublisher/publish/collect_editorial.py @@ -35,7 +35,7 @@ class OTIO_View(pyblish.api.Action): file_path = os.path.join( representation["stagingDir"], representation["files"] ) - plib._subprocess(["otioview", file_path]) + plib.run_subprocess(["otioview", file_path]) class CollectEditorial(pyblish.api.InstancePlugin): diff --git a/pype/plugins/standalonepublisher/publish/extract_trim_video_audio.py b/pype/plugins/standalonepublisher/publish/extract_trim_video_audio.py index 193902a9f6..1c53ae5f46 100644 --- a/pype/plugins/standalonepublisher/publish/extract_trim_video_audio.py +++ b/pype/plugins/standalonepublisher/publish/extract_trim_video_audio.py @@ -80,8 +80,9 @@ class ExtractTrimVideoAudio(pype.api.Extractor): self.log.info(f"Processing: {args}") ffmpeg_args = " ".join(args) - output = pype.api.subprocess(ffmpeg_args, shell=True) - self.log.info(output) + pype.api.run_subprocess( + ffmpeg_args, shell=True, logger=self.log + ) repr = { "name": ext[1:], diff --git a/pype/pype_commands.py b/pype/pype_commands.py index ecaaf68a47..36e5ac639f 100644 --- a/pype/pype_commands.py +++ b/pype/pype_commands.py @@ -3,6 +3,7 @@ import os import subprocess import sys +from pype.lib import PypeLogger class PypeCommands: @@ -12,6 +13,8 @@ class PypeCommands: """ @staticmethod def launch_tray(debug=False): + PypeLogger.set_process_name("Tray") + from pype.tools import tray tray.main() diff --git a/pype/settings/defaults/project_anatomy/templates.json b/pype/settings/defaults/project_anatomy/templates.json index 32f962556f..dcb0991a06 100644 --- a/pype/settings/defaults/project_anatomy/templates.json +++ b/pype/settings/defaults/project_anatomy/templates.json @@ -10,18 +10,18 @@ }, "render": { "folder": "{root}/{project[name]}/{hierarchy}/{asset}/publish/render/{subset}/{@version}", - "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{representation}", + "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{ext}", "path": "{@folder}/{@file}" }, "publish": { "folder": "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", - "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{representation}", + "file": 
"{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{ext}", "path": "{@folder}/{@file}", - "thumbnail": "{thumbnail_root}/{project[name]}/{_id}_{thumbnail_type}{ext}" + "thumbnail": "{thumbnail_root}/{project[name]}/{_id}_{thumbnail_type}.{ext}" }, "master": { "folder": "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/master", - "file": "{project[code]}_{asset}_{subset}_master<_{output}><.{frame}>.{representation}", + "file": "{project[code]}_{asset}_{subset}_master<_{output}><.{frame}>.{ext}", "path": "{@folder}/{@file}" }, "delivery": {}, diff --git a/pype/settings/defaults/project_settings/ftrack.json b/pype/settings/defaults/project_settings/ftrack.json index 2bf11de468..debc92f2b5 100644 --- a/pype/settings/defaults/project_settings/ftrack.json +++ b/pype/settings/defaults/project_settings/ftrack.json @@ -9,15 +9,21 @@ "not ready" ] }, - "push_frame_values_to_task": { + "sync_hier_entity_attributes": { "enabled": true, "interest_entity_types": [ - "shot", - "asset build" + "Shot", + "Asset Build" ], - "interest_attributess": [ + "interest_attributes": [ "frameStart", "frameEnd" + ], + "action_enabled": true, + "role_list": [ + "Pypeclub", + "Administrator", + "Project Manager" ] }, "thumbnail_updates": { @@ -84,8 +90,12 @@ "next_task_update": { "enabled": true, "mapping": { - "Ready": "Not Ready" - } + "Not Ready": "Ready" + }, + "ignored_statuses": [ + "Omitted" + ], + "name_sorting": false } }, "user_handlers": { @@ -94,13 +104,11 @@ "ignored_statuses": [ "In Progress", "Omitted", - "On hold" + "On hold", + "Approved" ], "status_change": { - "In Progress": [], - "Ready": [ - "Not Ready" - ] + "In Progress": [] } }, "create_update_attributes": { @@ -167,7 +175,8 @@ "sync_to_avalon_local": { "enabled": true, "role_list": [ - "Pypeclub" + "Pypeclub", + "Administrator" ] }, "seed_project": { diff --git a/pype/settings/defaults/project_settings/resolve.json b/pype/settings/defaults/project_settings/resolve.json index cb7064ee76..b6fbdecc95 100644 --- a/pype/settings/defaults/project_settings/resolve.json +++ b/pype/settings/defaults/project_settings/resolve.json @@ -1,9 +1,20 @@ { "create": { "CreateShotClip": { + "hierarchy": "{folder}/{sequence}", + "clipRename": true, "clipName": "{track}{sequence}{shot}", - "folder": "takes", - "steps": 20 + "countFrom": 10, + "countSteps": 10, + "folder": "shots", + "episode": "ep01", + "sequence": "sq01", + "track": "{_track_}", + "shot": "sh###", + "vSyncOn": false, + "workfileFrameStart": 1001, + "handleStart": 10, + "handleEnd": 10 } } } \ No newline at end of file diff --git a/pype/settings/defaults/system_settings/applications.json b/pype/settings/defaults/system_settings/applications.json index 79d39c94f9..639b52e423 100644 --- a/pype/settings/defaults/system_settings/applications.json +++ b/pype/settings/defaults/system_settings/applications.json @@ -788,9 +788,7 @@ "RESOLVE_DEV" ] }, - "RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR": [ - "{STUDIO_SOFT}/davinci_resolve/scripts/python" - ], + "RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR": [], "RESOLVE_SCRIPT_API": { "windows": "{PROGRAMDATA}/Blackmagic Design/DaVinci Resolve/Support/Developer/Scripting", "darvin": "/Library/Application Support/Blackmagic Design/DaVinci Resolve/Developer/Scripting", @@ -834,7 +832,12 @@ "variant_label": "16", "icon": "", "executables": { - "windows": [], + "windows": [ + [ + "C:/Program Files/Blackmagic Design/DaVinci Resolve/Resolve.exe", + "" + ] + ], "darwin": [], "linux": [] }, diff --git 
a/pype/settings/defaults/system_settings/modules.json b/pype/settings/defaults/system_settings/modules.json index 0f4b0b37f3..6493901bac 100644 --- a/pype/settings/defaults/system_settings/modules.json +++ b/pype/settings/defaults/system_settings/modules.json @@ -28,78 +28,102 @@ "custom_attributes": { "show": { "avalon_auto_sync": { - "default": "", - "write_security_role": [ + "write_security_roles": [ "API", "Administrator" ], - "read_security_role": [ + "read_security_roles": [ "API", "Administrator" ] }, "library_project": { - "default": "", - "write_security_role": [ + "write_security_roles": [ "API", "Administrator" ], - "read_security_role": [ + "read_security_roles": [ "API", "Administrator" ] + }, + "applications": { + "write_security_roles": [ + "API", + "Administrator", + "Pypeclub" + ], + "read_security_roles": [ + "API", + "Administrator", + "Pypeclub" + ] } }, "is_hierarchical": { + "avalon_mongo_id": { + "write_security_roles": [ + "API", + "Administrator", + "Pypeclub" + ], + "read_security_roles": [ + "API", + "Administrator", + "Pypeclub" + ] + }, + "tools_env": { + "write_security_roles": [ + "API", + "Administrator", + "Pypeclub" + ], + "read_security_roles": [ + "API", + "Administrator", + "Pypeclub" + ] + }, "fps": { - "default": "25", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "frameStart": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "frameEnd": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "clipIn": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "clipOut": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "handleStart": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "handleEnd": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "resolutionWidth": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "resolutionHeight": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] }, "pixelAspect": { - "default": "", - "write_security_role": [], - "read_security_role": [] + "write_security_roles": [], + "read_security_roles": [] } } } @@ -153,4 +177,4 @@ "idle_manager": { "enabled": true } -} +} \ No newline at end of file diff --git a/pype/tests/test_lib_restructuralization.py b/pype/tests/test_lib_restructuralization.py index 957167a8bf..840baff5e6 100644 --- a/pype/tests/test_lib_restructuralization.py +++ b/pype/tests/test_lib_restructuralization.py @@ -29,7 +29,7 @@ def test_backward_compatibility(printer): from pype.hosts.fusion.lib import switch_item from pype.lib import source_hash - from pype.lib import _subprocess + from pype.lib import run_subprocess except ImportError as e: raise diff --git a/pype/tools/settings/__init__.py b/pype/tools/settings/__init__.py index 89abd262e8..3090adcf0a 100644 --- a/pype/tools/settings/__init__.py +++ b/pype/tools/settings/__init__.py @@ -20,6 +20,7 @@ def main(user_role=None): 
widget = MainWidget(user_role) widget.show() + widget.reset() sys.exit(app.exec_()) diff --git a/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_ftrack.json b/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_ftrack.json index 1554989c55..cbff26e135 100644 --- a/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_ftrack.json +++ b/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_ftrack.json @@ -61,7 +61,7 @@ }, { "type": "dict", - "key": "push_frame_values_to_task", + "key": "sync_hier_entity_attributes", "label": "Sync Hierarchical and Entity Attributes", "checkbox_key": "enabled", "children": [ @@ -81,12 +81,26 @@ }, { "type": "list", - "key": "interest_attributess", + "key": "interest_attributes", "label": "Attributes to sync", "object_type": { "type": "text", "multiline": false } + }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "action_enabled", + "label": "Enable Action" + }, + { + "type": "list", + "key": "role_list", + "label": "Roles for action", + "object_type": "text" } ] }, @@ -302,12 +316,44 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "label", + "label": "Change the status of the next task (in task type order) when a task status changes to \"Done\". All tasks of the same task type must be \"Done\" first." + }, + { + "type": "label", + "label": "Mapping of next task status changes From -> To." + }, { + "type": "dict-modifiable", + "key": "mapping", + "object_type": { + "type": "text" + } + }, + { + "type": "separator" + }, + { + "type": "label", + "label": "Status names that are ignored on the \"Done\" check (e.g. \"Omitted\")." + }, + { + "type": "list", + "key": "ignored_statuses", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "label", + "label": "Allow breaking the rule that all tasks of the same task type must be \"Done\"; statuses of same-type tasks are then changed in name order."
+ }, + { + "label": "Name sorting", + "type": "boolean", + "key": "name_sorting" } ] } diff --git a/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_resolve.json b/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_resolve.json index fcd649db83..fb9b9b7a0a 100644 --- a/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_resolve.json +++ b/pype/tools/settings/settings/gui_schemas/projects_schema/schema_project_resolve.json @@ -19,19 +19,102 @@ "is_group": true, "children": [ { - "type": "text", - "key": "clipName", - "label": "Clip name template" + "type": "collapsible-wrap", + "label": "Shot Hierarchy And Rename Settings", + "collapsable": false, + "children": [ + { + "type": "text", + "key": "hierarchy", + "label": "Shot parent hierarchy" + }, + { + "type": "boolean", + "key": "clipRename", + "label": "Rename clips" + }, + { + "type": "text", + "key": "clipName", + "label": "Clip name template" + }, + { + "type": "number", + "key": "countFrom", + "label": "Count sequence from" + }, + { + "type": "number", + "key": "countSteps", + "label": "Stepping number" + } + ] }, { - "type": "text", - "key": "folder", - "label": "Folder" + "type": "collapsible-wrap", + "label": "Shot Template Keywords", + "collapsable": false, + "children": [ + { + "type": "text", + "key": "folder", + "label": "{folder}" + }, + { + "type": "text", + "key": "episode", + "label": "{episode}" + }, + { + "type": "text", + "key": "sequence", + "label": "{sequence}" + }, + { + "type": "text", + "key": "track", + "label": "{track}" + }, + { + "type": "text", + "key": "shot", + "label": "{shot}" + } + ] }, { - "type": "number", - "key": "steps", - "label": "Steps" + "type": "collapsible-wrap", + "label": "Vertical Synchronization Of Attributes", + "collapsable": false, + "children": [ + { + "type": "boolean", + "key": "vSyncOn", + "label": "Enable Vertical Sync" + } + ] + }, + { + "type": "collapsible-wrap", + "label": "Shot Attributes", + "collapsable": false, + "children": [ + { + "type": "number", + "key": "workfileFrameStart", + "label": "Workfiles Start Frame" + }, + { + "type": "number", + "key": "handleStart", + "label": "Handle start (head)" + }, + { + "type": "number", + "key": "handleEnd", + "label": "Handle end (tail)" + } + ] } ] } diff --git a/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_aftereffects.json b/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_aftereffects.json index 8134071ebb..4578e836b5 100644 --- a/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_aftereffects.json +++ b/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_aftereffects.json @@ -21,7 +21,7 @@ "env_group_key": "aftereffects" }, { - "type": "dict-invisible", + "type": "dict", "key": "variants", "children": [ { diff --git a/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_tvpaint.json b/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_tvpaint.json index 44d5a54b6a..b328e5cf79 100644 --- a/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_tvpaint.json +++ b/pype/tools/settings/settings/gui_schemas/system_schema/host_settings/schema_tvpaint.json @@ -21,7 +21,7 @@ "env_group_key": "tvpaint" }, { - "type": "dict-invisible", + "type": "dict", "key": "variants", "children": [ { diff --git a/pype/tools/settings/settings/gui_schemas/system_schema/module_settings/schema_ftrack.json 
b/pype/tools/settings/settings/gui_schemas/system_schema/module_settings/schema_ftrack.json index ec4f415a12..0683bd10b5 100644 --- a/pype/tools/settings/settings/gui_schemas/system_schema/module_settings/schema_ftrack.json +++ b/pype/tools/settings/settings/gui_schemas/system_schema/module_settings/schema_ftrack.json @@ -87,66 +87,75 @@ "type": "dict", "children": [ { - "type": "dict-modifiable", - "label": "Show Attributes", + "type": "dict", "key": "show", - "object_type": { - "type": "dict", - "children": [ - { - "key": "default", - "label": "default", - "type": "text" - }, - { - "key": "write_security_role", - "label": "write", - "type": "list", - "object_type": { - "type": "text" + "label": "Project Custom attributes", + "children": [ + { + "type": "schema_template", + "name": "template_custom_attribute", + "template_data": [ + { + "key": "avalon_auto_sync" + }, + { + "key": "library_project" + }, + { + "key": "applications" } - }, - { - "key": "read_security_role", - "label": "Read", - "type": "list", - "object_type": { - "type": "text" - } - } - ] - } + ] + } + ] }, { - "type": "dict-modifiable", - "label": "Hierarchical Attributes", + "type": "dict", "key": "is_hierarchical", - "object_type": { - "type": "dict", - "children": [ - { - "key": "default", - "label": "default", - "type": "text" - }, - { - "key": "write_security_role", - "label": "write", - "type": "list", - "object_type": { - "type": "text" + "label": "Hierarchical Attributes", + "children": [ + { + "type": "schema_template", + "name": "template_custom_attribute", + "template_data": [ + { + "key": "tools_env" + }, + { + "key": "avalon_mongo_id" + }, + { + "key": "fps" + }, + { + "key": "frameStart" + }, + { + "key": "frameEnd" + }, + { + "key": "clipIn" + }, + { + "key": "clipOut" + }, + { + "key": "handleStart" + }, + { + "key": "handleEnd" + }, + { + "key": "resolutionWidth" + }, + { + "key": "resolutionHeight" + }, + { + "key": "pixelAspect" } - }, - { - "key": "read_security_role", - "label": "Read", - "type": "list", - "object_type": { - "type": "text" - } - } - ] - } + ] + } + ] } ] } diff --git a/pype/tools/settings/settings/gui_schemas/system_schema/module_settings/template_custom_attribute.json b/pype/tools/settings/settings/gui_schemas/system_schema/module_settings/template_custom_attribute.json new file mode 100644 index 0000000000..71b7f2ea53 --- /dev/null +++ b/pype/tools/settings/settings/gui_schemas/system_schema/module_settings/template_custom_attribute.json @@ -0,0 +1,21 @@ +[ + { + "key": "{key}", + "label": "{key}", + "type": "dict", + "children": [ + { + "key": "write_security_roles", + "label": "Write roles", + "type": "list", + "object_type": "text" + }, + { + "key": "read_security_roles", + "label": "Read roles", + "type": "list", + "object_type": "text" + } + ] + } +] diff --git a/pype/tools/settings/settings/style/style.css b/pype/tools/settings/settings/style/style.css index 82313d5cfa..f3eb3a258e 100644 --- a/pype/tools/settings/settings/style/style.css +++ b/pype/tools/settings/settings/style/style.css @@ -193,6 +193,9 @@ QPushButton[btn-type="expand-toggle"] { background-color: #21252B; } +#ShadowWidget { + font-size: 36pt; +} QTabWidget::pane { border-top-style: none; } diff --git a/pype/tools/settings/settings/widgets/base.py b/pype/tools/settings/settings/widgets/base.py index 8e16c3614c..0a788e7684 100644 --- a/pype/tools/settings/settings/widgets/base.py +++ b/pype/tools/settings/settings/widgets/base.py @@ -1,6 +1,7 @@ import os import copy import json +from enum import Enum from Qt 
import QtWidgets, QtCore, QtGui from pype.settings.constants import ( SYSTEM_SETTINGS_KEY, @@ -26,27 +27,60 @@ from pype.settings.lib import ( save_project_anatomy, apply_overrides, + get_system_settings, find_environments, DuplicatedEnvGroups ) from .widgets import UnsavedChangesDialog from . import lib -from avalon import io +from avalon.mongodb import ( + AvalonMongoConnection, + AvalonMongoDB +) from avalon.vendor import qtawesome +class CategoryState(Enum): + Idle = object() + Working = object() + + class SettingsCategoryWidget(QtWidgets.QWidget): schema_category = None initial_schema_name = None + state_changed = QtCore.Signal() + saved = QtCore.Signal(QtWidgets.QWidget) + def __init__(self, user_role, parent=None): super(SettingsCategoryWidget, self).__init__(parent) self.user_role = user_role + self._state = CategoryState.Idle + self.initialize_attributes() self.create_ui() - self.reset() + + @property + def state(self): + return self._state + + @state.setter + def state(self, value): + self.set_state(value) + + def set_state(self, state): + if self._state == state: + return + + self._state = state + self.state_changed.emit() + + # Process events so emitted signal is processed + app = QtWidgets.QApplication.instance() + if app: + app.processEvents() def initialize_attributes(self): self._hide_studio_overrides = False @@ -84,7 +118,9 @@ class SettingsCategoryWidget(QtWidgets.QWidget): scroll_widget.setWidgetResizable(True) scroll_widget.setWidget(content_widget) - footer_widget = QtWidgets.QWidget() + configurations_widget = QtWidgets.QWidget(self) + + footer_widget = QtWidgets.QWidget(configurations_widget) footer_layout = QtWidgets.QHBoxLayout(footer_widget) if self.user_role == "developer": @@ -95,7 +131,6 @@ class SettingsCategoryWidget(QtWidgets.QWidget): footer_layout.addWidget(spacer_widget, 1) footer_layout.addWidget(save_btn, 0) - configurations_widget = QtWidgets.QWidget() configurations_layout = QtWidgets.QVBoxLayout(configurations_widget) configurations_layout.setContentsMargins(0, 0, 0, 0) configurations_layout.setSpacing(0) @@ -186,12 +221,15 @@ class SettingsCategoryWidget(QtWidgets.QWidget): input_field.hierarchical_style_update() def reset(self): + self.set_state(CategoryState.Working) + reset_default_settings() self.keys.clear() self.input_fields.clear() while self.content_layout.count() != 0: widget = self.content_layout.itemAt(0).widget() + widget.setVisible(False) self.content_layout.removeWidget(widget) widget.deleteLater() @@ -203,8 +241,11 @@ class SettingsCategoryWidget(QtWidgets.QWidget): self.add_children_gui(self.schema) self._update_values() + self.hierarchical_style_update() + self.set_state(CategoryState.Idle) + def items_are_valid(self): has_invalid = False for item in self.input_fields: @@ -232,13 +273,21 @@ class SettingsCategoryWidget(QtWidgets.QWidget): first_invalid_item.setFocus(True) return False + def on_saved(self, saved_tab_widget): + """Callback on any tab widget save.""" + return + def _save(self): - if not self.items_are_valid(): - return + self.set_state(CategoryState.Working) - self.save() + if self.items_are_valid(): + self.save() - self._update_values() + self._update_values() + + self.set_state(CategoryState.Idle) + + self.saved.emit(self) def _on_refresh(self): self.reset() @@ -433,7 +482,7 @@ class ProjectListWidget(QtWidgets.QWidget): self.project_list = project_list - self.refresh() + self.dbcon = None def on_item_clicked(self, new_index): new_project_name = new_index.data(QtCore.Qt.DisplayRole) @@ -501,10 +550,32 @@ class 
ProjectListWidget(QtWidgets.QWidget): model = self.project_list.model() model.clear() + items = [self.default] - io.install() - for project_doc in tuple(io.projects()): - items.append(project_doc["name"]) + + system_settings = get_system_settings() + mongo_url = system_settings["modules"]["avalon"]["AVALON_MONGO"] + if not mongo_url: + mongo_url = os.environ["PYPE_MONGO"] + + # Force uninstall of the whole avalon connection if the url does + # not match the current environment, and set it as the environment + if mongo_url != os.environ["AVALON_MONGO"]: + AvalonMongoConnection.uninstall(self.dbcon, force=True) + os.environ["AVALON_MONGO"] = mongo_url + self.dbcon = None + + if not self.dbcon: + try: + self.dbcon = AvalonMongoDB() + self.dbcon.install() + except Exception: + self.dbcon = None + self.current_project = None + + if self.dbcon: + for project_doc in tuple(self.dbcon.projects()): + items.append(project_doc["name"]) for item in items: model.appendRow(QtGui.QStandardItem(item)) @@ -527,6 +598,7 @@ class ProjectWidget(SettingsCategoryWidget): def ui_tweaks(self): project_list_widget = ProjectListWidget(self) + project_list_widget.refresh() self.main_layout.insertWidget(0, project_list_widget, 0) @@ -541,7 +613,26 @@ # Projects do not have any specific validations return True + def on_saved(self, saved_tab_widget): + """Callback on any tab widget save. + + Check if AVALON_MONGO is still the same. + """ + if self is saved_tab_widget: + return + + system_settings = get_system_settings() + mongo_url = system_settings["modules"]["avalon"]["AVALON_MONGO"] + if not mongo_url: + mongo_url = os.environ["PYPE_MONGO"] + + # If the mongo url is not the same as it was, refresh projects + if mongo_url != os.environ["AVALON_MONGO"]: + self.project_list_widget.refresh() + def _on_project_change(self): + self.set_state(CategoryState.Working) + project_name = self.project_list_widget.project_name() if project_name is None: _project_overrides = lib.NOT_SET @@ -566,6 +657,8 @@ item.apply_overrides(overrides) self.ignore_value_changes = False + self.set_state(CategoryState.Idle) + def save(self): data = {} studio_overrides = bool(self.project_name is None) diff --git a/pype/tools/settings/settings/widgets/item_types.py b/pype/tools/settings/settings/widgets/item_types.py index c5112473e6..2e40a627d9 100644 --- a/pype/tools/settings/settings/widgets/item_types.py +++ b/pype/tools/settings/settings/widgets/item_types.py @@ -430,7 +430,7 @@ class SettingObject: return self.mouseReleaseEvent(event) return - menu = QtWidgets.QMenu() + menu = QtWidgets.QMenu(self) actions_mapping = {} if self.child_modified: @@ -4121,7 +4121,6 @@ TypeToKlass.types["dict-modifiable"] = ModifiableDict # DEPRECATED - remove when removed from schemas TypeToKlass.types["splitter"] = SplitterWidget TypeToKlass.types["dict-item"] = DictWidget -TypeToKlass.types["dict-invisible"] = DictWidget # --------------------------------------------- TypeToKlass.types["dict"] = DictWidget TypeToKlass.types["path-widget"] = PathWidget diff --git a/pype/tools/settings/settings/widgets/widgets.py b/pype/tools/settings/settings/widgets/widgets.py index 092591c165..e9fa6d0326 100644 --- a/pype/tools/settings/settings/widgets/widgets.py +++ b/pype/tools/settings/settings/widgets/widgets.py @@ -2,6 +2,48 @@ from Qt import QtWidgets, QtCore, QtGui from avalon.vendor import qtawesome +class ShadowWidget(QtWidgets.QWidget): + def __init__(self, message, parent): + super(ShadowWidget, 
self).__init__(parent) + self.setObjectName("ShadowWidget") + + self.parent_widget = parent + self.message = message + + def wrapper(func): + def wrapped(*args, **kwarg): + result = func(*args, **kwarg) + self._update_geometry() + return result + return wrapped + + parent.resizeEvent = wrapper(parent.resizeEvent) + parent.moveEvent = wrapper(parent.moveEvent) + parent.showEvent = wrapper(parent.showEvent) + + def set_message(self, message): + self.message = message + if self.isVisible(): + self.repaint() + + def _update_geometry(self): + self.setGeometry(self.parent_widget.rect()) + + def paintEvent(self, event): + painter = QtGui.QPainter(self) + painter.setRenderHint(QtGui.QPainter.Antialiasing) + painter.fillRect( + event.rect(), QtGui.QBrush(QtGui.QColor(0, 0, 0, 127)) + ) + if self.message: + painter.drawText( + event.rect(), + QtCore.Qt.AlignCenter, + self.message + ) + painter.end() + + class IconButton(QtWidgets.QPushButton): def __init__(self, icon_name, color, hover_color, *args, **kwargs): super(IconButton, self).__init__(*args, **kwargs) diff --git a/pype/tools/settings/settings/widgets/window.py b/pype/tools/settings/settings/widgets/window.py index 2dd5111d74..3afa47e3db 100644 --- a/pype/tools/settings/settings/widgets/window.py +++ b/pype/tools/settings/settings/widgets/window.py @@ -1,5 +1,6 @@ from Qt import QtWidgets, QtGui -from .base import SystemWidget, ProjectWidget +from .base import CategoryState, SystemWidget, ProjectWidget +from .widgets import ShadowWidget from .. import style @@ -22,6 +23,12 @@ class MainWidget(QtWidgets.QWidget): studio_widget = SystemWidget(user_role, header_tab_widget) project_widget = ProjectWidget(user_role, header_tab_widget) + + tab_widgets = [ + studio_widget, + project_widget + ] + header_tab_widget.addTab(studio_widget, "System") header_tab_widget.addTab(project_widget, "Project") @@ -31,3 +38,39 @@ layout.addWidget(header_tab_widget) self.setLayout(layout) + + self._shadow_widget = ShadowWidget("Working...", self) + + for tab_widget in tab_widgets: + tab_widget.saved.connect(self._on_tab_save) + tab_widget.state_changed.connect(self._on_state_change) + + self.tab_widgets = tab_widgets + + def _on_tab_save(self, source_widget): + for tab_widget in self.tab_widgets: + tab_widget.on_saved(source_widget) + + def _on_state_change(self): + any_working = False + for widget in self.tab_widgets: + if widget.state is CategoryState.Working: + any_working = True + break + + if ( + (any_working and self._shadow_widget.isVisible()) + or (not any_working and not self._shadow_widget.isVisible()) + ): + return + + self._shadow_widget.setVisible(any_working) + + # Process events to apply shadow widget visibility + app = QtWidgets.QApplication.instance() + if app: + app.processEvents() + + def reset(self): + for tab_widget in self.tab_widgets: + tab_widget.reset() diff --git a/pype/tools/tray/pype_tray.py b/pype/tools/tray/pype_tray.py index 1fec95906d..c27df16276 100644 --- a/pype/tools/tray/pype_tray.py +++ b/pype/tools/tray/pype_tray.py @@ -55,6 +55,9 @@ class TrayManager: # Tell each module which modules were imported self.modules_manager.start_modules() + # Print time report + self.modules_manager.print_report() + def _add_version_item(self): subversion = os.environ.get("PYPE_SUBVERSION") client_name = os.environ.get("PYPE_CLIENT") diff --git a/pype/tools/workfiles/README.md b/pype/tools/workfiles/README.md new file mode 100644 index 0000000000..92ad4a8577 --- /dev/null +++ 
b/pype/tools/workfiles/README.md @@ -0,0 +1,143 @@ +# Workfiles App + +The Workfiles app facilitates easy saving, creation and launching of work files. + +The currently supported hosts are: + +- Maya +- Houdini +- Fusion + +The app is available inside hosts via the ```Avalon > Work Files``` menu. + +## Enabling Workfiles on launch + +By default the Workfiles app will not launch on startup, so it has to be explicitly enabled in a config. + +```python +workfiles.show() +``` + +## Naming Files + +The Workfiles app enables the user to easily save and create new work files. + +The user is presented with two parameters: ```version``` and ```comment```. The name of the work file is determined from a template. + +### ```Next Available Version``` + +Will search for the next version number that is not in use. + +## Templates + +The default template for work files is ```{task[name]}_v{version:0>4}<_{comment}>```. Launching Maya on an animation task and creating a version 1 will result in ```animation_v0001.ma```. Adding "blocking" to the optional comment input will result in ```animation_v0001_blocking.ma```. + +This template can be customized per project with the ```workfile``` template. + +There are other variables to customize the template with: + +```python +{ + "project": project, # The project data from the database. + "asset": asset, # The asset data from the database. + "task": { + "label": label, # Label of task chosen. + "name": name # Sanitized version of the label. + }, + "user": user, # Name of the user on the machine. + "version": version, # Version chosen by the user. + "comment": comment, # Comment chosen by the user. +} +``` + +### Optional template groups + +The default template contains an optional template group ```<_{comment}>```. If any template group (```{comment}```) within angle brackets ```<>``` does not exist, the whole optional group is discarded. + + +## Implementing a new host integration for Work Files + +For the Work Files tool to work with a new host integration the host must +implement the following functions: + +- `file_extensions()`: The files the host should allow to open and show in the Work Files view. +- `open_file(filepath)`: Open a file. +- `save_file(filepath)`: Save the current file. This should return None if it failed to save, and return the path if it succeeded. +- `has_unsaved_changes()`: Return whether the current scene has unsaved changes. +- `current_file()`: The path to the current file. None if not saved. +- `work_root()`: The path to where the work files for this app should be saved. + +Here's an example code layout: + +```python +def file_extensions(): + """Return the filename extension formats that should be shown. + + Note: + The first entry in the list will be used as the default file + format to save to when the current scene is not saved yet. + + Returns: + list: A list of the file extensions supported by Work Files. + + """ + return list() + + +def has_unsaved_changes(): + """Return whether current file has unsaved modifications.""" + + +def save_file(filepath): + """Save to filepath. + + This should return None if it failed to save, and return the path if it + succeeded. + """ + pass + + +def open_file(filepath): + """Open file""" + pass + + +def current_file(): + """Return path to currently open file or None if not saved. + + Returns: + str or None: The full path to current file or None when not saved. + + """ + pass + + +def work_root(): + """Return the default root for the Host to browse in for Work Files + + Returns: + str: The path to look in. 
+ + """ + pass +``` + +#### Work Files Scenes root (AVALON_SCENEDIR) + +Whenever the host application has no built-in implementation that defines +where scene files should be saved to then the Work Files API for that host +should fall back to the `AVALON_SCENEDIR` variable in `api.Session`. + +When `AVALON_SCENEDIR` is set the directory is the relative folder inside the +`AVALON_WORKDIR`. Otherwise, when it is not set or empty it should fall back +to the Work Directory's root, `AVALON_WORKDIR` + +```python +AVALON_WORKDIR="/path/to/work" +AVALON_SCENEDIR="scenes" +# Result: /path/to/work/scenes + +AVALON_WORKDIR="/path/to/work" +AVALON_SCENEDIR=None +# Result: /path/to/work +``` \ No newline at end of file diff --git a/pype/tools/workfiles/__init__.py b/pype/tools/workfiles/__init__.py new file mode 100644 index 0000000000..cde7293931 --- /dev/null +++ b/pype/tools/workfiles/__init__.py @@ -0,0 +1,9 @@ +from .app import ( + show, + Window +) + +__all__ = [ + "show", + "Window" +] diff --git a/pype/tools/workfiles/app.py b/pype/tools/workfiles/app.py new file mode 100644 index 0000000000..e6b211152a --- /dev/null +++ b/pype/tools/workfiles/app.py @@ -0,0 +1,1166 @@ +import sys +import os +import copy +import getpass +import shutil +import logging +import datetime + +import Qt +from Qt import QtWidgets, QtCore +from avalon import style, io, api, pipeline + +from avalon.tools import lib as tools_lib +from avalon.tools.widgets import AssetWidget +from avalon.tools.models import TasksModel +from avalon.tools.delegates import PrettyTimeDelegate + +from .model import FilesModel +from .view import FilesView + +from pype.lib import ( + Anatomy, + get_workdir, + get_workfile_doc, + create_workfile_doc, + save_workfile_data_to_doc +) + +log = logging.getLogger(__name__) + +module = sys.modules[__name__] +module.window = None + + +class NameWindow(QtWidgets.QDialog): + """Name Window to define a unique filename inside a root folder + + The filename will be based on the "workfile" template defined in the + project["config"]["template"]. 
+ + """ + + def __init__(self, parent, root, anatomy, template_key, session=None): + super(NameWindow, self).__init__(parent=parent) + self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) + + self.result = None + self.host = api.registered_host() + self.root = root + self.work_file = None + + if not session: + # Fallback to active session + session = api.Session + + # Set work file data for template formatting + asset_name = session["AVALON_ASSET"] + project_doc = io.find_one({ + "type": "project" + }) + self.data = { + "project": { + "name": project_doc["name"], + "code": project_doc["data"].get("code") + }, + "asset": asset_name, + "task": session["AVALON_TASK"], + "version": 1, + "user": getpass.getuser(), + "comment": "", + "ext": None + } + + # Store project anatomy + self.anatomy = anatomy + self.template = anatomy.templates[template_key]["file"] + self.template_key = template_key + + # Btns widget + btns_widget = QtWidgets.QWidget(self) + + btn_ok = QtWidgets.QPushButton("Ok", btns_widget) + btn_cancel = QtWidgets.QPushButton("Cancel", btns_widget) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.addWidget(btn_ok) + btns_layout.addWidget(btn_cancel) + + # Inputs widget + inputs_widget = QtWidgets.QWidget(self) + + # Version widget + version_widget = QtWidgets.QWidget(inputs_widget) + + # Version number input + version_input = QtWidgets.QSpinBox(version_widget) + version_input.setMinimum(1) + version_input.setMaximum(9999) + + # Last version checkbox + last_version_check = QtWidgets.QCheckBox( + "Next Available Version", version_widget + ) + last_version_check.setChecked(True) + + version_layout = QtWidgets.QHBoxLayout(version_widget) + version_layout.setContentsMargins(0, 0, 0, 0) + version_layout.addWidget(version_input) + version_layout.addWidget(last_version_check) + + # Preview widget + preview_label = QtWidgets.QLabel("Preview filename", inputs_widget) + + # Subversion input + subversion_input = QtWidgets.QLineEdit(inputs_widget) + subversion_input.setPlaceholderText("Will be part of filename.") + + # Extensions combobox + ext_combo = QtWidgets.QComboBox(inputs_widget) + ext_combo.addItems(self.host.file_extensions()) + + # Build inputs + inputs_layout = QtWidgets.QFormLayout(inputs_widget) + # Add version only if template contain version key + # - since the version can be padded with "{version:0>4}" we only search + # for "{version". + if "{version" in self.template: + inputs_layout.addRow("Version:", version_widget) + + # Add subversion only if template containt `{comment}` + if "{comment}" in self.template: + inputs_layout.addRow("Subversion:", subversion_input) + inputs_layout.addRow("Extension:", ext_combo) + inputs_layout.addRow("Preview:", preview_label) + + # Build layout + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(inputs_widget) + main_layout.addWidget(btns_widget) + + # Singal callback registration + version_input.valueChanged.connect(self.on_version_spinbox_changed) + last_version_check.stateChanged.connect( + self.on_version_checkbox_changed + ) + + subversion_input.textChanged.connect(self.on_comment_changed) + ext_combo.currentIndexChanged.connect(self.on_extension_changed) + + btn_ok.pressed.connect(self.on_ok_pressed) + btn_cancel.pressed.connect(self.on_cancel_pressed) + + # Allow "Enter" key to accept the save. + btn_ok.setDefault(True) + + # Force default focus to comment, some hosts didn't automatically + # apply focus to this line edit (e.g. 
Houdini) + subversion_input.setFocus() + + # Store widgets + self.btn_ok = btn_ok + + self.version_widget = version_widget + + self.version_input = version_input + self.last_version_check = last_version_check + + self.preview_label = preview_label + self.subversion_input = subversion_input + self.ext_combo = ext_combo + + self.refresh() + + def on_version_spinbox_changed(self, value): + self.data["version"] = value + self.refresh() + + def on_version_checkbox_changed(self, _value): + self.refresh() + + def on_comment_changed(self, text): + self.data["comment"] = text + self.refresh() + + def on_extension_changed(self): + ext = self.ext_combo.currentText() + if ext == self.data["ext"]: + return + self.data["ext"] = ext + self.refresh() + + def on_ok_pressed(self): + self.result = self.work_file + self.close() + + def on_cancel_pressed(self): + self.close() + + def get_result(self): + return self.result + + def get_work_file(self): + data = copy.deepcopy(self.data) + if not data["comment"]: + data.pop("comment", None) + + data["ext"] = data["ext"][1:] + + anatomy_filled = self.anatomy.format(data) + return anatomy_filled[self.template_key]["file"] + + def refresh(self): + extensions = self.host.file_extensions() + extension = self.data["ext"] + if extension is None: + # Define saving file extension + current_file = self.host.current_file() + if current_file: + # Match the extension of the current file + _, extension = os.path.splitext(current_file) + else: + extension = extensions[0] + + if extension != self.data["ext"]: + self.data["ext"] = extension + index = self.ext_combo.findText( + extension, QtCore.Qt.MatchFixedString + ) + if index >= 0: + self.ext_combo.setCurrentIndex(index) + + if not self.last_version_check.isChecked(): + self.version_input.setEnabled(True) + self.data["version"] = self.version_input.value() + + work_file = self.get_work_file() + + else: + self.version_input.setEnabled(False) + + data = copy.deepcopy(self.data) + template = str(self.template) + + if not data["comment"]: + data.pop("comment", None) + + data["ext"] = data["ext"][1:] + + version = api.last_workfile_with_version( + self.root, template, data, extensions + )[1] + + if version is None: + version = 1 + else: + version += 1 + + found_valid_version = False + # Check if the next version is a valid version and give a chance + # to try the next 100 versions + for idx in range(100): + # Store version to data + self.data["version"] = version + + work_file = self.get_work_file() + # Safety check + path = os.path.join(self.root, work_file) + if not os.path.exists(path): + found_valid_version = True + break + + # Try next version + version += 1 + # Log warning + if idx == 0: + log.warning(( + "BUG: Function `last_workfile_with_version` " + "didn't return last version." + )) + # Raise exception if even the 100-version fallback didn't help + if not found_valid_version: + raise AssertionError( + "This is a bug. Couldn't find valid version!" + ) + + self.work_file = work_file + + path_exists = os.path.exists(os.path.join(self.root, work_file)) + + self.btn_ok.setEnabled(not path_exists) + + if path_exists: + self.preview_label.setText( + "Cannot create \"{0}\" because file exists!"
+ "".format(work_file) + ) + else: + self.preview_label.setText( + "{0}".format(work_file) + ) + + +class TasksWidget(QtWidgets.QWidget): + """Widget showing active Tasks""" + + task_changed = QtCore.Signal() + + def __init__(self, parent=None): + super(TasksWidget, self).__init__(parent) + self.setContentsMargins(0, 0, 0, 0) + + view = QtWidgets.QTreeView() + view.setIndentation(0) + model = TasksModel(io) + view.setModel(model) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(view) + + # Hide the default tasks "count" as we don't need that data here. + view.setColumnHidden(1, True) + + selection = view.selectionModel() + selection.currentChanged.connect(self.task_changed) + + self.models = { + "tasks": model + } + + self.widgets = { + "view": view, + } + + self._last_selected_task = None + + def set_asset(self, asset): + if asset is None: + # Asset deselected + return + + # Try and preserve the last selected task and reselect it + # after switching assets. If there's no currently selected + # asset keep whatever the "last selected" was prior to it. + current = self.get_current_task() + if current: + self._last_selected_task = current + + self.models["tasks"].set_assets(asset_docs=[asset]) + + if self._last_selected_task: + self.select_task(self._last_selected_task) + + # Force a task changed emit. + self.task_changed.emit() + + def select_task(self, task): + """Select a task by name. + + If the task does not exist in the current model then selection is only + cleared. + + Args: + task (str): Name of the task to select. + + """ + + # Clear selection + view = self.widgets["view"] + model = view.model() + selection_model = view.selectionModel() + selection_model.clearSelection() + + # Select the task + mode = selection_model.Select | selection_model.Rows + for row in range(model.rowCount(QtCore.QModelIndex())): + index = model.index(row, 0, QtCore.QModelIndex()) + name = index.data(QtCore.Qt.DisplayRole) + if name == task: + selection_model.select(index, mode) + + # Set the currently active index + view.setCurrentIndex(index) + + def get_current_task(self): + """Return name of task at current index (selected) + + Returns: + str: Name of the current task. + + """ + view = self.widgets["view"] + index = view.currentIndex() + index = index.sibling(index.row(), 0) # ensure column zero for name + + selection = view.selectionModel() + if selection.isSelected(index): + # Ignore when the current task is not selected as the "No task" + # placeholder might be the current index even though it's + # disallowed to be selected. So we only return if it is selected. + return index.data(QtCore.Qt.DisplayRole) + + +class FilesWidget(QtWidgets.QWidget): + """A widget displaying files that allows to save and open files.""" + file_selected = QtCore.Signal(str) + workfile_created = QtCore.Signal(str) + + def __init__(self, parent=None): + super(FilesWidget, self).__init__(parent=parent) + + # Setup + self._asset = None + self._task = None + + # Pype's anatomy object for current project + self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) + # Template key used to get work template from anatomy templates + # TODO change template key based on task + self.template_key = "work" + + # This is not root but workfile directory + self.root = None + self.host = api.registered_host() + + # Whether to automatically select the latest modified + # file on a refresh of the files model. 
+ self.auto_select_latest_modified = True + + # Avoid crash in Blender and store the message box + # (setting parent doesn't work as it hides the message box) + self._messagebox = None + + files_view = FilesView(self) + + # Create the Files model + extensions = set(self.host.file_extensions()) + files_model = FilesModel(file_extensions=extensions) + + # Create proxy model for files to be able to sort and filter + proxy_model = QtCore.QSortFilterProxyModel() + proxy_model.setSourceModel(files_model) + proxy_model.setDynamicSortFilter(True) + proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) + + # Set up the file list tree view + files_view.setModel(proxy_model) + files_view.setSortingEnabled(True) + files_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + + # Date modified delegate + time_delegate = PrettyTimeDelegate() + files_view.setItemDelegateForColumn(1, time_delegate) + files_view.setIndentation(3) # smaller indentation + + # Default to a wider first filename column; it is what we mostly care + # about, and the date modified column is relatively small anyway. + files_view.setColumnWidth(0, 330) + + # Filtering input + filter_input = QtWidgets.QLineEdit(self) + filter_input.textChanged.connect(proxy_model.setFilterFixedString) + filter_input.setPlaceholderText("Filter files...") + + # Home Page + # Build buttons widget for files widget + btns_widget = QtWidgets.QWidget(self) + btn_save = QtWidgets.QPushButton("Save As", btns_widget) + btn_browse = QtWidgets.QPushButton("Browse", btns_widget) + btn_open = QtWidgets.QPushButton("Open", btns_widget) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.setContentsMargins(0, 0, 0, 0) + btns_layout.addWidget(btn_open) + btns_layout.addWidget(btn_browse) + btns_layout.addWidget(btn_save) + + # Build files widgets for home page + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.addWidget(filter_input) + main_layout.addWidget(files_view) + main_layout.addWidget(btns_widget) + + # Register signal callbacks + files_view.doubleClickedLeft.connect(self.on_open_pressed) + files_view.customContextMenuRequested.connect(self.on_context_menu) + files_view.selectionModel().selectionChanged.connect( + self.on_file_select + ) + + btn_open.pressed.connect(self.on_open_pressed) + btn_browse.pressed.connect(self.on_browse_pressed) + btn_save.pressed.connect(self.on_save_as_pressed) + + # Store attributes + self.time_delegate = time_delegate + + self.filter_input = filter_input + + self.files_view = files_view + self.files_model = files_model + + self.btns_widget = btns_widget + self.btn_open = btn_open + self.btn_browse = btn_browse + self.btn_save = btn_save + + def set_asset_task(self, asset, task): + self._asset = asset + self._task = task + + # Define a custom session so we can query the work root + # for a "Work area" that is not our current Session. + # This way we can browse it even before we enter it. 
+
+ def set_asset_task(self, asset, task):
+ self._asset = asset
+ self._task = task
+
+ # Define a custom session so we can query the work root
+ # for a "Work area" that is not our current Session.
+ # This way we can browse it even before we enter it.
+ if self._asset and self._task:
+ session = self._get_session()
+ self.root = self.host.work_root(session)
+ self.files_model.set_root(self.root)
+
+ else:
+ self.files_model.set_root(None)
+
+ # Disable/Enable buttons based on available files in the model
+ has_filenames = self.files_model.has_filenames()
+ self.btn_browse.setEnabled(has_filenames)
+ self.btn_open.setEnabled(has_filenames)
+ if not has_filenames:
+ # Manually trigger file selection
+ self.on_file_select()
+
+ def _get_session(self):
+ """Return a modified session for the current asset and task"""
+
+ session = api.Session.copy()
+ changes = pipeline.compute_session_changes(
+ session,
+ asset=self._asset,
+ task=self._task
+ )
+ session.update(changes)
+
+ return session
+
+ def _enter_session(self):
+ """Enter the asset and task session currently selected"""
+
+ session = api.Session.copy()
+ changes = pipeline.compute_session_changes(
+ session,
+ asset=self._asset,
+ task=self._task
+ )
+ if not changes:
+ # Return early if we're already in the right Session context
+ # to avoid triggering any unwanted Task Changed callbacks.
+ return
+
+ api.update_current_task(asset=self._asset, task=self._task)
+
+ def open_file(self, filepath):
+ host = self.host
+ if host.has_unsaved_changes():
+ result = self.save_changes_prompt()
+ if result is None:
+ # Cancel operation
+ return False
+
+ # Save first if there are changes
+ if result:
+ current_file = host.current_file()
+ if not current_file:
+ # If the user requested to save the current scene
+ # we can't actually automatically do so if the current
+ # file has not been saved with a name yet. So we'll have
+ # to opt out.
+ log.error("Can't save scene with no filename. Please "
+ "first save your work file using 'Save As'.")
+ return
+
+ # Save the current scene, then continue to open the file
+ host.save_file(current_file)
+
+ self._enter_session()
+ host.open_file(filepath)
+ self.window().close()
+
+ def save_changes_prompt(self):
+ self._messagebox = messagebox = QtWidgets.QMessageBox()
+
+ messagebox.setWindowFlags(QtCore.Qt.FramelessWindowHint)
+ messagebox.setIcon(messagebox.Warning)
+ messagebox.setWindowTitle("Unsaved Changes!")
+ messagebox.setText(
+ "There are unsaved changes to the current file."
+ "\nDo you want to save the changes?"
+ )
+ messagebox.setStandardButtons(
+ messagebox.Yes | messagebox.No | messagebox.Cancel
+ )
+
+ # Parenting the QMessageBox to the widget seems to crash,
+ # so we skip parenting and explicitly apply the stylesheet.
+ messagebox.setStyleSheet(style.load_stylesheet())
+
+ result = messagebox.exec_()
+ if result == messagebox.Yes:
+ return True
+ elif result == messagebox.No:
+ return False
+ return None
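
`_get_session` and `_enter_session` above share one pattern: copy the global session, compute the delta for a target asset and task, then either inspect the copy (read-only queries) or commit the switch. A condensed sketch of the read-only side, assuming the module's `api` and `pipeline` imports; `query_work_root` is a hypothetical caller:

def query_work_root(host, asset, task):
    """Resolve the work root of (asset, task) without entering it."""
    session = api.Session.copy()
    changes = pipeline.compute_session_changes(
        session, asset=asset, task=task
    )
    session.update(changes)
    # The global api.Session is untouched; only the copy was modified.
    return host.work_root(session)
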
+
+ def get_filename(self):
+ """Show a save dialog to define the filename for save or duplicate.
+
+ Returns:
+ str: The filename to create.
+
+ """
+ session = self._get_session()
+
+ window = NameWindow(
+ parent=self,
+ root=self.root,
+ anatomy=self.anatomy,
+ template_key=self.template_key,
+ session=session
+ )
+ window.exec_()
+
+ return window.get_result()
+
+ def on_duplicate_pressed(self):
+ work_file = self.get_filename()
+ if not work_file:
+ return
+
+ src = self._get_selected_filepath()
+ dst = os.path.join(self.root, work_file)
+ shutil.copy(src, dst)
+
+ self.workfile_created.emit(dst)
+
+ self.refresh()
+
+ def _get_selected_filepath(self):
+ """Return the filepath currently selected in the view"""
+ selection = self.files_view.selectionModel()
+ index = selection.currentIndex()
+ if not index.isValid():
+ return
+
+ return index.data(self.files_model.FilePathRole)
+
+ def on_open_pressed(self):
+ path = self._get_selected_filepath()
+ if not path:
+ print("No file selected to open...")
+ return
+
+ self.open_file(path)
+
+ def on_browse_pressed(self):
+ ext_filter = "Work File (*{0})".format(
+ " *".join(self.host.file_extensions())
+ )
+ kwargs = {
+ "caption": "Work Files",
+ "filter": ext_filter
+ }
+ if Qt.__binding__ in ("PySide", "PySide2"):
+ kwargs["dir"] = self.root
+ else:
+ kwargs["directory"] = self.root
+
+ work_file = QtWidgets.QFileDialog.getOpenFileName(**kwargs)[0]
+ if work_file:
+ self.open_file(work_file)
+
+ def on_save_as_pressed(self):
+ work_file = self.get_filename()
+ if not work_file:
+ return
+
+ # Initialize the work directory if it has not been initialized before
+ if not os.path.exists(self.root):
+ log.debug("Initializing Work Directory: %s", self.root)
+ self.initialize_work_directory()
+ if not os.path.exists(self.root):
+ # Failed to initialize the Work Directory
+ log.error(
+ "Failed to initialize Work Directory: {}".format(self.root)
+ )
+ return
+
+ file_path = os.path.join(self.root, work_file)
+
+ self._enter_session() # Make sure we are in the right session
+ self.host.save_file(file_path)
+
+ self.set_asset_task(self._asset, self._task)
+
+ self.workfile_created.emit(file_path)
+
+ self.refresh()
+
+ def on_file_select(self):
+ self.file_selected.emit(self._get_selected_filepath())
+
+ def initialize_work_directory(self):
+ """Initialize the Work Directory.
+
+ This is used when the Work Directory does not exist yet: it computes
+ the workdir from the current session's project, asset, task and app
+ and creates the folder on disk. Note that this is only valid whenever
+ `AVALON_APP_NAME` is actually set in the current session.
+
+ """
+
+ # Inputs (from the switched session and running app)
+ session = api.Session.copy()
+ changes = pipeline.compute_session_changes(
+ session,
+ asset=self._asset,
+ task=self._task
+ )
+ session.update(changes)
+
+ # Prepare documents to get workdir data
+ project_doc = io.find_one({"type": "project"})
+ asset_doc = io.find_one(
+ {
+ "type": "asset",
+ "name": session["AVALON_ASSET"]
+ }
+ )
+ task_name = session["AVALON_TASK"]
+ host_name = session["AVALON_APP"]
+
+ # Get the workdir from the collected documents
+ workdir = get_workdir(project_doc, asset_doc, task_name, host_name)
+ # Create the workdir if it does not exist yet
+ if not os.path.exists(workdir):
+ os.makedirs(workdir)
+
+ # Force a full reset of the asset and task as opposed to just
+ # self.refresh() so that it will actually check again whether the
+ # Work directory exists
+ self.set_asset_task(self._asset, self._task)
+
+ def refresh(self):
+ """Refresh the listed files for the current selection in the interface"""
+ self.files_model.refresh()
+
+ if self.auto_select_latest_modified:
+ tools_lib.schedule(self._select_last_modified_file, 100)
+
+ def on_context_menu(self, point):
+ index = self.files_view.indexAt(point)
+ if not index.isValid():
+ return
+
+ is_enabled = index.data(FilesModel.IsEnabled)
+ if not is_enabled:
+ return
+
+ menu = QtWidgets.QMenu(self)
+
+ # Duplicate
+ action = QtWidgets.QAction("Duplicate", menu)
+ tip = "Duplicate selected file."
+ action.setToolTip(tip)
+ action.setStatusTip(tip)
+ action.triggered.connect(self.on_duplicate_pressed)
+ menu.addAction(action)
+
+ # Show the context action menu
+ global_point = self.files_view.mapToGlobal(point)
+ action = menu.exec_(global_point)
+ if not action:
+ return
+
+ def _select_last_modified_file(self):
+ """Utility function to select the file with the latest date modified"""
+ role = self.files_model.DateModifiedRole
+ model = self.files_view.model()
+
+ highest_index = None
+ highest = 0
+ for row in range(model.rowCount()):
+ index = model.index(row, 0, parent=QtCore.QModelIndex())
+ if not index.isValid():
+ continue
+
+ modified = index.data(role)
+ if modified is not None and modified > highest:
+ highest_index = index
+ highest = modified
+
+ if highest_index:
+ self.files_view.setCurrentIndex(highest_index)
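
The `tools_lib.schedule(...)` call in `refresh` above debounces the disk query: repeated calls on the same channel within the delay window collapse into a single invocation. A minimal sketch of that pattern, assuming a running Qt event loop; this is an illustration of the idea, not avalon's actual implementation:

from Qt import QtCore

_timers = {}

def schedule(func, time_ms, channel="default"):
    """Run `func` after `time_ms`, cancelling any pending call on `channel`."""
    existing = _timers.get(channel)
    if existing is not None:
        existing.stop()

    timer = QtCore.QTimer()
    timer.setSingleShot(True)
    timer.timeout.connect(func)
    timer.start(time_ms)
    # Keep a reference so the timer is not garbage collected early.
    _timers[channel] = timer
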
+
+
+class SidePanelWidget(QtWidgets.QWidget):
+ save_clicked = QtCore.Signal()
+
+ def __init__(self, parent=None):
+ super(SidePanelWidget, self).__init__(parent)
+
+ details_label = QtWidgets.QLabel("Details", self)
+ details_input = QtWidgets.QPlainTextEdit(self)
+ details_input.setReadOnly(True)
+
+ note_label = QtWidgets.QLabel("Artist note", self)
+ note_input = QtWidgets.QPlainTextEdit(self)
+ btn_note_save = QtWidgets.QPushButton("Save note", self)
+
+ main_layout = QtWidgets.QVBoxLayout(self)
+ main_layout.setContentsMargins(0, 0, 0, 0)
+ main_layout.addWidget(details_label, 0)
+ main_layout.addWidget(details_input, 0)
+ main_layout.addWidget(note_label, 0)
+ main_layout.addWidget(note_input, 1)
+ main_layout.addWidget(btn_note_save, alignment=QtCore.Qt.AlignRight)
+
+ note_input.textChanged.connect(self.on_note_change)
+ btn_note_save.clicked.connect(self.on_save_click)
+
+ self.details_input = details_input
+ self.note_input = note_input
+ self.btn_note_save = btn_note_save
+
+ self._orig_note = ""
+ self._workfile_doc = None
+
+ def on_note_change(self):
+ text = self.note_input.toPlainText()
+ self.btn_note_save.setEnabled(self._orig_note != text)
+
+ def on_save_click(self):
+ self._orig_note = self.note_input.toPlainText()
+ self.on_note_change()
+ self.save_clicked.emit()
+
+ def set_context(self, asset_doc, task_name, filepath, workfile_doc):
+ # Check if asset, task and file are selected
+ # NOTE the workfile document is not a requirement
+ enabled = bool(asset_doc) and bool(task_name) and bool(filepath)
+
+ self.details_input.setEnabled(enabled)
+ self.note_input.setEnabled(enabled)
+ self.btn_note_save.setEnabled(enabled)
+
+ # Make sure the workfile doc is overridden
+ self._workfile_doc = workfile_doc
+ # Disable inputs and clear texts if any required arguments are missing
+ if not enabled:
+ self._orig_note = ""
+ self.details_input.setPlainText("")
+ self.note_input.setPlainText("")
+ return
+
+ orig_note = ""
+ if workfile_doc:
+ orig_note = workfile_doc["data"].get("note") or orig_note
+
+ self._orig_note = orig_note
+ self.note_input.setPlainText(orig_note)
+ # Clear the details before appending the file information
+ self.details_input.setPlainText("")
+
+ filestat = os.stat(filepath)
+ # Ordered from the smallest to the largest unit; the loop below
+ # relies on this insertion order.
+ size_ending_mapping = {
+ "KB": 1024 ** 1,
+ "MB": 1024 ** 2,
+ "GB": 1024 ** 3
+ }
+ size = filestat.st_size
+ ending = "B"
+ for _ending, _size in size_ending_mapping.items():
+ if filestat.st_size < _size:
+ break
+ size = filestat.st_size / _size
+ ending = _ending
+
+ # Append the file information as an HTML string
+ datetime_format = "%b %d %Y %H:%M:%S"
+ creation_time = datetime.datetime.fromtimestamp(filestat.st_ctime)
+ modification_time = datetime.datetime.fromtimestamp(filestat.st_mtime)
+ lines = (
+ "Size:",
+ "{:.2f} {}".format(size, ending),
+ "Created:",
+ creation_time.strftime(datetime_format),
+ "Modified:",
+ modification_time.strftime(datetime_format)
+ )
+ self.details_input.appendHtml("<br>".join(lines))
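
The unit loop in `set_context` above picks a human-readable size by walking the units from smallest to largest. The same arithmetic as a standalone helper; `format_size` is illustrative and not part of this change:

def format_size(num_bytes):
    """Return `num_bytes` as a human-readable string, e.g. 1536 -> '1.50 KB'."""
    size = num_bytes
    ending = "B"
    # Ordered from the smallest to the largest unit.
    for _ending, _size in (("KB", 1024), ("MB", 1024 ** 2), ("GB", 1024 ** 3)):
        if num_bytes < _size:
            break
        size = num_bytes / _size
        ending = _ending
    return "{:.2f} {}".format(size, ending)
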
".join(lines)) + + def get_workfile_data(self): + data = { + "note": self.note_input.toPlainText() + } + return self._workfile_doc, data + + +class Window(QtWidgets.QMainWindow): + """Work Files Window""" + title = "Work Files" + + def __init__(self, parent=None): + super(Window, self).__init__(parent=parent) + self.setWindowTitle(self.title) + self.setWindowFlags(QtCore.Qt.Window | QtCore.Qt.WindowCloseButtonHint) + + # Create pages widget and set it as central widget + pages_widget = QtWidgets.QStackedWidget(self) + self.setCentralWidget(pages_widget) + + home_page_widget = QtWidgets.QWidget(pages_widget) + home_body_widget = QtWidgets.QWidget(home_page_widget) + + assets_widget = AssetWidget(io, parent=home_body_widget) + tasks_widget = TasksWidget(home_body_widget) + files_widget = FilesWidget(home_body_widget) + side_panel = SidePanelWidget(home_body_widget) + + pages_widget.addWidget(home_page_widget) + + # Build home + home_page_layout = QtWidgets.QVBoxLayout(home_page_widget) + home_page_layout.addWidget(home_body_widget) + + # Build home - body + body_layout = QtWidgets.QVBoxLayout(home_body_widget) + split_widget = QtWidgets.QSplitter(home_body_widget) + split_widget.addWidget(assets_widget) + split_widget.addWidget(tasks_widget) + split_widget.addWidget(files_widget) + split_widget.addWidget(side_panel) + split_widget.setStretchFactor(0, 1) + split_widget.setStretchFactor(1, 1) + split_widget.setStretchFactor(2, 3) + split_widget.setStretchFactor(3, 1) + body_layout.addWidget(split_widget) + + # Add top margin for tasks to align it visually with files as + # the files widget has a filter field which tasks does not. + tasks_widget.setContentsMargins(0, 32, 0, 0) + + # Connect signals + assets_widget.current_changed.connect(self.on_asset_changed) + tasks_widget.task_changed.connect(self.on_task_changed) + files_widget.file_selected.connect(self.on_file_select) + files_widget.workfile_created.connect(self.on_workfile_create) + side_panel.save_clicked.connect(self.on_side_panel_save) + + self.home_page_widget = home_page_widget + self.pages_widget = pages_widget + self.home_body_widget = home_body_widget + self.split_widget = split_widget + + self.assets_widget = assets_widget + self.tasks_widget = tasks_widget + self.files_widget = files_widget + self.side_panel = side_panel + + self.refresh() + + # Force focus on the open button by default, required for Houdini. + files_widget.btn_open.setFocus() + + self.resize(1000, 600) + + def keyPressEvent(self, event): + """Custom keyPressEvent. + + Override keyPressEvent to do nothing so that Maya's panels won't + take focus when pressing "SHIFT" whilst mouse is over viewport or + outliner. This way users don't accidently perform Maya commands + whilst trying to name an instance. 
+ + """ + + def on_task_changed(self): + # Since we query the disk give it slightly more delay + tools_lib.schedule(self._on_task_changed, 100, channel="mongo") + + def on_asset_changed(self): + tools_lib.schedule(self._on_asset_changed, 50, channel="mongo") + + def on_file_select(self, filepath): + asset_docs = self.assets_widget.get_selected_assets() + asset_doc = None + if asset_docs: + asset_doc = asset_docs[0] + + task_name = self.tasks_widget.get_current_task() + + workfile_doc = None + if asset_doc and task_name and filepath: + filename = os.path.split(filepath)[1] + workfile_doc = get_workfile_doc( + asset_doc["_id"], task_name, filename, io + ) + self.side_panel.set_context( + asset_doc, task_name, filepath, workfile_doc + ) + + def on_workfile_create(self, filepath): + self._create_workfile_doc(filepath) + + def on_side_panel_save(self): + workfile_doc, data = self.side_panel.get_workfile_data() + if not workfile_doc: + filepath = self.files_widget._get_selected_filepath() + self._create_workfile_doc(filepath, force=True) + workfile_doc = self._get_current_workfile_doc() + + save_workfile_data_to_doc(workfile_doc, data, io) + + def _get_current_workfile_doc(self, filepath=None): + if filepath is None: + filepath = self.files_widget._get_selected_filepath() + task_name = self.tasks_widget.get_current_task() + asset_docs = self.assets_widget.get_selected_assets() + if not task_name or not asset_docs or not filepath: + return + + asset_doc = asset_docs[0] + filename = os.path.split(filepath)[1] + return get_workfile_doc( + asset_doc["_id"], task_name, filename, io + ) + + def _create_workfile_doc(self, filepath, force=False): + workfile_doc = None + if not force: + workfile_doc = self._get_current_workfile_doc(filepath) + + if not workfile_doc: + workdir, filename = os.path.split(filepath) + asset_docs = self.assets_widget.get_selected_assets() + asset_doc = asset_docs[0] + task_name = self.tasks_widget.get_current_task() + create_workfile_doc(asset_doc, task_name, filename, workdir, io) + + def set_context(self, context): + if "asset" in context: + asset = context["asset"] + asset_document = io.find_one( + { + "name": asset, + "type": "asset" + }, + { + "data.tasks": 1 + } + ) + + # Select the asset + self.assets_widget.select_assets([asset], expand=True) + + # Force a refresh on Tasks? 
+ self.tasks_widget.set_asset(asset_document) + + if "task" in context: + self.tasks_widget.select_task(context["task"]) + + def refresh(self): + # Refresh asset widget + self.assets_widget.refresh() + + self._on_task_changed() + + def _on_asset_changed(self): + asset = self.assets_widget.get_selected_assets() or None + + if not asset: + # Force disable the other widgets if no + # active selection + self.tasks_widget.setEnabled(False) + self.files_widget.setEnabled(False) + else: + asset = asset[0] + self.tasks_widget.setEnabled(True) + + self.tasks_widget.set_asset(asset) + + def _on_task_changed(self): + asset = self.assets_widget.get_selected_assets() or None + if asset is not None: + asset = asset[0] + task = self.tasks_widget.get_current_task() + + self.tasks_widget.setEnabled(bool(asset)) + + self.files_widget.setEnabled(all([bool(task), bool(asset)])) + self.files_widget.set_asset_task(asset, task) + self.files_widget.refresh() + + +def validate_host_requirements(host): + if host is None: + raise RuntimeError("No registered host.") + + # Verify the host has implemented the api for Work Files + required = [ + "open_file", + "save_file", + "current_file", + "has_unsaved_changes", + "work_root", + "file_extensions", + ] + missing = [] + for name in required: + if not hasattr(host, name): + missing.append(name) + if missing: + raise RuntimeError( + "Host is missing required Work Files interfaces: " + "%s (host: %s)" % (", ".join(missing), host) + ) + return True + + +def show(root=None, debug=False, parent=None, use_context=True, save=True): + """Show Work Files GUI""" + # todo: remove `root` argument to show() + + try: + module.window.close() + del(module.window) + except (AttributeError, RuntimeError): + pass + + host = api.registered_host() + validate_host_requirements(host) + + if debug: + api.Session["AVALON_ASSET"] = "Mock" + api.Session["AVALON_TASK"] = "Testing" + + with tools_lib.application(): + window = Window(parent=parent) + window.refresh() + + if use_context: + context = { + "asset": api.Session["AVALON_ASSET"], + "silo": api.Session["AVALON_SILO"], + "task": api.Session["AVALON_TASK"] + } + window.set_context(context) + + window.files_widget.btn_save.setEnabled(save) + + window.show() + window.setStyleSheet(style.load_stylesheet()) + + module.window = window + + # Pull window to the front. 
+ module.window.raise_() + module.window.activateWindow() diff --git a/pype/tools/workfiles/model.py b/pype/tools/workfiles/model.py new file mode 100644 index 0000000000..368988fd4e --- /dev/null +++ b/pype/tools/workfiles/model.py @@ -0,0 +1,153 @@ +import os +import logging + +from Qt import QtCore + +from avalon import style +from avalon.vendor import qtawesome +from avalon.tools.models import TreeModel, Item + +log = logging.getLogger(__name__) + + +class FilesModel(TreeModel): + """Model listing files with specified extensions in a root folder""" + Columns = ["filename", "date"] + + FileNameRole = QtCore.Qt.UserRole + 2 + DateModifiedRole = QtCore.Qt.UserRole + 3 + FilePathRole = QtCore.Qt.UserRole + 4 + IsEnabled = QtCore.Qt.UserRole + 5 + + def __init__(self, file_extensions, parent=None): + super(FilesModel, self).__init__(parent=parent) + + self._root = None + self._file_extensions = file_extensions + self._icons = { + "file": qtawesome.icon("fa.file-o", color=style.colors.default) + } + + def set_root(self, root): + self._root = root + self.refresh() + + def _add_empty(self): + item = Item() + item.update({ + # Put a display message in 'filename' + "filename": "No files found.", + # Not-selectable + "enabled": False, + "date": None, + "filepath": None + }) + + self.add_child(item) + + def refresh(self): + self.clear() + self.beginResetModel() + + root = self._root + + if not root: + self.endResetModel() + return + + if not os.path.exists(root): + # Add Work Area does not exist placeholder + log.debug("Work Area does not exist: %s", root) + message = "Work Area does not exist. Use Save As to create it." + item = Item({ + "filename": message, + "date": None, + "filepath": None, + "enabled": False, + "icon": qtawesome.icon("fa.times", color=style.colors.mid) + }) + self.add_child(item) + self.endResetModel() + return + + extensions = self._file_extensions + + for filename in os.listdir(root): + path = os.path.join(root, filename) + if os.path.isdir(path): + continue + + ext = os.path.splitext(filename)[1] + if extensions and ext not in extensions: + continue + + modified = os.path.getmtime(path) + + item = Item({ + "filename": filename, + "date": modified, + "filepath": path + }) + + self.add_child(item) + + if self.rowCount() == 0: + self._add_empty() + + self.endResetModel() + + def has_filenames(self): + for item in self._root_item.children(): + if item.get("enabled", True): + return True + return False + + def rowCount(self, parent=None): + if parent is None or not parent.isValid(): + parent_item = self._root_item + else: + parent_item = parent.internalPointer() + return parent_item.childCount() + + def data(self, index, role): + if not index.isValid(): + return + + if role == QtCore.Qt.DecorationRole: + # Add icon to filename column + item = index.internalPointer() + if index.column() == 0: + if item["filepath"]: + return self._icons["file"] + return item.get("icon", None) + + if role == self.FileNameRole: + item = index.internalPointer() + return item["filename"] + + if role == self.DateModifiedRole: + item = index.internalPointer() + return item["date"] + + if role == self.FilePathRole: + item = index.internalPointer() + return item["filepath"] + + if role == self.IsEnabled: + item = index.internalPointer() + return item.get("enabled", True) + + return super(FilesModel, self).data(index, role) + + def headerData(self, section, orientation, role): + # Show nice labels in the header + if ( + role == QtCore.Qt.DisplayRole + and orientation == QtCore.Qt.Horizontal + ): + if 
section == 0: + return "Name" + elif section == 1: + return "Date modified" + + return super(FilesModel, self).headerData(section, orientation, role) diff --git a/pype/tools/workfiles/view.py b/pype/tools/workfiles/view.py new file mode 100644 index 0000000000..8e3993e4c7 --- /dev/null +++ b/pype/tools/workfiles/view.py @@ -0,0 +1,15 @@ +from Qt import QtWidgets, QtCore + + +class FilesView(QtWidgets.QTreeView): + doubleClickedLeft = QtCore.Signal() + doubleClickedRight = QtCore.Signal() + + def mouseDoubleClickEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self.doubleClickedLeft.emit() + + elif event.button() == QtCore.Qt.RightButton: + self.doubleClickedRight.emit() + + return super(FilesView, self).mouseDoubleClickEvent(event) diff --git a/pype/vendor/ftrack_api_old/__init__.py b/pype/vendor/ftrack_api_old/__init__.py deleted file mode 100644 index d8ee30bd8f..0000000000 --- a/pype/vendor/ftrack_api_old/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from ._version import __version__ -from .session import Session - - -def mixin(instance, mixin_class, name=None): - '''Mixin *mixin_class* to *instance*. - - *name* can be used to specify new class name. If not specified then one will - be generated. - - ''' - if name is None: - name = '{0}{1}'.format( - instance.__class__.__name__, mixin_class.__name__ - ) - - # Check mixin class not already present in mro in order to avoid consistent - # method resolution failure. - if mixin_class in instance.__class__.mro(): - return - - instance.__class__ = type( - name, - ( - mixin_class, - instance.__class__ - ), - {} - ) diff --git a/pype/vendor/ftrack_api_old/_centralized_storage_scenario.py b/pype/vendor/ftrack_api_old/_centralized_storage_scenario.py deleted file mode 100644 index 566e374af4..0000000000 --- a/pype/vendor/ftrack_api_old/_centralized_storage_scenario.py +++ /dev/null @@ -1,656 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -from __future__ import absolute_import - -import logging -import json -import sys -import os - -import ftrack_api_old -import ftrack_api_old.structure.standard as _standard -from ftrack_api_old.logging import LazyLogMessage as L - - -scenario_name = 'ftrack.centralized-storage' - - -class ConfigureCentralizedStorageScenario(object): - '''Configure a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - @property - def storage_scenario(self): - '''Return storage scenario setting.''' - return self.session.query( - 'select value from Setting ' - 'where name is "storage_scenario" and group is "STORAGE"' - ).one() - - @property - def existing_centralized_storage_configuration(self): - '''Return existing centralized storage configuration.''' - storage_scenario = self.storage_scenario - - try: - configuration = json.loads(storage_scenario['value']) - except (ValueError, TypeError): - return None - - if not isinstance(configuration, dict): - return None - - if configuration.get('scenario') != scenario_name: - return None - - return configuration.get('data', {}) - - def _get_confirmation_text(self, configuration): - '''Return confirmation text from *configuration*.''' - configure_location = configuration.get('configure_location') - select_location = configuration.get('select_location') - select_mount_point = configuration.get('select_mount_point') - - if configure_location: - location_text = unicode( - 'A new location will be created:\n\n' - '* Label: {location_label}\n' - '* Name: {location_name}\n' - '* Description: {location_description}\n' - ).format(**configure_location) - else: - location = self.session.get( - 'Location', select_location['location_id'] - ) - location_text = ( - u'You have choosen to use an existing location: {0}'.format( - location['label'] - ) - ) - - mount_points_text = unicode( - '* Linux: {linux}\n' - '* OS X: {osx}\n' - '* Windows: {windows}\n\n' - ).format( - linux=select_mount_point.get('linux_mount_point') or '*Not set*', - osx=select_mount_point.get('osx_mount_point') or '*Not set*', - windows=select_mount_point.get('windows_mount_point') or '*Not set*' - ) - - mount_points_not_set = [] - - if not select_mount_point.get('linux_mount_point'): - mount_points_not_set.append('Linux') - - if not select_mount_point.get('osx_mount_point'): - mount_points_not_set.append('OS X') - - if not select_mount_point.get('windows_mount_point'): - mount_points_not_set.append('Windows') - - if mount_points_not_set: - mount_points_text += unicode( - 'Please be aware that this location will not be working on ' - '{missing} because the mount points are not set up.' - ).format( - missing=' and '.join(mount_points_not_set) - ) - - text = unicode( - '#Confirm storage setup#\n\n' - 'Almost there! Please take a moment to verify the settings you ' - 'are about to save. You can always come back later and update the ' - 'configuration.\n' - '##Location##\n\n' - '{location}\n' - '##Mount points##\n\n' - '{mount_points}' - ).format( - location=location_text, - mount_points=mount_points_text - ) - - return text - - def configure_scenario(self, event): - '''Configure scenario based on *event* and return form items.''' - steps = ( - 'select_scenario', - 'select_location', - 'configure_location', - 'select_structure', - 'select_mount_point', - 'confirm_summary', - 'save_configuration' - ) - - warning_message = '' - values = event['data'].get('values', {}) - - # Calculate previous step and the next. - previous_step = values.get('step', 'select_scenario') - next_step = steps[steps.index(previous_step) + 1] - state = 'configuring' - - self.logger.info(L( - u'Configuring scenario, previous step: {0}, next step: {1}. ' - u'Values {2!r}.', - previous_step, next_step, values - )) - - if 'configuration' in values: - configuration = values.pop('configuration') - else: - configuration = {} - - if values: - # Update configuration with values from the previous step. 
- configuration[previous_step] = values - - if previous_step == 'select_location': - values = configuration['select_location'] - if values.get('location_id') != 'create_new_location': - location_exists = self.session.query( - 'Location where id is "{0}"'.format( - values.get('location_id') - ) - ).first() - if not location_exists: - next_step = 'select_location' - warning_message = ( - '**The selected location does not exist. Please choose ' - 'one from the dropdown or create a new one.**' - ) - - if next_step == 'select_location': - try: - location_id = ( - self.existing_centralized_storage_configuration['location_id'] - ) - except (KeyError, TypeError): - location_id = None - - options = [{ - 'label': 'Create new location', - 'value': 'create_new_location' - }] - for location in self.session.query( - 'select name, label, description from Location' - ): - if location['name'] not in ( - 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', - 'ftrack.server', 'ftrack.review' - ): - options.append({ - 'label': u'{label} ({name})'.format( - label=location['label'], name=location['name'] - ), - 'description': location['description'], - 'value': location['id'] - }) - - warning = '' - if location_id is not None: - # If there is already a location configured we must make the - # user aware that changing the location may be problematic. - warning = ( - '\n\n**Be careful if you switch to another location ' - 'for an existing storage scenario. Components that have ' - 'already been published to the previous location will be ' - 'made unavailable for common use.**' - ) - default_value = location_id - elif location_id is None and len(options) == 1: - # No location configured and no existing locations to use. - default_value = 'create_new_location' - else: - # There are existing locations to choose from but non of them - # are currently active in the centralized storage scenario. - default_value = None - - items = [{ - 'type': 'label', - 'value': ( - '#Select location#\n' - 'Choose an already existing location or create a new one ' - 'to represent your centralized storage. {0}'.format( - warning - ) - ) - }, { - 'type': 'enumerator', - 'label': 'Location', - 'name': 'location_id', - 'value': default_value, - 'data': options - }] - - default_location_name = 'studio.central-storage-location' - default_location_label = 'Studio location' - default_location_description = ( - 'The studio central location where all components are ' - 'stored.' - ) - - if previous_step == 'configure_location': - configure_location = configuration.get( - 'configure_location' - ) - - if configure_location: - try: - existing_location = self.session.query( - u'Location where name is "{0}"'.format( - configure_location.get('location_name') - ) - ).first() - except UnicodeEncodeError: - next_step = 'configure_location' - warning_message += ( - '**The location name contains non-ascii characters. ' - 'Please change the name and try again.**' - ) - values = configuration['select_location'] - else: - if existing_location: - next_step = 'configure_location' - warning_message += ( - u'**There is already a location named {0}. 
' - u'Please change the name and try again.**'.format( - configure_location.get('location_name') - ) - ) - values = configuration['select_location'] - - if ( - not configure_location.get('location_name') or - not configure_location.get('location_label') or - not configure_location.get('location_description') - ): - next_step = 'configure_location' - warning_message += ( - '**Location name, label and description cannot ' - 'be empty.**' - ) - values = configuration['select_location'] - - if next_step == 'configure_location': - # Populate form with previous configuration. - default_location_label = configure_location['location_label'] - default_location_name = configure_location['location_name'] - default_location_description = ( - configure_location['location_description'] - ) - - if next_step == 'configure_location': - - if values.get('location_id') == 'create_new_location': - # Add options to create a new location. - items = [{ - 'type': 'label', - 'value': ( - '#Create location#\n' - 'Here you will create a new location to be used ' - 'with your new Storage scenario. For your ' - 'convenience we have already filled in some default ' - 'values. If this is the first time you are configuring ' - 'a storage scenario in ftrack we recommend that you ' - 'stick with these settings.' - ) - }, { - 'label': 'Label', - 'name': 'location_label', - 'value': default_location_label, - 'type': 'text' - }, { - 'label': 'Name', - 'name': 'location_name', - 'value': default_location_name, - 'type': 'text' - }, { - 'label': 'Description', - 'name': 'location_description', - 'value': default_location_description, - 'type': 'text' - }] - - else: - # The user selected an existing location. Move on to next - # step. - next_step = 'select_mount_point' - - if next_step == 'select_structure': - # There is only one structure to choose from, go to next step. - next_step = 'select_mount_point' - # items = [ - # { - # 'type': 'label', - # 'value': ( - # '#Select structure#\n' - # 'Select which structure to use with your location. ' - # 'The structure is used to generate the filesystem ' - # 'path for components that are added to this location.' - # ) - # }, - # { - # 'type': 'enumerator', - # 'label': 'Structure', - # 'name': 'structure_id', - # 'value': 'standard', - # 'data': [{ - # 'label': 'Standard', - # 'value': 'standard', - # 'description': ( - # 'The Standard structure uses the names in your ' - # 'project structure to determine the path.' - # ) - # }] - # } - # ] - - if next_step == 'select_mount_point': - try: - mount_points = ( - self.existing_centralized_storage_configuration['accessor']['mount_points'] - ) - except (KeyError, TypeError): - mount_points = dict() - - items = [ - { - 'value': ( - '#Mount points#\n' - 'Set mount points for your centralized storage ' - 'location. For the location to work as expected each ' - 'platform that you intend to use must have the ' - 'corresponding mount point set and the storage must ' - 'be accessible. If not set correctly files will not be ' - 'saved or read.' - ), - 'type': 'label' - }, { - 'type': 'text', - 'label': 'Linux', - 'name': 'linux_mount_point', - 'empty_text': 'E.g. /usr/mnt/MyStorage ...', - 'value': mount_points.get('linux', '') - }, { - 'type': 'text', - 'label': 'OS X', - 'name': 'osx_mount_point', - 'empty_text': 'E.g. /Volumes/MyStorage ...', - 'value': mount_points.get('osx', '') - }, { - 'type': 'text', - 'label': 'Windows', - 'name': 'windows_mount_point', - 'empty_text': 'E.g. 
\\\\MyStorage ...', - 'value': mount_points.get('windows', '') - } - ] - - if next_step == 'confirm_summary': - items = [{ - 'type': 'label', - 'value': self._get_confirmation_text(configuration) - }] - state = 'confirm' - - if next_step == 'save_configuration': - mount_points = configuration['select_mount_point'] - select_location = configuration['select_location'] - - if select_location['location_id'] == 'create_new_location': - configure_location = configuration['configure_location'] - location = self.session.create( - 'Location', - { - 'name': configure_location['location_name'], - 'label': configure_location['location_label'], - 'description': ( - configure_location['location_description'] - ) - } - ) - - else: - location = self.session.query( - 'Location where id is "{0}"'.format( - select_location['location_id'] - ) - ).one() - - setting_value = json.dumps({ - 'scenario': scenario_name, - 'data': { - 'location_id': location['id'], - 'location_name': location['name'], - 'accessor': { - 'mount_points': { - 'linux': mount_points['linux_mount_point'], - 'osx': mount_points['osx_mount_point'], - 'windows': mount_points['windows_mount_point'] - } - } - } - }) - - self.storage_scenario['value'] = setting_value - self.session.commit() - - # Broadcast an event that storage scenario has been configured. - event = ftrack_api_old.event.base.Event( - topic='ftrack.storage-scenario.configure-done' - ) - self.session.event_hub.publish(event) - - items = [{ - 'type': 'label', - 'value': ( - '#Done!#\n' - 'Your storage scenario is now configured and ready ' - 'to use. **Note that you may have to restart Connect and ' - 'other applications to start using it.**' - ) - }] - state = 'done' - - if warning_message: - items.insert(0, { - 'type': 'label', - 'value': warning_message - }) - - items.append({ - 'type': 'hidden', - 'value': configuration, - 'name': 'configuration' - }) - items.append({ - 'type': 'hidden', - 'value': next_step, - 'name': 'step' - }) - - return { - 'items': items, - 'state': state - } - - def discover_centralized_scenario(self, event): - '''Return action discover dictionary for *event*.''' - return { - 'id': scenario_name, - 'name': 'Centralized storage scenario', - 'description': ( - '(Recommended) centralized storage scenario where all files ' - 'are kept on a storage that is mounted and available to ' - 'everyone in the studio.' - ) - } - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - #: TODO: Move these to a separate function. - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.discover ' - 'and source.user.username="{0}"' - ).format( - session.api_user - ), - self.discover_centralized_scenario - ) - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.configure ' - 'and data.scenario_id="{0}" ' - 'and source.user.username="{1}"' - ).format( - scenario_name, - session.api_user - ), - self.configure_scenario - ) - - -class ActivateCentralizedStorageScenario(object): - '''Activate a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - def activate(self, event): - '''Activate scenario in *event*.''' - storage_scenario = event['data']['storage_scenario'] - - try: - location_data = storage_scenario['data'] - location_name = location_data['location_name'] - location_id = location_data['location_id'] - mount_points = location_data['accessor']['mount_points'] - - except KeyError: - error_message = ( - 'Unable to read storage scenario data.' - ) - self.logger.error(L(error_message)) - raise ftrack_api_old.exception.LocationError( - 'Unable to configure location based on scenario.' - ) - - else: - location = self.session.create( - 'Location', - data=dict( - name=location_name, - id=location_id - ), - reconstructing=True - ) - - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - else: - raise ftrack_api_old.exception.LocationError( - ( - 'Unable to find accessor prefix for platform {0}.' - ).format(sys.platform) - ) - - location.accessor = ftrack_api_old.accessor.disk.DiskAccessor( - prefix=prefix - ) - location.structure = _standard.StandardStructure() - location.priority = 1 - self.logger.info(L( - u'Storage scenario activated. Configured {0!r} from ' - u'{1!r}', - location, storage_scenario - )) - - def _verify_startup(self, event): - '''Verify the storage scenario configuration.''' - storage_scenario = event['data']['storage_scenario'] - location_data = storage_scenario['data'] - mount_points = location_data['accessor']['mount_points'] - - prefix = None - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - - if not prefix: - return ( - u'The storage scenario has not been configured for your ' - u'operating system. ftrack may not be able to ' - u'store and track files correctly.' - ) - - if not os.path.isdir(prefix): - return ( - unicode( - 'The path {0} does not exist. ftrack may not be able to ' - 'store and track files correctly. \n\nIf the storage is ' - 'newly setup you may want to create necessary folder ' - 'structures. If the storage is a network drive you should ' - 'make sure that it is mounted correctly.' - ).format(prefix) - ) - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - session.event_hub.subscribe( - ( - 'topic=ftrack.storage-scenario.activate ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self.activate - ) - - # Listen to verify startup event from ftrack connect to allow responding - # with a message if something is not working correctly with this - # scenario that the user should be notified about. 
- self.session.event_hub.subscribe( - ( - 'topic=ftrack.connect.verify-startup ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self._verify_startup - ) - -def register(session): - '''Register storage scenario.''' - scenario = ActivateCentralizedStorageScenario() - scenario.register(session) - - -def register_configuration(session): - '''Register storage scenario.''' - scenario = ConfigureCentralizedStorageScenario() - scenario.register(session) diff --git a/pype/vendor/ftrack_api_old/_python_ntpath.py b/pype/vendor/ftrack_api_old/_python_ntpath.py deleted file mode 100644 index 9f79a1850c..0000000000 --- a/pype/vendor/ftrack_api_old/_python_ntpath.py +++ /dev/null @@ -1,534 +0,0 @@ -# pragma: no cover -# Module 'ntpath' -- common operations on WinNT/Win95 pathnames -"""Common pathname manipulations, WindowsNT/95 version. - -Instead of importing this module directly, import os and refer to this -module as os.path. -""" - -import os -import sys -import stat -import genericpath -import warnings - -from genericpath import * - -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","walk","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames","relpath"] - -# strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '\\' -pathsep = ';' -altsep = '/' -defpath = '.;C:\\bin' -if 'ce' in sys.builtin_module_names: - defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: - # OS/2 w/ VACPP - altsep = '/' -devnull = 'nul' - -# Normalize the case of a pathname and map slashes to backslashes. -# Other normalizations (such as optimizing '../' away) are not done -# (this is done by normpath). - -def normcase(s): - """Normalize case of pathname. - - Makes all characters lowercase and all slashes into backslashes.""" - return s.replace("/", "\\").lower() - - -# Return whether a path is absolute. -# Trivial in Posix, harder on the Mac or MS-DOS. -# For DOS it is absolute if it starts with a slash or backslash (current -# volume), or if a pathname after the volume letter and colon / UNC resource -# starts with a slash or backslash. - -def isabs(s): - """Test whether a path is absolute""" - s = splitdrive(s)[1] - return s != '' and s[:1] in '/\\' - - -# Join two (or more) paths. - -def join(a, *p): - """Join two or more pathname components, inserting "\\" as needed. - If any component is an absolute path, all previous path components - will be discarded.""" - path = a - for b in p: - b_wins = 0 # set to 1 iff b makes path irrelevant - if path == "": - b_wins = 1 - - elif isabs(b): - # This probably wipes out path so far. However, it's more - # complicated if path begins with a drive letter: - # 1. join('c:', '/a') == 'c:/a' - # 2. join('c:/', '/a') == 'c:/a' - # But - # 3. join('c:/a', '/b') == '/b' - # 4. join('c:', 'd:/') = 'd:/' - # 5. join('c:/', 'd:/') = 'd:/' - if path[1:2] != ":" or b[1:2] == ":": - # Path doesn't start with a drive letter, or cases 4 and 5. - b_wins = 1 - - # Else path has a drive letter, and b doesn't but is absolute. - elif len(path) > 3 or (len(path) == 3 and - path[-1] not in "/\\"): - # case 3 - b_wins = 1 - - if b_wins: - path = b - else: - # Join, and ensure there's a separator. 
- assert len(path) > 0 - if path[-1] in "/\\": - if b and b[0] in "/\\": - path += b[1:] - else: - path += b - elif path[-1] == ":": - path += b - elif b: - if b[0] in "/\\": - path += b - else: - path += "\\" + b - else: - # path is not empty and does not end with a backslash, - # but b is empty; since, e.g., split('a/') produces - # ('a', ''), it's best if join() adds a backslash in - # this case. - path += '\\' - - return path - - -# Split a path in a drive specification (a drive letter followed by a -# colon) and the path specification. -# It is always true that drivespec + pathspec == p -def splitdrive(p): - """Split a pathname into drive and path specifiers. Returns a 2-tuple -"(drive,path)"; either part may be empty""" - if p[1:2] == ':': - return p[0:2], p[2:] - return '', p - - -# Parse UNC paths -def splitunc(p): - """Split a pathname into UNC mount point and relative path specifiers. - - Return a 2-tuple (unc, rest); either part may be empty. - If unc is not empty, it has the form '//host/mount' (or similar - using backslashes). unc+rest is always the input path. - Paths containing drive letters never have an UNC part. - """ - if p[1:2] == ':': - return '', p # Drive letter present - firstTwo = p[0:2] - if firstTwo == '//' or firstTwo == '\\\\': - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter - # \\machine\mountpoint\directories... - # directory ^^^^^^^^^^^^^^^ - normp = normcase(p) - index = normp.find('\\', 2) - if index == -1: - ##raise RuntimeError, 'illegal UNC path: "' + p + '"' - return ("", p) - index = normp.find('\\', index + 1) - if index == -1: - index = len(p) - return p[:index], p[index:] - return '', p - - -# Split a path in head (everything up to the last '/') and tail (the -# rest). After the trailing '/' is stripped, the invariant -# join(head, tail) == p holds. -# The resulting head won't end in '/' unless it is the root. - -def split(p): - """Split a pathname. - - Return tuple (head, tail) where tail is everything after the final slash. - Either part may be empty.""" - - d, p = splitdrive(p) - # set i to index beyond p's last slash - i = len(p) - while i and p[i-1] not in '/\\': - i = i - 1 - head, tail = p[:i], p[i:] # now tail has no slashes - # remove trailing slashes from head, unless it's all slashes - head2 = head - while head2 and head2[-1] in '/\\': - head2 = head2[:-1] - head = head2 or head - return d + head, tail - - -# Split a path in root and extension. -# The extension is everything starting at the last dot in the last -# pathname component; the root is everything before that. -# It is always true that root + ext == p. - -def splitext(p): - return genericpath._splitext(p, sep, altsep, extsep) -splitext.__doc__ = genericpath._splitext.__doc__ - - -# Return the tail (basename) part of a path. - -def basename(p): - """Returns the final component of a pathname""" - return split(p)[1] - - -# Return the head (dirname) part of a path. - -def dirname(p): - """Returns the directory component of a pathname""" - return split(p)[0] - -# Is a path a symbolic link? -# This will always return false on systems where posix.lstat doesn't exist. - -def islink(path): - """Test for symbolic link. - On WindowsNT/95 and OS/2 always returns false - """ - return False - -# alias exists to lexists -lexists = exists - -# Is a path a mount point? Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. 
- -def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" - unc, rest = splitunc(path) - if unc: - return rest in ("", "/", "\\") - p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' - - -# Directory tree walk. -# For each directory under top (including top itself, but excluding -# '.' and '..'), func(arg, dirname, filenames) is called, where -# dirname is the name of the directory and filenames is the list -# of files (and subdirectories etc.) in the directory. -# The func may modify the filenames list, to implement a filter, -# or to impose a different order of visiting. - -def walk(top, func, arg): - """Directory tree walk with callback function. - - For each directory in the directory tree rooted at top (including top - itself, but excluding '.' and '..'), call func(arg, dirname, fnames). - dirname is the name of the directory, and fnames a list of the names of - the files and subdirectories in dirname (excluding '.' and '..'). func - may modify the fnames list in-place (e.g. via del or slice assignment), - and walk will only recurse into the subdirectories whose names remain in - fnames; this can be used to implement a filter, or to impose a specific - order of visiting. No semantics are defined for, or required of, arg, - beyond that arg is always passed to func. It can be used, e.g., to pass - a filename pattern, or a mutable object designed to accumulate - statistics. Passing None for arg is common.""" - warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", - stacklevel=2) - try: - names = os.listdir(top) - except os.error: - return - func(arg, top, names) - for name in names: - name = join(top, name) - if isdir(name): - walk(name, func, arg) - - -# Expand paths beginning with '~' or '~user'. -# '~' means $HOME; '~user' means that user's home directory. -# If the path doesn't begin with '~', or if the user or $HOME is unknown, -# the path is returned unchanged (leaving error reporting to whatever -# function is called with the expanded path as argument). -# See also module 'glob' for expansion of *, ? and [...] in pathnames. -# (A function should also be defined to do full *sh-style environment -# variable expansion.) - -def expanduser(path): - """Expand ~ and ~user constructs. - - If user or $HOME is unknown, do nothing.""" - if path[:1] != '~': - return path - i, n = 1, len(path) - while i < n and path[i] not in '/\\': - i = i + 1 - - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif not 'HOMEPATH' in os.environ: - return path - else: - try: - drive = os.environ['HOMEDRIVE'] - except KeyError: - drive = '' - userhome = join(drive, os.environ['HOMEPATH']) - - if i != 1: #~user - userhome = join(dirname(userhome), path[1:i]) - - return userhome + path[i:] - - -# Expand paths containing shell variable substitutions. -# The following rules apply: -# - no expansion within single quotes -# - '$$' is translated into '$' -# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2% -# - ${varname} is accepted. -# - $varname is accepted. -# - %varname% is accepted. -# - varnames can be made out of letters, digits and the characters '_-' -# (though is not verified in the ${varname} and %varname% cases) -# XXX With COMMAND.COM you can use any characters in a variable name, -# XXX except '^|<>='. - -def expandvars(path): - """Expand shell variables of the forms $var, ${var} and %var%. 
- - Unknown variables are left unchanged.""" - if '$' not in path and '%' not in path: - return path - import string - varchars = string.ascii_letters + string.digits + '_-' - res = '' - index = 0 - pathlen = len(path) - while index < pathlen: - c = path[index] - if c == '\'': # no expansion within single quotes - path = path[index + 1:] - pathlen = len(path) - try: - index = path.index('\'') - res = res + '\'' + path[:index + 1] - except ValueError: - res = res + path - index = pathlen - 1 - elif c == '%': # variable or '%' - if path[index + 1:index + 2] == '%': - res = res + c - index = index + 1 - else: - path = path[index+1:] - pathlen = len(path) - try: - index = path.index('%') - except ValueError: - res = res + '%' + path - index = pathlen - 1 - else: - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '%' + var + '%' - elif c == '$': # variable or '$$' - if path[index + 1:index + 2] == '$': - res = res + c - index = index + 1 - elif path[index + 1:index + 2] == '{': - path = path[index+2:] - pathlen = len(path) - try: - index = path.index('}') - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '${' + var + '}' - except ValueError: - res = res + '${' + path - index = pathlen - 1 - else: - var = '' - index = index + 1 - c = path[index:index + 1] - while c != '' and c in varchars: - var = var + c - index = index + 1 - c = path[index:index + 1] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '$' + var - if c != '': - index = index - 1 - else: - res = res + c - index = index + 1 - return res - - -# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B. -# Previously, this function also truncated pathnames to 8+3 format, -# but as this module is called "ntpath", that's obviously wrong! - -def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - # Preserve unicode (if path is unicode) - backslash, dot = (u'\\', u'.') if isinstance(path, unicode) else ('\\', '.') - if path.startswith(('\\\\.\\', '\\\\?\\')): - # in the case of paths with these prefixes: - # \\.\ -> device names - # \\?\ -> literal paths - # do not do any normalization, but return the path unchanged - return path - path = path.replace("/", "\\") - prefix, path = splitdrive(path) - # We need to be careful here. If the prefix is empty, and the path starts - # with a backslash, it could either be an absolute path on the current - # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It - # is therefore imperative NOT to collapse multiple backslashes blindly in - # that case. - # The code below preserves multiple backslashes when there is no drive - # letter. This means that the invalid filename \\\a\b is preserved - # unchanged, where a\\\b is normalised to a\b. It's not clear that there - # is any better behaviour for such edge cases. - if prefix == '': - # No drive letter - preserve initial backslashes - while path[:1] == "\\": - prefix = prefix + backslash - path = path[1:] - else: - # We have a drive letter - collapse initial backslashes - if path.startswith("\\"): - prefix = prefix + backslash - path = path.lstrip("\\") - comps = path.split("\\") - i = 0 - while i < len(comps): - if comps[i] in ('.', ''): - del comps[i] - elif comps[i] == '..': - if i > 0 and comps[i-1] != '..': - del comps[i-1:i+1] - i -= 1 - elif i == 0 and prefix.endswith("\\"): - del comps[i] - else: - i += 1 - else: - i += 1 - # If the path is now empty, substitute '.' 
- if not prefix and not comps: - comps.append(dot) - return prefix + backslash.join(comps) - - -# Return an absolute path. -try: - from nt import _getfullpathname - -except ImportError: # not running on Windows - mock up something sensible - def abspath(path): - """Return the absolute version of a path.""" - if not isabs(path): - if isinstance(path, unicode): - cwd = os.getcwdu() - else: - cwd = os.getcwd() - path = join(cwd, path) - return normpath(path) - -else: # use native Windows method on Windows - def abspath(path): - """Return the absolute version of a path.""" - - if path: # Empty path must return current working directory. - try: - path = _getfullpathname(path) - except WindowsError: - pass # Bad path - return unchanged. - elif isinstance(path, unicode): - path = os.getcwdu() - else: - path = os.getcwd() - return normpath(path) - -# realpath is a no-op on systems without islink support -realpath = abspath -# Win9x family and earlier have no Unicode filename support. -supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and - sys.getwindowsversion()[3] >= 2) - -def _abspath_split(path): - abs = abspath(normpath(path)) - prefix, rest = splitunc(abs) - is_unc = bool(prefix) - if not is_unc: - prefix, rest = splitdrive(abs) - return is_unc, prefix, [x for x in rest.split(sep) if x] - -def relpath(path, start=curdir): - """Return a relative version of a path""" - - if not path: - raise ValueError("no path specified") - - start_is_unc, start_prefix, start_list = _abspath_split(start) - path_is_unc, path_prefix, path_list = _abspath_split(path) - - if path_is_unc ^ start_is_unc: - raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) - if path_prefix.lower() != start_prefix.lower(): - if path_is_unc: - raise ValueError("path is on UNC root %s, start on UNC root %s" - % (path_prefix, start_prefix)) - else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_prefix, start_prefix)) - # Work out how much of the filepath is shared by start and path. - i = 0 - for e1, e2 in zip(start_list, path_list): - if e1.lower() != e2.lower(): - break - i += 1 - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - -try: - # The genericpath.isdir implementation uses os.stat and checks the mode - # attribute to tell whether or not the path is a directory. - # This is overkill on Windows - just pass the path to GetFileAttributes - # and check the attribute from there. - from nt import _isdir as isdir -except ImportError: - # Use genericpath.isdir as imported above. - pass diff --git a/pype/vendor/ftrack_api_old/_version.py b/pype/vendor/ftrack_api_old/_version.py deleted file mode 100644 index aa1a8c4aba..0000000000 --- a/pype/vendor/ftrack_api_old/_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '1.8.2' diff --git a/pype/vendor/ftrack_api_old/_weakref.py b/pype/vendor/ftrack_api_old/_weakref.py deleted file mode 100644 index 69cc6f4b4f..0000000000 --- a/pype/vendor/ftrack_api_old/_weakref.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Yet another backport of WeakMethod for Python 2.7. -Changes include removing exception chaining and adding args to super() calls. - -Copyright (c) 2001-2019 Python Software Foundation.All rights reserved. - -Full license available in LICENSE.python. -""" -from weakref import ref - - -class WeakMethod(ref): - """ - A custom `weakref.ref` subclass which simulates a weak reference to - a bound method, working around the lifetime problem of bound methods. 
- """ - - __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" - - def __new__(cls, meth, callback=None): - try: - obj = meth.__self__ - func = meth.__func__ - except AttributeError: - raise TypeError( - "argument should be a bound method, not {}".format(type(meth)) - ) - - def _cb(arg): - # The self-weakref trick is needed to avoid creating a reference - # cycle. - self = self_wr() - if self._alive: - self._alive = False - if callback is not None: - callback(self) - - self = ref.__new__(cls, obj, _cb) - self._func_ref = ref(func, _cb) - self._meth_type = type(meth) - self._alive = True - self_wr = ref(self) - return self - - def __call__(self): - obj = super(WeakMethod, self).__call__() - func = self._func_ref() - if obj is None or func is None: - return None - return self._meth_type(func, obj) - - def __eq__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is other - return ref.__eq__(self, other) and self._func_ref == other._func_ref - return NotImplemented - - def __ne__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is not other - return ref.__ne__(self, other) or self._func_ref != other._func_ref - return NotImplemented - - __hash__ = ref.__hash__ diff --git a/pype/vendor/ftrack_api_old/accessor/__init__.py b/pype/vendor/ftrack_api_old/accessor/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/pype/vendor/ftrack_api_old/accessor/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/pype/vendor/ftrack_api_old/accessor/base.py b/pype/vendor/ftrack_api_old/accessor/base.py deleted file mode 100644 index d15d0208db..0000000000 --- a/pype/vendor/ftrack_api_old/accessor/base.py +++ /dev/null @@ -1,124 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import abc - -import ftrack_api_old.exception - - -class Accessor(object): - '''Provide data access to a location. - - A location represents a specific storage, but access to that storage may - vary. For example, both local filesystem and FTP access may be possible for - the same storage. An accessor implements these different ways of accessing - the same data location. - - As different accessors may access the same location, only part of a data - path that is commonly understood may be stored in the database. The format - of this path should be a contract between the accessors that require access - to the same location and is left as an implementation detail. As such, this - system provides no guarantee that two different accessors can provide access - to the same location, though this is a clear goal. The path stored centrally - is referred to as the **resource identifier** and should be used when - calling any of the accessor methods that accept a *resource_identifier* - argument. - - ''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise location accessor.''' - super(Accessor, self).__init__() - - @abc.abstractmethod - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api_old.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api_old.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. 
- - ''' - - @abc.abstractmethod - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - - @abc.abstractmethod - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - - @abc.abstractmethod - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - - @abc.abstractmethod - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - - @abc.abstractmethod - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api_old.data.Data` for *resource_identifier*.''' - - @abc.abstractmethod - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api_old.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - - @abc.abstractmethod - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - Should silently ignore existing containers and not recreate them. - - ''' - - @abc.abstractmethod - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. - - Raise :exc:`~ftrack_api_old.exception.AccessorParentResourceNotFoundError` - if container of *resource_identifier* could not be determined. - - ''' - - def remove_container(self, resource_identifier): # pragma: no cover - '''Remove container at *resource_identifier*.''' - return self.remove(resource_identifier) - - def get_filesystem_path(self, resource_identifier): # pragma: no cover - '''Return filesystem path for *resource_identifier*. - - Raise :exc:`~ftrack_api_old.exception.AccessorFilesystemPathError` if - filesystem path could not be determined from *resource_identifier* or - :exc:`~ftrack_api_old.exception.AccessorUnsupportedOperationError` if - retrieving filesystem paths is not supported by this accessor. - - ''' - raise ftrack_api_old.exception.AccessorUnsupportedOperationError( - 'get_filesystem_path', resource_identifier=resource_identifier - ) - - def get_url(self, resource_identifier): - '''Return URL for *resource_identifier*. - - Raise :exc:`~ftrack_api_old.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api_old.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by this accessor. 
- - ''' - raise ftrack_api_old.exception.AccessorUnsupportedOperationError( - 'get_url', resource_identifier=resource_identifier - ) diff --git a/pype/vendor/ftrack_api_old/accessor/disk.py b/pype/vendor/ftrack_api_old/accessor/disk.py deleted file mode 100644 index 7c6c91fcb9..0000000000 --- a/pype/vendor/ftrack_api_old/accessor/disk.py +++ /dev/null @@ -1,250 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -import sys -import errno -import contextlib - -import ftrack_api_old._python_ntpath as ntpath -import ftrack_api_old.accessor.base -import ftrack_api_old.data -from ftrack_api_old.exception import ( - AccessorFilesystemPathError, - AccessorUnsupportedOperationError, - AccessorResourceNotFoundError, - AccessorOperationFailedError, - AccessorPermissionDeniedError, - AccessorResourceInvalidError, - AccessorContainerNotEmptyError, - AccessorParentResourceNotFoundError -) - - -class DiskAccessor(ftrack_api_old.accessor.base.Accessor): - '''Provide disk access to a location. - - Expect resource identifiers to refer to relative filesystem paths. - - ''' - - def __init__(self, prefix, **kw): - '''Initialise location accessor. - - *prefix* specifies the base folder for the disk based structure and - will be prepended to any path. It should be specified in the syntax of - the current OS. - - ''' - if prefix: - prefix = os.path.expanduser(os.path.expandvars(prefix)) - prefix = os.path.abspath(prefix) - self.prefix = prefix - - super(DiskAccessor, self).__init__(**kw) - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api_old.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api_old.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='list', resource_identifier=resource_identifier - ): - listing = [] - for entry in os.listdir(filesystem_path): - listing.append(os.path.join(resource_identifier, entry)) - - return listing - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.exists(filesystem_path) - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isfile(filesystem_path) - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isdir(filesystem_path) - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise AccessorUnsupportedOperationError(operation='is_sequence') - - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api_old.Data` for *resource_identifier*.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='open', resource_identifier=resource_identifier - ): - data = ftrack_api_old.data.File(filesystem_path, mode) - - return data - - def remove(self, resource_identifier): - '''Remove *resource_identifier*. 
-
-        Raise :exc:`~ftrack_api_old.exception.AccessorResourceNotFoundError` if
-        *resource_identifier* does not exist.
-
-        '''
-        filesystem_path = self.get_filesystem_path(resource_identifier)
-
-        if self.is_file(resource_identifier):
-            with error_handler(
-                operation='remove', resource_identifier=resource_identifier
-            ):
-                os.remove(filesystem_path)
-
-        elif self.is_container(resource_identifier):
-            with error_handler(
-                operation='remove', resource_identifier=resource_identifier
-            ):
-                os.rmdir(filesystem_path)
-
-        else:
-            raise AccessorResourceNotFoundError(
-                resource_identifier=resource_identifier
-            )
-
-    def make_container(self, resource_identifier, recursive=True):
-        '''Make a container at *resource_identifier*.
-
-        If *recursive* is True, also make any intermediate containers.
-
-        '''
-        filesystem_path = self.get_filesystem_path(resource_identifier)
-
-        with error_handler(
-            operation='makeContainer', resource_identifier=resource_identifier
-        ):
-            try:
-                if recursive:
-                    os.makedirs(filesystem_path)
-                else:
-                    try:
-                        os.mkdir(filesystem_path)
-                    except OSError as error:
-                        if error.errno == errno.ENOENT:
-                            raise AccessorParentResourceNotFoundError(
-                                resource_identifier=resource_identifier
-                            )
-                        else:
-                            raise
-
-            except OSError as error:
-                if error.errno != errno.EEXIST:
-                    raise
-
-    def get_container(self, resource_identifier):
-        '''Return resource_identifier of container for *resource_identifier*.
-
-        Raise :exc:`~ftrack_api_old.exception.AccessorParentResourceNotFoundError`
-        if container of *resource_identifier* could not be determined.
-
-        '''
-        filesystem_path = self.get_filesystem_path(resource_identifier)
-
-        container = os.path.dirname(filesystem_path)
-
-        if self.prefix:
-            if not container.startswith(self.prefix):
-                raise AccessorParentResourceNotFoundError(
-                    resource_identifier=resource_identifier,
-                    message='Could not determine container for '
-                            '{resource_identifier} as container falls outside '
-                            'of configured prefix.'
-                )
-
-            # Convert container filesystem path into resource identifier.
-            container = container[len(self.prefix):]
-            if ntpath.isabs(container):
-                # Ensure that resulting path is relative by stripping any
-                # leftover prefixed slashes from string.
-                # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the
-                # result will be 'foo/bar'.
-                container = container.lstrip('\\/')
-
-        return container
-
-    def get_filesystem_path(self, resource_identifier):
-        '''Return filesystem path for *resource_identifier*.
-
-        For example::
-
-            >>> accessor = DiskAccessor('/mountpoint')
-            >>> print accessor.get_filesystem_path('test.txt')
-            /mountpoint/test.txt
-            >>> print accessor.get_filesystem_path('/mountpoint/test.txt')
-            /mountpoint/test.txt
-
-        Raise :exc:`ftrack_api_old.exception.AccessorFilesystemPathError` if
-        filesystem path could not be determined from *resource_identifier*.
-
-        '''
-        filesystem_path = resource_identifier
-        if filesystem_path:
-            filesystem_path = os.path.normpath(filesystem_path)
-
-        if self.prefix:
-            if not os.path.isabs(filesystem_path):
-                filesystem_path = os.path.normpath(
-                    os.path.join(self.prefix, filesystem_path)
-                )
-
-            if not filesystem_path.startswith(self.prefix):
-                raise AccessorFilesystemPathError(
-                    resource_identifier=resource_identifier,
-                    message='Could not determine access path for '
-                            'resource_identifier outside of configured prefix: '
-                            '{resource_identifier}.'
- ) - - return filesystem_path - - -@contextlib.contextmanager -def error_handler(**kw): - '''Conform raised OSError/IOError exception to appropriate FTrack error.''' - try: - yield - - except (OSError, IOError) as error: - (exception_type, exception_value, traceback) = sys.exc_info() - kw.setdefault('error', error) - - error_code = getattr(error, 'errno') - if not error_code: - raise AccessorOperationFailedError(**kw), None, traceback - - if error_code == errno.ENOENT: - raise AccessorResourceNotFoundError(**kw), None, traceback - - elif error_code == errno.EPERM: - raise AccessorPermissionDeniedError(**kw), None, traceback - - elif error_code == errno.ENOTEMPTY: - raise AccessorContainerNotEmptyError(**kw), None, traceback - - elif error_code in (errno.ENOTDIR, errno.EISDIR, errno.EINVAL): - raise AccessorResourceInvalidError(**kw), None, traceback - - else: - raise AccessorOperationFailedError(**kw), None, traceback - - except Exception: - raise diff --git a/pype/vendor/ftrack_api_old/accessor/server.py b/pype/vendor/ftrack_api_old/accessor/server.py deleted file mode 100644 index bd88093837..0000000000 --- a/pype/vendor/ftrack_api_old/accessor/server.py +++ /dev/null @@ -1,240 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import hashlib -import base64 -import json - -import requests - -from .base import Accessor -from ..data import String -import ftrack_api_old.exception -import ftrack_api_old.symbol - - -class ServerFile(String): - '''Representation of a server file.''' - - def __init__(self, resource_identifier, session, mode='rb'): - '''Initialise file.''' - self.mode = mode - self.resource_identifier = resource_identifier - self._session = session - self._has_read = False - - super(ServerFile, self).__init__() - - def flush(self): - '''Flush all changes.''' - super(ServerFile, self).flush() - - if self.mode == 'wb': - self._write() - - def read(self, limit=None): - '''Read file.''' - if not self._has_read: - self._read() - self._has_read = True - - return super(ServerFile, self).read(limit) - - def _read(self): - '''Read all remote content from key into wrapped_file.''' - position = self.tell() - self.seek(0) - - response = requests.get( - '{0}/component/get'.format(self._session.server_url), - params={ - 'id': self.resource_identifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - }, - stream=True - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api_old.exception.AccessorOperationFailedError( - 'Failed to read data: {0}.'.format(error) - ) - - for block in response.iter_content(ftrack_api_old.symbol.CHUNK_SIZE): - self.wrapped_file.write(block) - - self.flush() - self.seek(position) - - def _write(self): - '''Write current data to remote key.''' - position = self.tell() - self.seek(0) - - # Retrieve component from cache to construct a filename. - component = self._session.get('FileComponent', self.resource_identifier) - if not component: - raise ftrack_api_old.exception.AccessorOperationFailedError( - 'Unable to retrieve component with id: {0}.'.format( - self.resource_identifier - ) - ) - - # Construct a name from component name and file_type. 
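-        # E.g. a component named 'main' with file_type '.mov' is uploaded as
-        # 'main.mov'; the leading dot is stripped from file_type below to
-        # avoid doubling it.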
- name = component['name'] - if component['file_type']: - name = u'{0}.{1}'.format( - name, - component['file_type'].lstrip('.') - ) - - try: - metadata = self._session.get_upload_metadata( - component_id=self.resource_identifier, - file_name=name, - file_size=self._get_size(), - checksum=self._compute_checksum() - ) - except Exception as error: - raise ftrack_api_old.exception.AccessorOperationFailedError( - 'Failed to get put metadata: {0}.'.format(error) - ) - - # Ensure at beginning of file before put. - self.seek(0) - - # Put the file based on the metadata. - response = requests.put( - metadata['url'], - data=self.wrapped_file, - headers=metadata['headers'] - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api_old.exception.AccessorOperationFailedError( - 'Failed to put file to server: {0}.'.format(error) - ) - - self.seek(position) - - def _get_size(self): - '''Return size of file in bytes.''' - position = self.tell() - self.seek(0, os.SEEK_END) - length = self.tell() - self.seek(position) - return length - - def _compute_checksum(self): - '''Return checksum for file.''' - fp = self.wrapped_file - buf_size = ftrack_api_old.symbol.CHUNK_SIZE - hash_obj = hashlib.md5() - spos = fp.tell() - - s = fp.read(buf_size) - while s: - hash_obj.update(s) - s = fp.read(buf_size) - - base64_digest = base64.encodestring(hash_obj.digest()) - if base64_digest[-1] == '\n': - base64_digest = base64_digest[0:-1] - - fp.seek(spos) - return base64_digest - - -class _ServerAccessor(Accessor): - '''Provide server location access.''' - - def __init__(self, session, **kw): - '''Initialise location accessor.''' - super(_ServerAccessor, self).__init__(**kw) - - self._session = session - - def open(self, resource_identifier, mode='rb'): - '''Return :py:class:`~ftrack_api_old.Data` for *resource_identifier*.''' - return ServerFile(resource_identifier, session=self._session, mode=mode) - - def remove(self, resourceIdentifier): - '''Remove *resourceIdentifier*.''' - response = requests.get( - '{0}/component/remove'.format(self._session.server_url), - params={ - 'id': resourceIdentifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - } - ) - if response.status_code != 200: - raise ftrack_api_old.exception.AccessorOperationFailedError( - 'Failed to remove file.' 
- ) - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*.''' - return None - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*.''' - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container.''' - raise NotImplementedError() - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - return False - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - raise NotImplementedError() - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - raise NotImplementedError() - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise NotImplementedError() - - def get_url(self, resource_identifier): - '''Return url for *resource_identifier*.''' - url_string = ( - u'{url}/component/get?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - return url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - - def get_thumbnail_url(self, resource_identifier, size=None): - '''Return thumbnail url for *resource_identifier*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. - ''' - url_string = ( - u'{url}/component/thumbnail?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - url = url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - if size: - url += u'&size={0}'.format(size) - - return url diff --git a/pype/vendor/ftrack_api_old/attribute.py b/pype/vendor/ftrack_api_old/attribute.py deleted file mode 100644 index 47fd6c9616..0000000000 --- a/pype/vendor/ftrack_api_old/attribute.py +++ /dev/null @@ -1,707 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import collections -import copy -import logging -import functools - -import ftrack_api_old.symbol -import ftrack_api_old.exception -import ftrack_api_old.collection -import ftrack_api_old.inspection -import ftrack_api_old.operation - -logger = logging.getLogger( - __name__ -) - - -def merge_references(function): - '''Decorator to handle merging of references / collections.''' - - @functools.wraps(function) - def get_value(attribute, entity): - '''Merge the attribute with the local cache.''' - - if attribute.name not in entity._inflated: - # Only merge on first access to avoid - # inflating them multiple times. - - logger.debug( - 'Merging potential new data into attached ' - 'entity for attribute {0}.'.format( - attribute.name - ) - ) - - # Local attributes. - local_value = attribute.get_local_value(entity) - if isinstance( - local_value, - ( - ftrack_api_old.entity.base.Entity, - ftrack_api_old.collection.Collection, - ftrack_api_old.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging local value for attribute {0}.'.format(attribute) - ) - - merged_local_value = entity.session._merge( - local_value, merged=dict() - ) - - if merged_local_value is not local_value: - with entity.session.operation_recording(False): - attribute.set_local_value(entity, merged_local_value) - - # Remote attributes. 
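-            # Mirror the local value handling above: merge any entity or
-            # collection held as the remote value into the session cache so
-            # that both sides point at the same merged instances.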
- remote_value = attribute.get_remote_value(entity) - if isinstance( - remote_value, - ( - ftrack_api_old.entity.base.Entity, - ftrack_api_old.collection.Collection, - ftrack_api_old.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - merged_remote_value = entity.session._merge( - remote_value, merged=dict() - ) - - if merged_remote_value is not remote_value: - attribute.set_remote_value(entity, merged_remote_value) - - entity._inflated.add( - attribute.name - ) - - return function( - attribute, entity - ) - - return get_value - - -class Attributes(object): - '''Collection of properties accessible by name.''' - - def __init__(self, attributes=None): - super(Attributes, self).__init__() - self._data = dict() - if attributes is not None: - for attribute in attributes: - self.add(attribute) - - def add(self, attribute): - '''Add *attribute*.''' - existing = self._data.get(attribute.name, None) - if existing: - raise ftrack_api_old.exception.NotUniqueError( - 'Attribute with name {0} already added as {1}' - .format(attribute.name, existing) - ) - - self._data[attribute.name] = attribute - - def remove(self, attribute): - '''Remove attribute.''' - self._data.pop(attribute.name) - - def get(self, name): - '''Return attribute by *name*. - - If no attribute matches *name* then return None. - - ''' - return self._data.get(name, None) - - def keys(self): - '''Return list of attribute names.''' - return self._data.keys() - - def __contains__(self, item): - '''Return whether *item* present.''' - if not isinstance(item, Attribute): - return False - - return item.name in self._data - - def __iter__(self): - '''Return iterator over attributes.''' - return self._data.itervalues() - - def __len__(self): - '''Return count of attributes.''' - return len(self._data) - - -class Attribute(object): - '''A name and value pair persisted remotely.''' - - def __init__( - self, name, default_value=ftrack_api_old.symbol.NOT_SET, mutable=True, - computed=False - ): - '''Initialise attribute with *name*. - - *default_value* represents the default value for the attribute. It may - be a callable. It is not used within the attribute when providing - values, but instead exists for other parts of the system to reference. - - If *mutable* is set to False then the local value of the attribute on an - entity can only be set when both the existing local and remote values - are :attr:`ftrack_api_old.symbol.NOT_SET`. The exception to this is when the - target value is also :attr:`ftrack_api_old.symbol.NOT_SET`. - - If *computed* is set to True the value is a remote side computed value - and should not be long-term cached. 
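-
-        A minimal construction sketch (values purely illustrative)::
-
-            >>> attribute = Attribute('code', default_value='', mutable=False)
-            >>> attribute.name
-            'code'
-            >>> attribute.mutable
-            False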
- - ''' - super(Attribute, self).__init__() - self._name = name - self._mutable = mutable - self._computed = computed - self.default_value = default_value - - self._local_key = 'local' - self._remote_key = 'remote' - - def __repr__(self): - '''Return representation of entity.''' - return '<{0}.{1}({2}) object at {3}>'.format( - self.__module__, - self.__class__.__name__, - self.name, - id(self) - ) - - def get_entity_storage(self, entity): - '''Return attribute storage on *entity* creating if missing.''' - storage_key = '_ftrack_attribute_storage' - storage = getattr(entity, storage_key, None) - if storage is None: - storage = collections.defaultdict( - lambda: - { - self._local_key: ftrack_api_old.symbol.NOT_SET, - self._remote_key: ftrack_api_old.symbol.NOT_SET - } - ) - setattr(entity, storage_key, storage) - - return storage - - @property - def name(self): - '''Return name.''' - return self._name - - @property - def mutable(self): - '''Return whether attribute is mutable.''' - return self._mutable - - @property - def computed(self): - '''Return whether attribute is computed.''' - return self._computed - - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - ''' - value = self.get_local_value(entity) - if value is not ftrack_api_old.symbol.NOT_SET: - return value - - value = self.get_remote_value(entity) - if value is not ftrack_api_old.symbol.NOT_SET: - return value - - if not entity.session.auto_populate: - return value - - self.populate_remote_value(entity) - return self.get_remote_value(entity) - - def get_local_value(self, entity): - '''Return locally set value for *entity*.''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._local_key] - - def get_remote_value(self, entity): - '''Return remote value for *entity*. - - .. note:: - - Only return locally stored remote value, do not fetch from remote. - - ''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._remote_key] - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if ( - not self.mutable - and self.is_set(entity) - and value is not ftrack_api_old.symbol.NOT_SET - ): - raise ftrack_api_old.exception.ImmutableAttributeError(self) - - old_value = self.get_local_value(entity) - - storage = self.get_entity_storage(entity) - storage[self.name][self._local_key] = value - - # Record operation. - if entity.session.record_operations: - entity.session.recorded_operations.push( - ftrack_api_old.operation.UpdateEntityOperation( - entity.entity_type, - ftrack_api_old.inspection.primary_key(entity), - self.name, - old_value, - value - ) - ) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - storage = self.get_entity_storage(entity) - storage[self.name][self._remote_key] = value - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*.''' - entity.session.populate([entity], self.name) - - def is_modified(self, entity): - '''Return whether local value set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - return ( - local_value is not ftrack_api_old.symbol.NOT_SET - and local_value != remote_value - ) - - def is_set(self, entity): - '''Return whether a value is set for *entity*.''' - return any([ - self.get_local_value(entity) is not ftrack_api_old.symbol.NOT_SET, - self.get_remote_value(entity) is not ftrack_api_old.symbol.NOT_SET - ]) - - -class ScalarAttribute(Attribute): - '''Represent a scalar value.''' - - def __init__(self, name, data_type, **kw): - '''Initialise property.''' - super(ScalarAttribute, self).__init__(name, **kw) - self.data_type = data_type - - -class ReferenceAttribute(Attribute): - '''Reference another entity.''' - - def __init__(self, name, entity_type, **kw): - '''Initialise property.''' - super(ReferenceAttribute, self).__init__(name, **kw) - self.entity_type = entity_type - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*. - - As attribute references another entity, use that entity's configured - default projections to auto populate useful attributes when loading. - - ''' - reference_entity_type = entity.session.types[self.entity_type] - default_projections = reference_entity_type.default_projections - - projections = [] - if default_projections: - for projection in default_projections: - projections.append('{0}.{1}'.format(self.name, projection)) - else: - projections.append(self.name) - - entity.session.populate([entity], ', '.join(projections)) - - def is_modified(self, entity): - '''Return whether a local value has been set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. - - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - - if local_value is ftrack_api_old.symbol.NOT_SET: - return False - - if remote_value is ftrack_api_old.symbol.NOT_SET: - return True - - if ( - ftrack_api_old.inspection.identity(local_value) - != ftrack_api_old.inspection.identity(remote_value) - ): - return True - - return False - - - @merge_references - def get_value(self, entity): - return super(ReferenceAttribute, self).get_value( - entity - ) - -class AbstractCollectionAttribute(Attribute): - '''Base class for collection attributes.''' - - #: Collection class used by attribute. - collection_class = None - - @merge_references - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - .. note:: - - As value is a collection that is mutable, will transfer a remote - value into the local value on access if no local value currently - set. - - ''' - super(AbstractCollectionAttribute, self).get_value(entity) - - # Conditionally, copy remote value into local value so that it can be - # mutated without side effects. 
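-        # The copy is only taken when no local value is set yet; an
-        # ImmutableAttributeError raised by set_local_value is swallowed as an
-        # immutable attribute can simply keep exposing the remote value.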
- local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - if ( - local_value is ftrack_api_old.symbol.NOT_SET - and isinstance(remote_value, self.collection_class) - ): - try: - with entity.session.operation_recording(False): - self.set_local_value(entity, copy.copy(remote_value)) - except ftrack_api_old.exception.ImmutableAttributeError: - pass - - value = self.get_local_value(entity) - - # If the local value is still not set then attempt to set it with a - # suitable placeholder collection so that the caller can interact with - # the collection using its normal interface. This is required for a - # newly created entity for example. It *could* be done as a simple - # default value, but that would incur cost for every collection even - # when they are not modified before commit. - if value is ftrack_api_old.symbol.NOT_SET: - try: - with entity.session.operation_recording(False): - self.set_local_value( - entity, - # None should be treated as empty collection. - None - ) - except ftrack_api_old.exception.ImmutableAttributeError: - pass - - return self.get_local_value(entity) - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if value is not ftrack_api_old.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = self.mutable - - super(AbstractCollectionAttribute, self).set_local_value(entity, value) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - if value is not ftrack_api_old.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = False - - super(AbstractCollectionAttribute, self).set_remote_value(entity, value) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to appropriate collection instance for *entity*. - - .. note:: - - If *value* is None then return a suitable empty collection. - - ''' - raise NotImplementedError() - - -class CollectionAttribute(AbstractCollectionAttribute): - '''Represent a collection of other entities.''' - - #: Collection class used by attribute. - collection_class = ftrack_api_old.collection.Collection - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to a Collection instance on *entity*.''' - - if not isinstance(value, ftrack_api_old.collection.Collection): - - if value is None: - value = ftrack_api_old.collection.Collection(entity, self) - - elif isinstance(value, list): - value = ftrack_api_old.collection.Collection( - entity, self, data=value - ) - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - - else: - if value.attribute is not self: - raise ftrack_api_old.exception.AttributeError( - 'Collection already bound to a different attribute' - ) - - return value - - -class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped key, value collection of entities.''' - - #: Collection class used by attribute. - collection_class = ftrack_api_old.collection.KeyValueMappedCollectionProxy - - def __init__( - self, name, creator, key_attribute, value_attribute, **kw - ): - '''Initialise attribute with *name*. - - *creator* should be a function that accepts a dictionary of data and - is used by the referenced collection to create new entities in the - collection. - - *key_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'key' of the dictionary. 
- - *value_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'value' of the dictionary. - - ''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - - super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api_old.collection.KeyValueMappedCollectionProxy - ): - - if value is None: - value = ftrack_api_old.collection.KeyValueMappedCollectionProxy( - ftrack_api_old.collection.Collection(entity, self), - self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, (list, ftrack_api_old.collection.Collection)): - - if isinstance(value, list): - value = ftrack_api_old.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api_old.collection.KeyValueMappedCollectionProxy( - value, self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api_old.collection.KeyValueMappedCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a KeyValueMappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). - collection = ftrack_api_old.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api_old.collection.KeyValueMappedCollectionProxy( - collection, self.creator, - self.key_attribute, self.value_attribute - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api_old.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value - - -class CustomAttributeCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped custom attribute collection of entities.''' - - #: Collection class used by attribute. - collection_class = ( - ftrack_api_old.collection.CustomAttributeCollectionProxy - ) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api_old.collection.CustomAttributeCollectionProxy - ): - - if value is None: - value = ftrack_api_old.collection.CustomAttributeCollectionProxy( - ftrack_api_old.collection.Collection(entity, self) - ) - - elif isinstance(value, (list, ftrack_api_old.collection.Collection)): - - # Why are we creating a new if it is a list? 
This will cause - # any merge to create a new proxy and collection. - if isinstance(value, list): - value = ftrack_api_old.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api_old.collection.CustomAttributeCollectionProxy( - value - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api_old.collection.CustomAttributeCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a MappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). - collection = ftrack_api_old.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api_old.collection.CustomAttributeCollectionProxy( - collection - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api_old.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value diff --git a/pype/vendor/ftrack_api_old/cache.py b/pype/vendor/ftrack_api_old/cache.py deleted file mode 100644 index b62fe176bc..0000000000 --- a/pype/vendor/ftrack_api_old/cache.py +++ /dev/null @@ -1,579 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''Caching framework. - -Defines a standardised :class:`Cache` interface for storing data against -specific keys. Key generation is also standardised using a :class:`KeyMaker` -interface. - -Combining a Cache and KeyMaker allows for memoisation of function calls with -respect to the arguments used by using a :class:`Memoiser`. - -As a convenience a simple :func:`memoise` decorator is included for quick -memoisation of function using a global cache and standard key maker. - -''' - -import collections -import functools -import abc -import copy -import inspect -import re -import anydbm -import contextlib -try: - import cPickle as pickle -except ImportError: # pragma: no cover - import pickle - -import ftrack_api_old.inspection -import ftrack_api_old.symbol - - -class Cache(object): - '''Cache interface. - - Derive from this to define concrete cache implementations. A cache is - centered around the concept of key:value pairings where the key is unique - across the cache. - - ''' - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - @abc.abstractmethod - def set(self, key, value): - '''Set *value* for *key*.''' - - @abc.abstractmethod - def remove(self, key): - '''Remove *key* and return stored value. 
- - Raise :exc:`KeyError` if *key* not found. - - ''' - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - raise NotImplementedError() # pragma: no cover - - def values(self): - '''Return values for current keys.''' - values = [] - for key in self.keys(): - try: - value = self.get(key) - except KeyError: - continue - else: - values.append(value) - - return values - - def clear(self, pattern=None): - '''Remove all keys matching *pattern*. - - *pattern* should be a regular expression string. - - If *pattern* is None then all keys will be removed. - - ''' - if pattern is not None: - pattern = re.compile(pattern) - - for key in self.keys(): - if pattern is not None: - if not pattern.search(key): - continue - - try: - self.remove(key) - except KeyError: - pass - - -class ProxyCache(Cache): - '''Proxy another cache.''' - - def __init__(self, proxied): - '''Initialise cache with *proxied* cache instance.''' - self.proxied = proxied - super(ProxyCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.get(key) - - def set(self, key, value): - '''Set *value* for *key*.''' - return self.proxied.set(key, value) - - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.remove(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self.proxied.keys() - - -class LayeredCache(Cache): - '''Layered cache.''' - - def __init__(self, caches): - '''Initialise cache with *caches*.''' - super(LayeredCache, self).__init__() - self.caches = caches - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - Attempt to retrieve from cache layers in turn, starting with shallowest. - If value retrieved, then also set the value in each higher level cache - up from where retrieved. - - ''' - target_caches = [] - value = ftrack_api_old.symbol.NOT_SET - - for cache in self.caches: - try: - value = cache.get(key) - except KeyError: - target_caches.append(cache) - continue - else: - break - - if value is ftrack_api_old.symbol.NOT_SET: - raise KeyError(key) - - # Set value on all higher level caches. - for cache in target_caches: - cache.set(key, value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - for cache in self.caches: - cache.set(key, value) - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found in any layer. - - ''' - removed = False - for cache in self.caches: - try: - cache.remove(key) - except KeyError: - pass - else: - removed = True - - if not removed: - raise KeyError(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - keys = [] - for cache in self.caches: - keys.extend(cache.keys()) - - return list(set(keys)) - - -class MemoryCache(Cache): - '''Memory based cache.''' - - def __init__(self): - '''Initialise cache.''' - self._cache = {} - super(MemoryCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. 
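-
-        A short usage example::
-
-            >>> cache = MemoryCache()
-            >>> cache.set('key', 'value')
-            >>> cache.get('key')
-            'value'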
- - ''' - return self._cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - self._cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - del self._cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self._cache.keys() - - -class FileCache(Cache): - '''File based cache that uses :mod:`anydbm` module. - - .. note:: - - No locking of the underlying file is performed. - - ''' - - def __init__(self, path): - '''Initialise cache at *path*.''' - self.path = path - - # Initialise cache. - cache = anydbm.open(self.path, 'c') - cache.close() - - super(FileCache, self).__init__() - - @contextlib.contextmanager - def _database(self): - '''Yield opened database file.''' - cache = anydbm.open(self.path, 'w') - try: - yield cache - finally: - cache.close() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - return cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - with self._database() as cache: - cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - del cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - with self._database() as cache: - return cache.keys() - - -class SerialisedCache(ProxyCache): - '''Proxied cache that stores values as serialised data.''' - - def __init__(self, proxied, encode=None, decode=None): - '''Initialise cache with *encode* and *decode* callables. - - *proxied* is the underlying cache to use for storage. - - ''' - self.encode = encode - self.decode = decode - super(SerialisedCache, self).__init__(proxied) - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - value = super(SerialisedCache, self).get(key) - if self.decode: - value = self.decode(value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - if self.encode: - value = self.encode(value) - - super(SerialisedCache, self).set(key, value) - - -class KeyMaker(object): - '''Generate unique keys.''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise key maker.''' - super(KeyMaker, self).__init__() - self.item_separator = '' - - def key(self, *items): - '''Return key for *items*.''' - keys = [] - for item in items: - keys.append(self._key(item)) - - return self.item_separator.join(keys) - - @abc.abstractmethod - def _key(self, obj): - '''Return key for *obj*.''' - - -class StringKeyMaker(KeyMaker): - '''Generate string key.''' - - def _key(self, obj): - '''Return key for *obj*.''' - return str(obj) - - -class ObjectKeyMaker(KeyMaker): - '''Generate unique keys for objects.''' - - def __init__(self): - '''Initialise key maker.''' - super(ObjectKeyMaker, self).__init__() - self.item_separator = '\0' - self.mapping_identifier = '\1' - self.mapping_pair_separator = '\2' - self.iterable_identifier = '\3' - self.name_identifier = '\4' - - def _key(self, item): - '''Return key for *item*. - - Returned key will be a pickle like string representing the *item*. This - allows for typically non-hashable objects to be used in key generation - (such as dictionaries). 
- - If *item* is iterable then each item in it shall also be passed to this - method to ensure correct key generation. - - Special markers are used to distinguish handling of specific cases in - order to ensure uniqueness of key corresponds directly to *item*. - - Example:: - - >>> key_maker = ObjectKeyMaker() - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... return x + y - ... - >>> key_maker.key(add, (1, 2)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03' - >>> key_maker.key(add, (1, 3)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' - - ''' - # TODO: Consider using a more robust and comprehensive solution such as - # dill (https://github.com/uqfoundation/dill). - if isinstance(item, collections.Iterable): - if isinstance(item, basestring): - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - if isinstance(item, collections.Mapping): - contents = self.item_separator.join([ - ( - self._key(key) + - self.mapping_pair_separator + - self._key(value) - ) - for key, value in sorted(item.items()) - ]) - return ( - self.mapping_identifier + - contents + - self.mapping_identifier - ) - - else: - contents = self.item_separator.join([ - self._key(item) for item in item - ]) - return ( - self.iterable_identifier + - contents + - self.iterable_identifier - ) - - elif inspect.ismethod(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.im_class.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isfunction(item) or inspect.isclass(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isbuiltin(item): - return self.name_identifier + item.__name__ - - else: - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - -class Memoiser(object): - '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. - - Example:: - - >>> memoiser = Memoiser(MemoryCache(), ObjectKeyMaker()) - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... print 'Called' - ... return x + y - ... - >>> memoiser.call(add, (1, 2), {}) - Called - >>> memoiser.call(add, (1, 2), {}) - >>> memoiser.call(add, (1, 3), {}) - Called - - ''' - - def __init__(self, cache=None, key_maker=None, return_copies=True): - '''Initialise with *cache* and *key_maker* to use. - - If *cache* is not specified a default :class:`MemoryCache` will be - used. Similarly, if *key_maker* is not specified a default - :class:`ObjectKeyMaker` will be used. - - If *return_copies* is True then all results returned from the cache will - be deep copies to avoid indirect mutation of cached values. - - ''' - self.cache = cache - if self.cache is None: - self.cache = MemoryCache() - - self.key_maker = key_maker - if self.key_maker is None: - self.key_maker = ObjectKeyMaker() - - self.return_copies = return_copies - super(Memoiser, self).__init__() - - def call(self, function, args=None, kw=None): - '''Call *function* with *args* and *kw* and return result. - - If *function* was previously called with exactly the same arguments - then return cached result if available. - - Store result for call in cache. - - ''' - if args is None: - args = () - - if kw is None: - kw = {} - - # Support arguments being passed as positionals or keywords. 
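-        # E.g. for the ``add`` example above, call(add, (1, 2)) and
-        # call(add, (1,), {'y': 2}) normalise to the same arguments mapping
-        # and therefore produce the same cache key.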
- arguments = inspect.getcallargs(function, *args, **kw) - - key = self.key_maker.key(function, arguments) - try: - value = self.cache.get(key) - - except KeyError: - value = function(*args, **kw) - self.cache.set(key, value) - - # If requested, deep copy value to return in order to avoid cached value - # being inadvertently altered by the caller. - if self.return_copies: - value = copy.deepcopy(value) - - return value - - -def memoise_decorator(memoiser): - '''Decorator to memoise function calls using *memoiser*.''' - def outer(function): - - @functools.wraps(function) - def inner(*args, **kw): - return memoiser.call(function, args, kw) - - return inner - - return outer - - -#: Default memoiser. -memoiser = Memoiser() - -#: Default memoise decorator using standard cache and key maker. -memoise = memoise_decorator(memoiser) diff --git a/pype/vendor/ftrack_api_old/collection.py b/pype/vendor/ftrack_api_old/collection.py deleted file mode 100644 index 177c423df0..0000000000 --- a/pype/vendor/ftrack_api_old/collection.py +++ /dev/null @@ -1,507 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging - -import collections -import copy - -import ftrack_api_old.exception -import ftrack_api_old.inspection -import ftrack_api_old.symbol -import ftrack_api_old.operation -import ftrack_api_old.cache -from ftrack_api_old.logging import LazyLogMessage as L - - -class Collection(collections.MutableSequence): - '''A collection of entities.''' - - def __init__(self, entity, attribute, mutable=True, data=None): - '''Initialise collection.''' - self.entity = entity - self.attribute = attribute - self._data = [] - self._identities = set() - - # Set initial dataset. - # Note: For initialisation, immutability is deferred till after initial - # population as otherwise there would be no public way to initialise an - # immutable collection. The reason self._data is not just set directly - # is to ensure other logic can be applied without special handling. - self.mutable = True - try: - if data is None: - data = [] - - with self.entity.session.operation_recording(False): - self.extend(data) - finally: - self.mutable = mutable - - def _identity_key(self, entity): - '''Return identity key for *entity*.''' - return str(ftrack_api_old.inspection.identity(entity)) - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying data store. - - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance._data = copy.copy(self._data) - copied_instance._identities = copy.copy(self._identities) - - return copied_instance - - def _notify(self, old_value): - '''Notify about modification.''' - # Record operation. 
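-        # ``old_value`` is the pre-modification copy of the collection taken
-        # by the caller; pushing it alongside ``self`` lets the operation
-        # describe both sides of the change.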
- if self.entity.session.record_operations: - self.entity.session.recorded_operations.push( - ftrack_api_old.operation.UpdateEntityOperation( - self.entity.entity_type, - ftrack_api_old.inspection.primary_key(self.entity), - self.attribute.name, - old_value, - self - ) - ) - - def insert(self, index, item): - '''Insert *item* at *index*.''' - if not self.mutable: - raise ftrack_api_old.exception.ImmutableCollectionError(self) - - if item in self: - raise ftrack_api_old.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - self._data.insert(index, item) - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __contains__(self, value): - '''Return whether *value* present in collection.''' - return self._identity_key(value) in self._identities - - def __getitem__(self, index): - '''Return item at *index*.''' - return self._data[index] - - def __setitem__(self, index, item): - '''Set *item* against *index*.''' - if not self.mutable: - raise ftrack_api_old.exception.ImmutableCollectionError(self) - - try: - existing_index = self.index(item) - except ValueError: - pass - else: - if index != existing_index: - raise ftrack_api_old.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - try: - existing_item = self._data[index] - except IndexError: - pass - else: - self._identities.remove(self._identity_key(existing_item)) - - self._data[index] = item - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __delitem__(self, index): - '''Remove item at *index*.''' - if not self.mutable: - raise ftrack_api_old.exception.ImmutableCollectionError(self) - - old_value = copy.copy(self) - item = self._data[index] - del self._data[index] - self._identities.remove(self._identity_key(item)) - self._notify(old_value) - - def __len__(self): - '''Return count of items.''' - return len(self._data) - - def __eq__(self, other): - '''Return whether this collection is equal to *other*.''' - if not isinstance(other, Collection): - return False - - return sorted(self._identities) == sorted(other._identities) - - def __ne__(self, other): - '''Return whether this collection is not equal to *other*.''' - return not self == other - - -class MappedCollectionProxy(collections.MutableMapping): - '''Common base class for mapped collection of entities.''' - - def __init__(self, collection): - '''Initialise proxy for *collection*.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.collection = collection - super(MappedCollectionProxy, self).__init__() - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying collection. 
- - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance.collection = copy.copy(self.collection) - - return copied_instance - - @property - def mutable(self): - '''Return whether collection is mutable.''' - return self.collection.mutable - - @mutable.setter - def mutable(self, value): - '''Set whether collection is mutable to *value*.''' - self.collection.mutable = value - - @property - def attribute(self): - '''Return attribute bound to.''' - return self.collection.attribute - - @attribute.setter - def attribute(self, value): - '''Set bound attribute to *value*.''' - self.collection.attribute = value - - -class KeyValueMappedCollectionProxy(MappedCollectionProxy): - '''A mapped collection of key, value entities. - - Proxy a standard :class:`Collection` as a mapping where certain attributes - from the entities in the collection are mapped to key, value pairs. - - For example:: - - >>> collection = [Metadata(key='foo', value='bar'), ...] - >>> mapped = KeyValueMappedCollectionProxy( - ... collection, create_metadata, - ... key_attribute='key', value_attribute='value' - ... ) - >>> print mapped['foo'] - 'bar' - >>> mapped['bam'] = 'biz' - >>> print mapped.collection[-1] - Metadata(key='bam', value='biz') - - ''' - - def __init__( - self, collection, creator, key_attribute, value_attribute - ): - '''Initialise collection.''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - super(KeyValueMappedCollectionProxy, self).__init__(collection) - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - for entity in self.collection: - if entity[self.key_attribute] == key: - return entity - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - return entity[self.value_attribute] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - try: - entity = self._get_entity_by_key(key) - except KeyError: - data = { - self.key_attribute: key, - self.value_attribute: value - } - entity = self.creator(self, data) - - if ( - ftrack_api_old.inspection.state(entity) is - ftrack_api_old.symbol.CREATED - ): - # Persisting this entity will be handled here, record the - # operation. - self.collection.append(entity) - - else: - # The entity is created and persisted separately by the - # creator. Do not record this operation. - with self.collection.entity.session.operation_recording(False): - # Do not record this operation since it will trigger - # redudant and potentially failing operations. - self.collection.append(entity) - - else: - entity[self.value_attribute] = value - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. 
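The branching in `__setitem__` above distinguishes entities the proxy creates itself from ones already persisted by the creator, but from the caller's side the proxy simply behaves like a dict. A hedged sketch against a live session; the query and key are placeholders, and `ftrack_api_old.Session` is assumed to mirror the upstream `ftrack_api.Session`:

    import ftrack_api_old

    session = ftrack_api_old.Session()  # Credentials read from environment.
    task = session.query('Task').first()

    task['metadata']['review_note'] = 'approved'  # Creates a Metadata entity.
    print(task['metadata']['review_note'])        # 'approved'
    del task['metadata']['review_note']           # Deletes the entity as well.
    session.commit()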
- - ''' - for index, entity in enumerate(self.collection): - if entity[self.key_attribute] == key: - break - else: - raise KeyError(key) - - del self.collection[index] - entity.session.delete(entity) - - def __iter__(self): - '''Iterate over all keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return len(keys) - - -class PerSessionDefaultKeyMaker(ftrack_api_old.cache.KeyMaker): - '''Generate key for session.''' - - def _key(self, obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - session = obj.get('session') - if session is not None: - # Key by session only. - return str(id(session)) - - return str(obj) - - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api_old.cache.memoise_decorator( - ftrack_api_old.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. - - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type from ' - 'CustomAttributeConfiguration' - ).all() - - -class CustomAttributeCollectionProxy(MappedCollectionProxy): - '''A mapped collection of custom attribute value entities.''' - - def __init__( - self, collection - ): - '''Initialise collection.''' - self.key_attribute = 'configuration_id' - self.value_attribute = 'value' - super(CustomAttributeCollectionProxy, self).__init__(collection) - - def _get_entity_configurations(self): - '''Return all configurations for current collection entity.''' - entity = self.collection.entity - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Asset': - project_id = entity['parent']['project_id'] - entity_type = 'asset' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. 
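`PerSessionDefaultKeyMaker` above keys a call solely on `id(session)`, so a function wrapped with `memoise_session` runs at most once per `Session` instance regardless of its other arguments. A sketch under that assumption (`list_object_types` is hypothetical, and `session` is assumed to be a configured `ftrack_api_old.Session`):

    from ftrack_api_old.collection import memoise_session

    @memoise_session
    def list_object_types(session):
        '''Query object types once per *session*, then reuse the result.'''
        print('querying server')
        return session.query('ObjectType').all()

    list_object_types(session)  # Hits the server.
    list_object_types(session)  # Same session: served from the cache.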
- return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - def _get_keys(self): - '''Return a list of all keys.''' - keys = [] - for configuration in self._get_entity_configurations(): - keys.append(configuration['key']) - - return keys - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - configuration_id = self.get_configuration_id_from_key(key) - for entity in self.collection: - if entity[self.key_attribute] == configuration_id: - return entity - - return None - - def get_configuration_id_from_key(self, key): - '''Return id of configuration with matching *key*. - - Raise :exc:`KeyError` if no configuration with matching *key* found. - - ''' - for configuration in self._get_entity_configurations(): - if key == configuration['key']: - return configuration['id'] - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - - if entity: - return entity[self.value_attribute] - - for configuration in self._get_entity_configurations(): - if configuration['key'] == key: - return configuration['default'] - - raise KeyError(key) - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - custom_attribute_value[self.value_attribute] = value - else: - entity = self.collection.entity - session = entity.session - data = { - self.key_attribute: self.get_configuration_id_from_key(key), - self.value_attribute: value, - 'entity_id': entity['id'] - } - - # Make sure to use the currently active collection. This is - # necessary since a merge might have replaced the current one. - self.collection.entity['custom_attributes'].collection.append( - session.create('CustomAttributeValue', data) - ) - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. 
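Note the fallback in `__getitem__` above: reading a key with no stored `CustomAttributeValue` returns the matching configuration's `default` rather than raising, and `KeyError` is only raised when no configuration matches at all. A sketch of the resulting behaviour, assuming a configured `session`; 'fstart' is a hypothetical custom attribute key:

    shot = session.query('Shot').first()

    # Stored value if one exists, otherwise the configuration default.
    print(shot['custom_attributes']['fstart'])

    shot['custom_attributes']['fstart'] = 1001  # Creates a CustomAttributeValue.
    session.commit()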
- - ''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - index = self.collection.index(custom_attribute_value) - del self.collection[index] - - custom_attribute_value.session.delete(custom_attribute_value) - else: - self.logger.warning(L( - 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', - key, self.collection.entity - )) - - def __eq__(self, collection): - '''Return True if *collection* equals proxy collection.''' - if collection is ftrack_api_old.symbol.NOT_SET: - return False - - return collection.collection == self.collection - - def __iter__(self): - '''Iterate over all keys.''' - keys = self._get_keys() - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = self._get_keys() - return len(keys) diff --git a/pype/vendor/ftrack_api_old/data.py b/pype/vendor/ftrack_api_old/data.py deleted file mode 100644 index 1802e380c0..0000000000 --- a/pype/vendor/ftrack_api_old/data.py +++ /dev/null @@ -1,119 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -from abc import ABCMeta, abstractmethod -import tempfile - - -class Data(object): - '''File-like object for manipulating data.''' - - __metaclass__ = ABCMeta - - def __init__(self): - '''Initialise data access.''' - self.closed = False - - @abstractmethod - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - @abstractmethod - def write(self, content): - '''Write content at current position.''' - - def flush(self): - '''Flush buffers ensuring data written.''' - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*. - - The *whence* argument is optional and defaults to os.SEEK_SET or 0 - (absolute file positioning); other values are os.SEEK_CUR or 1 - (seek relative to the current position) and os.SEEK_END or 2 - (seek relative to the file's end). - - ''' - raise NotImplementedError('Seek not supported.') - - def tell(self): - '''Return current position of internal pointer.''' - raise NotImplementedError('Tell not supported.') - - def close(self): - '''Flush buffers and prevent further access.''' - self.flush() - self.closed = True - - -class FileWrapper(Data): - '''Data wrapper for Python file objects.''' - - def __init__(self, wrapped_file): - '''Initialise access to *wrapped_file*.''' - self.wrapped_file = wrapped_file - self._read_since_last_write = False - super(FileWrapper, self).__init__() - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - self._read_since_last_write = True - - if limit is None: - limit = -1 - - return self.wrapped_file.read(limit) - - def write(self, content): - '''Write content at current position.''' - if self._read_since_last_write: - # Windows requires a seek before switching from read to write. 
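`Data` above is an abstract file-like base: subclasses must implement `read` and `write`, while `seek` and `tell` raise `NotImplementedError` unless overridden. A minimal in-memory subclass, written here purely as an illustration (it was not part of the removed module):

    import io

    from ftrack_api_old.data import Data

    class BytesData(Data):
        '''Data wrapper over an in-memory bytes buffer.'''

        def __init__(self, content=b''):
            '''Initialise buffer with *content*.'''
            self._buffer = io.BytesIO(content)
            super(BytesData, self).__init__()

        def read(self, limit=None):
            '''Return content from current position up to *limit*.'''
            return self._buffer.read(-1 if limit is None else limit)

        def write(self, content):
            '''Write *content* at current position.'''
            self._buffer.write(content)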
- self.seek(self.tell()) - - self.wrapped_file.write(content) - self._read_since_last_write = False - - def flush(self): - '''Flush buffers ensuring data written.''' - super(FileWrapper, self).flush() - if hasattr(self.wrapped_file, 'flush'): - self.wrapped_file.flush() - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*.''' - self.wrapped_file.seek(offset, whence) - - def tell(self): - '''Return current position of internal pointer.''' - return self.wrapped_file.tell() - - def close(self): - '''Flush buffers and prevent further access.''' - if not self.closed: - super(FileWrapper, self).close() - if hasattr(self.wrapped_file, 'close'): - self.wrapped_file.close() - - -class File(FileWrapper): - '''Data wrapper accepting filepath.''' - - def __init__(self, path, mode='rb'): - '''Open file at *path* with *mode*.''' - file_object = open(path, mode) - super(File, self).__init__(file_object) - - -class String(FileWrapper): - '''Data wrapper using TemporaryFile instance.''' - - def __init__(self, content=None): - '''Initialise data with *content*.''' - super(String, self).__init__( - tempfile.TemporaryFile() - ) - - if content is not None: - self.wrapped_file.write(content) - self.wrapped_file.seek(0) diff --git a/pype/vendor/ftrack_api_old/entity/__init__.py b/pype/vendor/ftrack_api_old/entity/__init__.py deleted file mode 100644 index 1d452f2828..0000000000 --- a/pype/vendor/ftrack_api_old/entity/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack \ No newline at end of file diff --git a/pype/vendor/ftrack_api_old/entity/asset_version.py b/pype/vendor/ftrack_api_old/entity/asset_version.py deleted file mode 100644 index 0473dde80f..0000000000 --- a/pype/vendor/ftrack_api_old/entity/asset_version.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api_old.entity.base - - -class AssetVersion(ftrack_api_old.entity.base.Entity): - '''Represent asset version.''' - - def create_component( - self, path, data=None, location=None - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). This version is - automatically set as the component's version. - - If *location* is specified then automatically add component to that - location. - - ''' - if data is None: - data = {} - - data.pop('version_id', None) - data['version'] = self - - return self.session.create_component(path, data=data, location=location) - - def encode_media(self, media, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. - - *media* can be a path to a file or a FileComponent in the ftrack.server - location. 
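`create_component` above accepts either a plain file path or a Clique-style sequence expression. A sketch of both forms, assuming a configured `session`; the paths and component name are placeholders, and `Session.pick_location` is assumed to behave as in the upstream API:

    version = session.query('AssetVersion').first()
    location = session.pick_location()

    # Single file component.
    version.create_component('/renders/preview.mov', location=location)

    # Sequence component with one child component per frame in the ranges.
    version.create_component(
        '/renders/shot010.%04d.exr [1001-1100]',
        data={'name': 'main'},
        location=location
    )
    session.commit()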
- - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible, and will be - set as the version's thumbnail. - - The new components will automatically be associated with the version. - A server version of 3.3.32 or higher is required for this to function - properly. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. - ''' - return self.session.encode_media( - media, version_id=self['id'], keep_original=keep_original - ) diff --git a/pype/vendor/ftrack_api_old/entity/base.py b/pype/vendor/ftrack_api_old/entity/base.py deleted file mode 100644 index f4942b5181..0000000000 --- a/pype/vendor/ftrack_api_old/entity/base.py +++ /dev/null @@ -1,402 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import abc -import collections -import logging - -import ftrack_api_old.symbol -import ftrack_api_old.attribute -import ftrack_api_old.inspection -import ftrack_api_old.exception -import ftrack_api_old.operation -from ftrack_api_old.logging import LazyLogMessage as L - - -class DynamicEntityTypeMetaclass(abc.ABCMeta): - '''Custom metaclass to customise representation of dynamic classes. - - .. note:: - - Derive from same metaclass as derived bases to avoid conflicts. - - ''' - def __repr__(self): - '''Return representation of class.''' - return ''.format(self.__name__) - - -class Entity(collections.MutableMapping): - '''Base class for all entities.''' - - __metaclass__ = DynamicEntityTypeMetaclass - - entity_type = 'Entity' - attributes = None - primary_key_attributes = None - default_projections = None - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api_old.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. - - ''' - super(Entity, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self.session = session - self._inflated = set() - - if data is None: - data = {} - - self.logger.debug(L( - '{0} entity from {1!r}.', - ('Reconstructing' if reconstructing else 'Constructing'), data - )) - - self._ignore_data_keys = ['__entity_type__'] - if not reconstructing: - self._construct(data) - else: - self._reconstruct(data) - - def _construct(self, data): - '''Construct from *data*.''' - # Suspend operation recording so that all modifications can be applied - # in single create operation. In addition, recording a modification - # operation requires a primary key which may not be available yet. - - relational_attributes = dict() - - with self.session.operation_recording(False): - # Set defaults for any unset local attributes. - for attribute in self.__class__.attributes: - if attribute.name not in data: - default_value = attribute.default_value - if callable(default_value): - default_value = default_value(self) - - attribute.set_local_value(self, default_value) - - - # Data represents locally set values. - for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such ' - 'attribute found on entity {1!r}.', key, self - )) - continue - - if not isinstance(attribute, ftrack_api_old.attribute.ScalarAttribute): - relational_attributes.setdefault( - attribute, value - ) - - else: - attribute.set_local_value(self, value) - - # Record create operation. - # Note: As this operation is recorded *before* any Session.merge takes - # place there is the possibility that the operation will hold references - # to outdated data in entity_data. However, this would be unusual in - # that it would mean the same new entity was created twice and only one - # altered. Conversely, if this operation were recorded *after* - # Session.merge took place, any cache would not be able to determine - # the status of the entity, which could be important if the cache should - # not store newly created entities that have not yet been persisted. Out - # of these two 'evils' this approach is deemed the lesser at this time. - # A third, more involved, approach to satisfy both might be to record - # the operation with a PENDING entity_data value and then update with - # merged values post merge. - if self.session.record_operations: - entity_data = {} - - # Lower level API used here to avoid including any empty - # collections that are automatically generated on access. - for attribute in self.attributes: - value = attribute.get_local_value(self) - if value is not ftrack_api_old.symbol.NOT_SET: - entity_data[attribute.name] = value - - self.session.recorded_operations.push( - ftrack_api_old.operation.CreateEntityOperation( - self.entity_type, - ftrack_api_old.inspection.primary_key(self), - entity_data - ) - ) - - for attribute, value in relational_attributes.items(): - # Finally we set values for "relational" attributes, we need - # to do this at the end in order to get the create operations - # in the correct order as the newly created attributes might - # contain references to the newly created entity. - - attribute.set_local_value( - self, value - ) - - def _reconstruct(self, data): - '''Reconstruct from *data*.''' - # Data represents remote values. 
- for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such attribute ' - 'found on entity {1!r}.', key, self - )) - continue - - attribute.set_remote_value(self, value) - - def __repr__(self): - '''Return representation of instance.''' - return ''.format( - self.__class__.__name__, id(self) - ) - - def __str__(self): - '''Return string representation of instance.''' - with self.session.auto_populating(False): - primary_key = ['Unknown'] - try: - primary_key = ftrack_api_old.inspection.primary_key(self).values() - except KeyError: - pass - - return '<{0}({1})>'.format( - self.__class__.__name__, ', '.join(primary_key) - ) - - def __hash__(self): - '''Return hash representing instance.''' - return hash(str(ftrack_api_old.inspection.identity(self))) - - def __eq__(self, other): - '''Return whether *other* is equal to this instance. - - .. note:: - - Equality is determined by both instances having the same identity. - Values of attributes are not considered. - - ''' - try: - return ( - ftrack_api_old.inspection.identity(other) - == ftrack_api_old.inspection.identity(self) - ) - except (AttributeError, KeyError): - return False - - def __getitem__(self, key): - '''Return attribute value for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - return attribute.get_value(self) - - def __setitem__(self, key, value): - '''Set attribute *value* for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - attribute.set_local_value(self, value) - - def __delitem__(self, key): - '''Clear attribute value for *key*. - - .. note:: - - Will not remove the attribute, but instead clear any local value - and revert to the last known server value. - - ''' - attribute = self.__class__.attributes.get(key) - attribute.set_local_value(self, ftrack_api_old.symbol.NOT_SET) - - def __iter__(self): - '''Iterate over all attributes keys.''' - for attribute in self.__class__.attributes: - yield attribute.name - - def __len__(self): - '''Return count of attributes.''' - return len(self.__class__.attributes) - - def values(self): - '''Return list of values.''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).values() - - def items(self): - '''Return list of tuples of (key, value) pairs. - - .. note:: - - Will fetch all values from the server if not already fetched or set - locally. - - ''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).items() - - def clear(self): - '''Reset all locally modified attribute values.''' - for attribute in self: - del self[attribute] - - def merge(self, entity, merged=None): - '''Merge *entity* attribute values and other data into this entity. - - Only merge values from *entity* that are not - :attr:`ftrack_api_old.symbol.NOT_SET`. - - Return a list of changes made with each change being a mapping with - the keys: - - * type - Either 'remote_attribute', 'local_attribute' or 'property'. - * name - The name of the attribute / property modified. - * old_value - The previous value. - * new_value - The new merged value. 
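Because `Entity` implements the full `MutableMapping` protocol above, instances read and write like dictionaries over their schema attributes, and `del` reverts an attribute to its last known server value rather than removing it. A sketch, assuming a configured `session`:

    task = session.query('Task').first()

    print(task['name'])    # __getitem__ resolves through the schema attribute.
    task['name'] = 'comp'  # __setitem__ sets a local value, recording an update.
    del task['name']       # __delitem__ reverts to the last known remote value.

    print(len(task))       # Count of schema attributes, not of set values.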
- - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - log_message = 'Merged {type} "{name}": {old_value!r} -> {new_value!r}' - changes = [] - - # Attributes. - - # Prioritise by type so that scalar values are set first. This should - # guarantee that the attributes making up the identity of the entity - # are merged before merging any collections that may have references to - # this entity. - attributes = collections.deque() - for attribute in entity.attributes: - if isinstance(attribute, ftrack_api_old.attribute.ScalarAttribute): - attributes.appendleft(attribute) - else: - attributes.append(attribute) - - for other_attribute in attributes: - attribute = self.attributes.get(other_attribute.name) - - # Local attributes. - other_local_value = other_attribute.get_local_value(entity) - if other_local_value is not ftrack_api_old.symbol.NOT_SET: - local_value = attribute.get_local_value(self) - if local_value != other_local_value: - merged_local_value = self.session.merge( - other_local_value, merged=merged - ) - - attribute.set_local_value(self, merged_local_value) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_local_value - }) - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # Remote attributes. - other_remote_value = other_attribute.get_remote_value(entity) - if other_remote_value is not ftrack_api_old.symbol.NOT_SET: - remote_value = attribute.get_remote_value(self) - if remote_value != other_remote_value: - merged_remote_value = self.session.merge( - other_remote_value, merged=merged - ) - - attribute.set_remote_value( - self, merged_remote_value - ) - - changes.append({ - 'type': 'remote_attribute', - 'name': attribute.name, - 'old_value': remote_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # We need to handle collections separately since - # they may store a local copy of the remote attribute - # even though it may not be modified. - if not isinstance( - attribute, ftrack_api_old.attribute.AbstractCollectionAttribute - ): - continue - - local_value = attribute.get_local_value( - self - ) - - # Populated but not modified, update it. 
- if ( - local_value is not ftrack_api_old.symbol.NOT_SET and - local_value == remote_value - ): - attribute.set_local_value( - self, merged_remote_value - ) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - return changes - - def _populate_unset_scalar_attributes(self): - '''Populate all unset scalar attributes in one query.''' - projections = [] - for attribute in self.attributes: - if isinstance(attribute, ftrack_api_old.attribute.ScalarAttribute): - if attribute.get_remote_value(self) is ftrack_api_old.symbol.NOT_SET: - projections.append(attribute.name) - - if projections: - self.session.populate([self], ', '.join(projections)) diff --git a/pype/vendor/ftrack_api_old/entity/component.py b/pype/vendor/ftrack_api_old/entity/component.py deleted file mode 100644 index 2f43ae1614..0000000000 --- a/pype/vendor/ftrack_api_old/entity/component.py +++ /dev/null @@ -1,74 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api_old.entity.base - - -class Component(ftrack_api_old.entity.base.Entity): - '''Represent a component.''' - - def get_availability(self, locations=None): - '''Return availability in *locations*. - - If *locations* is None, all known locations will be checked. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.session.get_component_availability( - self, locations=locations - ) - - -class CreateThumbnailMixin(object): - '''Mixin to add create_thumbnail method on entity class.''' - - def create_thumbnail(self, path, data=None): - '''Set entity thumbnail from *path*. - - Creates a thumbnail component using in the ftrack.server location - :meth:`Session.create_component - ` The thumbnail component - will be created using *data* if specified. If no component name is - given, `thumbnail` will be used. - - The file is expected to be of an appropriate size and valid file - type. - - .. note:: - - A :meth:`Session.commit` will be - automatically issued. - - ''' - if data is None: - data = {} - if not data.get('name'): - data['name'] = 'thumbnail' - - thumbnail_component = self.session.create_component( - path, data, location=None - ) - - origin_location = self.session.get( - 'Location', ftrack_api_old.symbol.ORIGIN_LOCATION_ID - ) - server_location = self.session.get( - 'Location', ftrack_api_old.symbol.SERVER_LOCATION_ID - ) - server_location.add_component(thumbnail_component, [origin_location]) - - # TODO: This commit can be avoided by reordering the operations in - # this method so that the component is transferred to ftrack.server - # after the thumbnail has been set. - # - # There is currently a bug in the API backend, causing the operations - # to *some* times be ordered wrongly, where the update occurs before - # the component has been created, causing an integrity error. - # - # Once this issue has been resolved, this commit can be removed and - # and the update placed between component creation and registration. 
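`create_thumbnail` above uploads the file to the `ftrack.server` location, links it as the entity's thumbnail, and issues the commit itself because of the backend ordering bug described in the comment. Typical use was a one-liner; the path is a placeholder and `session` is assumed configured:

    version = session.query('AssetVersion').first()
    thumbnail_component = version.create_thumbnail('/renders/preview.jpg')
    # No explicit commit needed; the method commits as part of the workaround.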
- self['thumbnail_id'] = thumbnail_component['id'] - self.session.commit() - - return thumbnail_component diff --git a/pype/vendor/ftrack_api_old/entity/factory.py b/pype/vendor/ftrack_api_old/entity/factory.py deleted file mode 100644 index f47c92e563..0000000000 --- a/pype/vendor/ftrack_api_old/entity/factory.py +++ /dev/null @@ -1,435 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import uuid -import functools - -import ftrack_api_old.attribute -import ftrack_api_old.entity.base -import ftrack_api_old.entity.location -import ftrack_api_old.entity.component -import ftrack_api_old.entity.asset_version -import ftrack_api_old.entity.project_schema -import ftrack_api_old.entity.note -import ftrack_api_old.entity.job -import ftrack_api_old.entity.user -import ftrack_api_old.symbol -import ftrack_api_old.cache -from ftrack_api_old.logging import LazyLogMessage as L - - -class Factory(object): - '''Entity class factory.''' - - def __init__(self): - '''Initialise factory.''' - super(Factory, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*. - - *bases* should be a list of bases to give the constructed class. If not - specified, default to :class:`ftrack_api_old.entity.base.Entity`. - - ''' - entity_type = schema['id'] - class_name = entity_type - - class_bases = bases - if class_bases is None: - class_bases = [ftrack_api_old.entity.base.Entity] - - class_namespace = dict() - - # Build attributes for class. - attributes = ftrack_api_old.attribute.Attributes() - immutable_properties = schema.get('immutable', []) - computed_properties = schema.get('computed', []) - for name, fragment in schema.get('properties', {}).items(): - mutable = name not in immutable_properties - computed = name in computed_properties - - default = fragment.get('default', ftrack_api_old.symbol.NOT_SET) - if default == '{uid}': - default = lambda instance: str(uuid.uuid4()) - - data_type = fragment.get('type', ftrack_api_old.symbol.NOT_SET) - - if data_type is not ftrack_api_old.symbol.NOT_SET: - - if data_type in ( - 'string', 'boolean', 'integer', 'number', 'variable', - 'object' - ): - # Basic scalar attribute. - if data_type == 'number': - data_type = 'float' - - if data_type == 'string': - data_format = fragment.get('format') - if data_format == 'date-time': - data_type = 'datetime' - - attribute = self.create_scalar_attribute( - class_name, name, mutable, computed, default, data_type - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'array': - attribute = self.create_collection_attribute( - class_name, name, mutable - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'mapped_array': - reference = fragment.get('items', {}).get('$ref') - if not reference: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_mapped_collection_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - else: - self.logger.debug(L( - 'Skipping {0}.{1} attribute with unrecognised data ' - 'type {2}', class_name, name, data_type - )) - else: - # Reference attribute. 
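`Factory.create` above turns a JSON-ish schema fragment into an `Entity` subclass at runtime. Since it only needs the schema mapping, it can be exercised without a server; a minimal sketch with a made-up schema:

    import ftrack_api_old.entity.factory

    schema = {
        'id': 'Demo',
        'primary_key': ['id'],
        'immutable': ['id'],
        'properties': {
            'id': {'type': 'string'},
            'name': {'type': 'string'},
        },
    }

    DemoClass = ftrack_api_old.entity.factory.Factory().create(schema)
    print(DemoClass.entity_type)             # 'Demo'
    print(DemoClass.primary_key_attributes)  # ['id']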
- reference = fragment.get('$ref', ftrack_api_old.symbol.NOT_SET) - if reference is ftrack_api_old.symbol.NOT_SET: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_reference_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - default_projections = schema.get('default_projections', []) - - # Construct class. - class_namespace['entity_type'] = entity_type - class_namespace['attributes'] = attributes - class_namespace['primary_key_attributes'] = schema['primary_key'][:] - class_namespace['default_projections'] = default_projections - - cls = type( - str(class_name), # type doesn't accept unicode. - tuple(class_bases), - class_namespace - ) - - return cls - - def create_scalar_attribute( - self, class_name, name, mutable, computed, default, data_type - ): - '''Return appropriate scalar attribute instance.''' - return ftrack_api_old.attribute.ScalarAttribute( - name, data_type=data_type, default_value=default, mutable=mutable, - computed=computed - ) - - def create_reference_attribute(self, class_name, name, mutable, reference): - '''Return appropriate reference attribute instance.''' - return ftrack_api_old.attribute.ReferenceAttribute( - name, reference, mutable=mutable - ) - - def create_collection_attribute(self, class_name, name, mutable): - '''Return appropriate collection attribute instance.''' - return ftrack_api_old.attribute.CollectionAttribute( - name, mutable=mutable - ) - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has ' - 'no implementation defined for reference {2}.', - class_name, name, reference - )) - - -class PerSessionDefaultKeyMaker(ftrack_api_old.cache.KeyMaker): - '''Generate key for defaults.''' - - def _key(self, obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - entity = obj.get('entity') - if entity is not None: - # Key by session only. - return str(id(entity.session)) - - return str(obj) - - -#: Memoiser for use with default callables that should only be called once per -# session. -memoise_defaults = ftrack_api_old.cache.memoise_decorator( - ftrack_api_old.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api_old.cache.memoise_decorator( - ftrack_api_old.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. 
- - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type, ' - 'is_hierarchical from CustomAttributeConfiguration' - ).all() - - -def _get_entity_configurations(entity): - '''Return all configurations for current collection entity.''' - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity.entity_type == 'Asset': - entity_type = 'asset' - - if entity.entity_type in ('TypedContextList', 'AssetVersionList'): - entity_type = 'list' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - # The custom attribute configuration is for the target entity type. - configurations.append(configuration) - elif ( - entity_type in ('asset', 'assetversion', 'show', 'task') and - configuration['project_id'] in (project_id, None) and - configuration['is_hierarchical'] - ): - # The target entity type allows hierarchical attributes. - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. - return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - -class StandardFactory(Factory): - '''Standard entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - if not bases: - bases = [] - - extra_bases = [] - # Customise classes. - if schema['id'] == 'ProjectSchema': - extra_bases = [ftrack_api_old.entity.project_schema.ProjectSchema] - - elif schema['id'] == 'Location': - extra_bases = [ftrack_api_old.entity.location.Location] - - elif schema['id'] == 'AssetVersion': - extra_bases = [ftrack_api_old.entity.asset_version.AssetVersion] - - elif schema['id'].endswith('Component'): - extra_bases = [ftrack_api_old.entity.component.Component] - - elif schema['id'] == 'Note': - extra_bases = [ftrack_api_old.entity.note.Note] - - elif schema['id'] == 'Job': - extra_bases = [ftrack_api_old.entity.job.Job] - - elif schema['id'] == 'User': - extra_bases = [ftrack_api_old.entity.user.User] - - bases = extra_bases + bases - - # If bases does not contain any items, add the base entity class. - if not bases: - bases = [ftrack_api_old.entity.base.Entity] - - # Add mixins. 
- if 'notes' in schema.get('properties', {}): - bases.append( - ftrack_api_old.entity.note.CreateNoteMixin - ) - - if 'thumbnail_id' in schema.get('properties', {}): - bases.append( - ftrack_api_old.entity.component.CreateThumbnailMixin - ) - - cls = super(StandardFactory, self).create(schema, bases=bases) - - return cls - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - if reference == 'Metadata': - - def create_metadata(proxy, data, reference): - '''Return metadata for *data*.''' - entity = proxy.collection.entity - session = entity.session - data.update({ - 'parent_id': entity['id'], - 'parent_type': entity.entity_type - }) - return session.create(reference, data) - - creator = functools.partial( - create_metadata, reference=reference - ) - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api_old.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - elif reference == 'CustomAttributeValue': - return ( - ftrack_api_old.attribute.CustomAttributeCollectionAttribute( - name, mutable=mutable - ) - ) - - elif reference.endswith('CustomAttributeValue'): - def creator(proxy, data): - '''Create a custom attribute based on *proxy* and *data*. - - Raise :py:exc:`KeyError` if related entity is already presisted - to the server. The proxy represents dense custom attribute - values and should never create new custom attribute values - through the proxy if entity exists on the remote. - - If the entity is not persisted the ususal - CustomAttributeValue items cannot be updated as - the related entity does not exist on remote and values not in - the proxy. Instead a CustomAttributeValue will - be reconstructed and an update operation will be recorded. - - ''' - entity = proxy.collection.entity - if ( - ftrack_api_old.inspection.state(entity) is not - ftrack_api_old.symbol.CREATED - ): - raise KeyError( - 'Custom attributes must be created explicitly for the ' - 'given entity type before being set.' - ) - - configuration = None - for candidate in _get_entity_configurations(entity): - if candidate['key'] == data['key']: - configuration = candidate - break - - if configuration is None: - raise ValueError( - u'No valid custom attribute for data {0!r} was found.' - .format(data) - ) - - create_data = dict(data.items()) - create_data['configuration_id'] = configuration['id'] - create_data['entity_id'] = entity['id'] - - session = entity.session - - # Create custom attribute by reconstructing it and update the - # value. This will prevent a create operation to be sent to the - # remote, as create operations for this entity type is not - # allowed. Instead an update operation will be recorded. - value = create_data.pop('value') - item = session.create( - reference, - create_data, - reconstructing=True - ) - - # Record update operation. 
- item['value'] = value - - return item - - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api_old.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has no configuration ' - 'for reference {2}.', class_name, name, reference - )) diff --git a/pype/vendor/ftrack_api_old/entity/job.py b/pype/vendor/ftrack_api_old/entity/job.py deleted file mode 100644 index 0d716f5914..0000000000 --- a/pype/vendor/ftrack_api_old/entity/job.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api_old.entity.base - - -class Job(ftrack_api_old.entity.base.Entity): - '''Represent job.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api_old.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - To set a job `description` visible in the web interface, *data* can - contain a key called `data` which should be a JSON serialised - dictionary containing description:: - - data = { - 'status': 'running', - 'data': json.dumps(dict(description='My job description.')), - ... - } - - Will raise a :py:exc:`ValueError` if *data* contains `type` and `type` - is set to something not equal to "api_job". - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. - - ''' - - if not reconstructing: - if data.get('type') not in ('api_job', None): - raise ValueError( - 'Invalid job type "{0}". Must be "api_job"'.format( - data.get('type') - ) - ) - - super(Job, self).__init__( - session, data=data, reconstructing=reconstructing - ) diff --git a/pype/vendor/ftrack_api_old/entity/location.py b/pype/vendor/ftrack_api_old/entity/location.py deleted file mode 100644 index 8d9d52c654..0000000000 --- a/pype/vendor/ftrack_api_old/entity/location.py +++ /dev/null @@ -1,733 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections -import functools - -import ftrack_api_old.entity.base -import ftrack_api_old.exception -import ftrack_api_old.event.base -import ftrack_api_old.symbol -import ftrack_api_old.inspection -from ftrack_api_old.logging import LazyLogMessage as L - - -class Location(ftrack_api_old.entity.base.Entity): - '''Represent storage for components.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api_old.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. 
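The `Job` docstring above expects the human-readable description to be nested as JSON under the `data` key. A sketch of creating such a job, assuming a configured `session`; the user query is a placeholder, and real jobs typically also set `user`:

    import json

    job = session.create('Job', {
        'user': session.query('User').first(),
        'status': 'running',
        'data': json.dumps({'description': 'Publishing renders.'})
    })
    session.commit()

    # Later, mark it done so the web interface stops showing it as running.
    job['status'] = 'done'
    session.commit()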
- - ''' - self.accessor = ftrack_api_old.symbol.NOT_SET - self.structure = ftrack_api_old.symbol.NOT_SET - self.resource_identifier_transformer = ftrack_api_old.symbol.NOT_SET - self.priority = 95 - super(Location, self).__init__( - session, data=data, reconstructing=reconstructing - ) - - def __str__(self): - '''Return string representation of instance.''' - representation = super(Location, self).__str__() - - with self.session.auto_populating(False): - name = self['name'] - if name is not ftrack_api_old.symbol.NOT_SET: - representation = representation.replace( - '(', '("{0}", '.format(name) - ) - - return representation - - def add_component(self, component, source, recursive=True): - '''Add *component* to location. - - *component* should be a single component instance. - - *source* should be an instance of another location that acts as the - source. - - Raise :exc:`ftrack_api_old.ComponentInLocationError` if the *component* - already exists in this location. - - Raise :exc:`ftrack_api_old.LocationError` if managing data and the generated - target structure for the component already exists according to the - accessor. This helps prevent potential data loss by avoiding overwriting - existing data. Note that there is a race condition between the check and - the write so if another process creates data at the same target during - that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component registration. - - ''' - return self.add_components( - [component], sources=source, recursive=recursive - ) - - def add_components(self, components, sources, recursive=True, _depth=0): - '''Add *components* to location. - - *components* should be a list of component instances. - - *sources* may be either a single source or a list of sources. If a list - then each corresponding index in *sources* will be used for each - *component*. A source should be an instance of another location. - - Raise :exc:`ftrack_api_old.exception.ComponentInLocationError` if any - component in *components* already exists in this location. In this case, - no changes will be made and no data transferred. - - Raise :exc:`ftrack_api_old.exception.LocationError` if managing data and the - generated target structure for the component already exists according to - the accessor. This helps prevent potential data loss by avoiding - overwriting existing data. Note that there is a race condition between - the check and the write so if another process creates data at the same - target during that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration. - - .. important:: - - If this location manages data then the *components* data is first - transferred to the target prescribed by the structure plugin, using - the configured accessor. If any component fails to transfer then - :exc:`ftrack_api_old.exception.LocationError` is raised and none of the - components are registered with the database. In this case it is left - up to the caller to decide and act on manually cleaning up any - transferred data using the 'transferred' detail in the raised error. - - Likewise, after transfer, all components are registered with the - database in a batch call. If any component causes an error then all - components will remain unregistered and - :exc:`ftrack_api_old.exception.LocationError` will be raised detailing - issues and any transferred data under the 'transferred' detail key. 
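`add_components` above both transfers data (when the location manages data) and registers the result in the database, raising `ComponentInLocationError` or `LocationError` without partial registration on failure. Canonical single-component use, with placeholder location names and the `version` from the earlier sketch:

    target = session.query('Location where name is "studio.disk"').one()
    origin = session.query('Location where name is "ftrack.origin"').one()

    component = version.create_component('/renders/preview.mov', location=None)
    target.add_component(component, origin)  # May commit during registration.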
- - ''' - if ( - isinstance(sources, basestring) - or not isinstance(sources, collections.Sequence) - ): - sources = [sources] - - sources_count = len(sources) - if sources_count not in (1, len(components)): - raise ValueError( - 'sources must be either a single source or a sequence of ' - 'sources with indexes corresponding to passed components.' - ) - - if not self.structure: - raise ftrack_api_old.exception.LocationError( - 'No structure defined for location {location}.', - details=dict(location=self) - ) - - if not components: - # Optimisation: Return early when no components to process, such as - # when called recursively on an empty sequence component. - return - - indent = ' ' * (_depth + 1) - - # Check that components not already added to location. - existing_components = [] - try: - self.get_resource_identifiers(components) - - except ftrack_api_old.exception.ComponentNotInLocationError as error: - missing_component_ids = [ - missing_component['id'] - for missing_component in error.details['components'] - ] - for component in components: - if component['id'] not in missing_component_ids: - existing_components.append(component) - - else: - existing_components.extend(components) - - if existing_components: - # Some of the components already present in location. - raise ftrack_api_old.exception.ComponentInLocationError( - existing_components, self - ) - - # Attempt to transfer each component's data to this location. - transferred = [] - - for index, component in enumerate(components): - try: - # Determine appropriate source. - if sources_count == 1: - source = sources[0] - else: - source = sources[index] - - # Add members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.add_components( - component['members'], source, recursive=recursive, - _depth=(_depth + 1) - ) - - # Add component to this location. - context = self._get_context(component, source) - resource_identifier = self.structure.get_resource_identifier( - component, context - ) - - # Manage data transfer. - self._add_data(component, resource_identifier, source) - - except Exception as error: - raise ftrack_api_old.exception.LocationError( - 'Failed to transfer component {component} data to location ' - '{location} due to error:\n{indent}{error}\n{indent}' - 'Transferred component data that may require cleanup: ' - '{transferred}', - details=dict( - indent=indent, - component=component, - location=self, - error=error, - transferred=transferred - ) - ) - - else: - transferred.append((component, resource_identifier)) - - # Register all successfully transferred components. - components_to_register = [] - component_resource_identifiers = [] - - try: - for component, resource_identifier in transferred: - if self.resource_identifier_transformer: - # Optionally encode resource identifier before storing. - resource_identifier = ( - self.resource_identifier_transformer.encode( - resource_identifier, - context={'component': component} - ) - ) - - components_to_register.append(component) - component_resource_identifiers.append(resource_identifier) - - # Store component in location information. 
- self._register_components_in_location( - components, component_resource_identifiers - ) - - except Exception as error: - raise ftrack_api_old.exception.LocationError( - 'Failed to register components with location {location} due to ' - 'error:\n{indent}{error}\n{indent}Transferred component data ' - 'that may require cleanup: {transferred}', - details=dict( - indent=indent, - location=self, - error=error, - transferred=transferred - ) - ) - - # Publish events. - for component in components_to_register: - - component_id = ftrack_api_old.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api_old.inspection.primary_key(self).values()[0] - - self.session.event_hub.publish( - ftrack_api_old.event.base.Event( - topic=ftrack_api_old.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ), - ), - on_error='ignore' - ) - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - try: - source_resource_identifier = source.get_resource_identifier( - component - ) - except ftrack_api_old.exception.ComponentNotInLocationError: - pass - else: - context.update(dict( - source_resource_identifier=source_resource_identifier - )) - - return context - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - ''' - self.logger.debug(L( - 'Adding data for component {0!r} from source {1!r} to location ' - '{2!r} using resource identifier {3!r}.', - component, resource_identifier, source, self - )) - - # Read data from source and write to this location. - if not source.accessor: - raise ftrack_api_old.exception.LocationError( - 'No accessor defined for source location {location}.', - details=dict(location=source) - ) - - if not self.accessor: - raise ftrack_api_old.exception.LocationError( - 'No accessor defined for target location {location}.', - details=dict(location=self) - ) - - is_container = 'members' in component.keys() - if is_container: - # TODO: Improve this check. Possibly introduce an inspection - # such as ftrack_api_old.inspection.is_sequence_component. - if component.entity_type != 'SequenceComponent': - self.accessor.make_container(resource_identifier) - - else: - # Try to make container of component. - try: - container = self.accessor.get_container( - resource_identifier - ) - - except ftrack_api_old.exception.AccessorParentResourceNotFoundError: - # Container could not be retrieved from - # resource_identifier. Assume that there is no need to - # make the container. - pass - - else: - # No need for existence check as make_container does not - # recreate existing containers. - self.accessor.make_container(container) - - if self.accessor.exists(resource_identifier): - # Note: There is a race condition here in that the - # data may be added externally between the check for - # existence and the actual write which would still - # result in potential data loss. However, there is no - # good cross platform, cross accessor solution for this - # at present. - raise ftrack_api_old.exception.LocationError( - 'Cannot add component as data already exists and ' - 'overwriting could result in data loss. Computed ' - 'target resource identifier was: {0}' - .format(resource_identifier) - ) - - # Read and write data. 
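The transfer in `_add_data` above streams the payload in `CHUNK_SIZE` pieces using the two-argument sentinel form of `iter`, avoiding loading whole files into memory. The same idiom in isolation (note the deleted Python 2 code uses `''` as the sentinel; binary streams in Python 3 need `b''`):

    import functools

    def copy_stream(source, target, chunk_size=1024 * 1024):
        '''Copy readable *source* to writable *target* in fixed-size chunks.'''
        read_chunk = functools.partial(source.read, chunk_size)
        for chunk in iter(read_chunk, b''):  # Stop once read returns b''.
            target.write(chunk)
        target.flush()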
- source_data = source.accessor.open( - source.get_resource_identifier(component), 'rb' - ) - target_data = self.accessor.open(resource_identifier, 'wb') - - # Read/write data in chunks to avoid reading all into memory at the - # same time. - chunked_read = functools.partial( - source_data.read, ftrack_api_old.symbol.CHUNK_SIZE - ) - for chunk in iter(chunked_read, ''): - target_data.write(chunk) - - target_data.close() - source_data.close() - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location against *resource_identifier*.''' - return self._register_components_in_location( - [component], [resource_identifier] - ) - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. - - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self.session.create( - 'ComponentLocation', data=dict( - component=component, - location=self, - resource_identifier=resource_identifier - ) - ) - - self.session.commit() - - def remove_component(self, component, recursive=True): - '''Remove *component* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component deregistration. - - ''' - return self.remove_components([component], recursive=recursive) - - def remove_components(self, components, recursive=True): - '''Remove *components* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components deregistration. - - ''' - for component in components: - # Check component is in this location - self.get_resource_identifier(component) - - # Remove members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.remove_components( - component['members'], recursive=recursive - ) - - # Remove data. - self._remove_data(component) - - # Remove metadata. - self._deregister_component_in_location(component) - - # Emit event. - component_id = ftrack_api_old.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api_old.inspection.primary_key(self).values()[0] - self.session.event_hub.publish( - ftrack_api_old.event.base.Event( - topic=ftrack_api_old.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ) - ), - on_error='ignore' - ) - - def _remove_data(self, component): - '''Remove data associated with *component*.''' - if not self.accessor: - raise ftrack_api_old.exception.LocationError( - 'No accessor defined for location {location}.', - details=dict(location=self) - ) - - try: - self.accessor.remove( - self.get_resource_identifier(component) - ) - except ftrack_api_old.exception.AccessorResourceNotFoundError: - # If accessor does not support detecting sequence paths then an - # AccessorResourceNotFoundError is raised. For now, if the - # component type is 'SequenceComponent' assume success. - if not component.entity_type == 'SequenceComponent': - raise - - def _deregister_component_in_location(self, component): - '''Deregister *component* from location.''' - component_id = ftrack_api_old.inspection.primary_key(component).values()[0] - location_id = ftrack_api_old.inspection.primary_key(self).values()[0] - - # TODO: Use session.get for optimisation. 
- component_location = self.session.query( - 'ComponentLocation where component_id is {0} and location_id is ' - '{1}'.format(component_id, location_id) - )[0] - - self.session.delete(component_location) - - # TODO: Should auto-commit here be optional? - self.session.commit() - - def get_component_availability(self, component): - '''Return availability of *component* in this location as a float.''' - return self.session.get_component_availability( - component, locations=[self] - )[self['id']] - - def get_component_availabilities(self, components): - '''Return availabilities of *components* in this location. - - Return list of float values corresponding to each component. - - ''' - return [ - availability[self['id']] for availability in - self.session.get_component_availabilities( - components, locations=[self] - ) - ] - - def get_resource_identifier(self, component): - '''Return resource identifier for *component*. - - Raise :exc:`ftrack_api_old.exception.ComponentNotInLocationError` if the - component is not present in this location. - - ''' - return self.get_resource_identifiers([component])[0] - - def get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api_old.exception.ComponentNotInLocationError` if any - of the components are not present in this location. - - ''' - resource_identifiers = self._get_resource_identifiers(components) - - # Optionally decode resource identifier. - if self.resource_identifier_transformer: - for index, resource_identifier in enumerate(resource_identifiers): - resource_identifiers[index] = ( - self.resource_identifier_transformer.decode( - resource_identifier, - context={'component': components[index]} - ) - ) - - return resource_identifiers - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api_old.exception.ComponentNotInLocationError` if any - of the components are not present in this location. 
- - ''' - component_ids_mapping = collections.OrderedDict() - for component in components: - component_id = ftrack_api_old.inspection.primary_key( - component - ).values()[0] - component_ids_mapping[component_id] = component - - component_locations = self.session.query( - 'select component_id, resource_identifier from ComponentLocation ' - 'where location_id is {0} and component_id in ({1})' - .format( - ftrack_api_old.inspection.primary_key(self).values()[0], - ', '.join(component_ids_mapping.keys()) - ) - ) - - resource_identifiers_map = {} - for component_location in component_locations: - resource_identifiers_map[component_location['component_id']] = ( - component_location['resource_identifier'] - ) - - resource_identifiers = [] - missing = [] - for component_id, component in component_ids_mapping.items(): - if component_id not in resource_identifiers_map: - missing.append(component) - else: - resource_identifiers.append( - resource_identifiers_map[component_id] - ) - - if missing: - raise ftrack_api_old.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - def get_filesystem_path(self, component): - '''Return filesystem path for *component*.''' - return self.get_filesystem_paths([component])[0] - - def get_filesystem_paths(self, components): - '''Return filesystem paths for *components*.''' - resource_identifiers = self.get_resource_identifiers(components) - - filesystem_paths = [] - for resource_identifier in resource_identifiers: - filesystem_paths.append( - self.accessor.get_filesystem_path(resource_identifier) - ) - - return filesystem_paths - - def get_url(self, component): - '''Return url for *component*. - - Raise :exc:`~ftrack_api_old.exception.AccessorFilesystemPathError` if - URL could not be determined from *component* or - :exc:`~ftrack_api_old.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. - ''' - resource_identifier = self.get_resource_identifier(component) - - return self.accessor.get_url(resource_identifier) - - -class MemoryLocationMixin(object): - '''Represent storage for components. - - Unlike a standard location, only store metadata for components in this - location in memory rather than persisting to the database. - - ''' - - @property - def _cache(self): - '''Return cache.''' - try: - cache = self.__cache - except AttributeError: - cache = self.__cache = {} - - return cache - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location with *resource_identifier*.''' - component_id = ftrack_api_old.inspection.primary_key(component).values()[0] - self._cache[component_id] = resource_identifier - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. - - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self._register_component_in_location(component, resource_identifier) - - def _deregister_component_in_location(self, component): - '''Deregister *component* in location.''' - component_id = ftrack_api_old.inspection.primary_key(component).values()[0] - self._cache.pop(component_id) - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. 
- - Raise :exc:`ftrack_api_old.exception.ComponentNotInLocationError` if any - of the referenced components are not present in this location. - - ''' - resource_identifiers = [] - missing = [] - for component in components: - component_id = ftrack_api_old.inspection.primary_key( - component - ).values()[0] - resource_identifier = self._cache.get(component_id) - if resource_identifier is None: - missing.append(component) - else: - resource_identifiers.append(resource_identifier) - - if missing: - raise ftrack_api_old.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - -class UnmanagedLocationMixin(object): - '''Location that does not manage data.''' - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - Overridden to have no effect. - - ''' - return - - def _remove_data(self, component): - '''Remove data associated with *component*. - - Overridden to have no effect. - - ''' - return - - -class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin): - '''Special origin location that expects sources as filepaths.''' - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - context.update(dict( - source_resource_identifier=source - )) - - return context - - -class ServerLocationMixin(object): - '''Location representing ftrack server. - - Adds convenience methods to location, specific to ftrack server. - ''' - def get_thumbnail_url(self, component, size=None): - '''Return thumbnail url for *component*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. - - Raise :exc:`~ftrack_api_old.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api_old.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. - ''' - resource_identifier = self.get_resource_identifier(component) - return self.accessor.get_thumbnail_url(resource_identifier, size) diff --git a/pype/vendor/ftrack_api_old/entity/note.py b/pype/vendor/ftrack_api_old/entity/note.py deleted file mode 100644 index c628886fd9..0000000000 --- a/pype/vendor/ftrack_api_old/entity/note.py +++ /dev/null @@ -1,105 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import warnings - -import ftrack_api_old.entity.base - - -class Note(ftrack_api_old.entity.base.Entity): - '''Represent a note.''' - - def create_reply( - self, content, author - ): - '''Create a reply with *content* and *author*. - - .. note:: - - This is a helper method. To create replies manually use the - standard :meth:`Session.create` method. - - ''' - reply = self.session.create( - 'Note', { - 'author': author, - 'content': content - } - ) - - self['replies'].append(reply) - - return reply - - -class CreateNoteMixin(object): - '''Mixin to add create_note method on entity class.''' - - def create_note( - self, content, author, recipients=None, category=None, labels=None - ): - '''Create note with *content*, *author*. - - NoteLabels can be set by including *labels*. - - Note category can be set by including *category*. - - *recipients* can be specified as a list of user or group instances. 
- - ''' - note_label_support = 'NoteLabel' in self.session.types - - if not labels: - labels = [] - - if labels and not note_label_support: - raise ValueError( - 'NoteLabel is not supported by the current server version.' - ) - - if category and labels: - raise ValueError( - 'Both category and labels cannot be set at the same time.' - ) - - if not recipients: - recipients = [] - - data = { - 'content': content, - 'author': author - } - - if category: - if note_label_support: - labels = [category] - warnings.warn( - 'category argument will be removed in an upcoming version, ' - 'please use labels instead.', - PendingDeprecationWarning - ) - else: - data['category_id'] = category['id'] - - note = self.session.create('Note', data) - - self['notes'].append(note) - - for resource in recipients: - recipient = self.session.create('Recipient', { - 'note_id': note['id'], - 'resource_id': resource['id'] - }) - - note['recipients'].append(recipient) - - for label in labels: - self.session.create( - 'NoteLabelLink', - { - 'label_id': label['id'], - 'note_id': note['id'] - } - ) - - return note diff --git a/pype/vendor/ftrack_api_old/entity/project_schema.py b/pype/vendor/ftrack_api_old/entity/project_schema.py deleted file mode 100644 index 7f2f2b492d..0000000000 --- a/pype/vendor/ftrack_api_old/entity/project_schema.py +++ /dev/null @@ -1,94 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api_old.entity.base - - -class ProjectSchema(ftrack_api_old.entity.base.Entity): - '''Class representing ProjectSchema.''' - - def get_statuses(self, schema, type_id=None): - '''Return statuses for *schema* and optional *type_id*. - - *type_id* is the id of the Type for a TypedContext and can be used to - get statuses where the workflow has been overridden. - - ''' - # Task has overrides and need to be handled separately. - if schema == 'Task': - if type_id is not None: - overrides = self['_overrides'] - for override in overrides: - if override['type_id'] == type_id: - return override['workflow_schema']['statuses'][:] - - return self['_task_workflow']['statuses'][:] - - elif schema == 'AssetVersion': - return self['_version_workflow']['statuses'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have statuses.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_status from SchemaStatus ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [ - schema_type['task_status'] for schema_type in result - ] - - raise ValueError( - 'No valid statuses were found for schema {0}.'.format(schema) - ) - - def get_types(self, schema): - '''Return types for *schema*.''' - # Task need to be handled separately. 
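Both ProjectSchema helpers above follow the same query-then-copy shape. A hedged usage sketch, assuming a connected ftrack `session` and at least one project; the variable names are illustrative and nothing below is part of this diff:

```python
# Hypothetical usage; requires a live ftrack session.
project = session.query('Project').first()
schema = project['project_schema']

# Statuses for Task, optionally narrowed by a type id to pick up
# per-type workflow overrides.
statuses = schema.get_statuses('Task')
print([status['name'] for status in statuses])

# Types configured for Task in this schema.
types = schema.get_types('Task')
print([type_['name'] for type_ in types])
```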
- if schema == 'Task': - return self['_task_type_schema']['types'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have types.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_type from SchemaType ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [schema_type['task_type'] for schema_type in result] - - raise ValueError( - 'No valid types were found for schema {0}.'.format(schema) - ) diff --git a/pype/vendor/ftrack_api_old/entity/user.py b/pype/vendor/ftrack_api_old/entity/user.py deleted file mode 100644 index e57783aca0..0000000000 --- a/pype/vendor/ftrack_api_old/entity/user.py +++ /dev/null @@ -1,123 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import arrow - -import ftrack_api_old.entity.base -import ftrack_api_old.exception - - -class User(ftrack_api_old.entity.base.Entity): - '''Represent a user.''' - - def start_timer(self, context=None, comment='', name=None, force=False): - '''Start a timer for *context* and return it. - - *force* can be used to automatically stop an existing timer and create a - timelog for it. If you need to get access to the created timelog, use - :func:`stop_timer` instead. - - *comment* and *name* are optional but will be set on the timer. - - .. note:: - - This method will automatically commit the changes and if *force* is - False then it will fail with a - :class:`ftrack_api_old.exception.NotUniqueError` exception if a - timer is already running. - - ''' - if force: - try: - self.stop_timer() - except ftrack_api_old.exception.NoResultFoundError: - self.logger.debug('Failed to stop existing timer.') - - timer = self.session.create('Timer', { - 'user': self, - 'context': context, - 'name': name, - 'comment': comment - }) - - # Commit the new timer and try to catch any error that indicate another - # timelog already exists and inform the user about it. - try: - self.session.commit() - except ftrack_api_old.exception.ServerError as error: - if 'IntegrityError' in str(error): - raise ftrack_api_old.exception.NotUniqueError( - ('Failed to start a timelog for user with id: {0}, it is ' - 'likely that a timer is already running. Either use ' - 'force=True or stop the timer first.').format(self['id']) - ) - else: - # Reraise the error as it might be something unrelated. - raise - - return timer - - def stop_timer(self): - '''Stop the current timer and return a timelog created from it. - - If a timer is not running, a - :exc:`ftrack_api_old.exception.NoResultFoundError` exception will be - raised. - - .. note:: - - This method will automatically commit the changes. - - ''' - timer = self.session.query( - 'Timer where user_id = "{0}"'.format(self['id']) - ).one() - - # If the server is running in the same timezone as the local - # timezone, we remove the TZ offset to get the correct duration. - is_timezone_support_enabled = self.session.server_information.get( - 'is_timezone_support_enabled', None - ) - if is_timezone_support_enabled is None: - self.logger.warning( - 'Could not identify if server has timezone support enabled. ' - 'Will assume server is running in UTC.' 
-            )
-            is_timezone_support_enabled = True
-
-        if is_timezone_support_enabled:
-            now = arrow.now()
-        else:
-            now = arrow.now().replace(tzinfo='utc')
-
-        delta = now - timer['start']
-        duration = delta.days * 24 * 60 * 60 + delta.seconds
-
-        timelog = self.session.create('Timelog', {
-            'user_id': timer['user_id'],
-            'context_id': timer['context_id'],
-            'comment': timer['comment'],
-            'start': timer['start'],
-            'duration': duration,
-            'name': timer['name']
-        })
-
-        self.session.delete(timer)
-        self.session.commit()
-
-        return timelog
-
-    def send_invite(self):
-        '''Send an invitation email to the user.'''
-
-        self.session.send_user_invite(
-            self
-        )
-
-    def reset_api_key(self):
-        '''Reset the user's API key.'''
-
-        response = self.session.reset_remote(
-            'api_key', entity=self
-        )
-
-        return response['api_key']
diff --git a/pype/vendor/ftrack_api_old/event/__init__.py b/pype/vendor/ftrack_api_old/event/__init__.py
deleted file mode 100644
index 1aab07ed77..0000000000
--- a/pype/vendor/ftrack_api_old/event/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
diff --git a/pype/vendor/ftrack_api_old/event/base.py b/pype/vendor/ftrack_api_old/event/base.py
deleted file mode 100644
index b5fd57da78..0000000000
--- a/pype/vendor/ftrack_api_old/event/base.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-import uuid
-import collections
-
-
-class Event(collections.MutableMapping):
-    '''Represent a single event.'''
-
-    def __init__(self, topic, id=None, data=None, sent=None,
-                 source=None, target='', in_reply_to_event=None):
-        '''Initialise event.
-
-        *topic* is the required topic for the event. It can use a dotted
-        notation to demarcate groupings. For example, 'ftrack.update'.
-
-        *id* is the unique id for this event instance. It is primarily used when
-        replying to an event. If not supplied a default uuid based value will
-        be used.
-
-        *data* refers to event specific data. It should be a mapping structure
-        and defaults to an empty dictionary if not supplied.
-
-        *sent* is the timestamp the event is sent. It will be set automatically
-        as send time unless specified here.
-
-        *source* is information about where the event originated. It should be
-        a mapping and include at least a unique id value under an 'id' key. If
-        not specified, senders usually populate the value automatically at
-        publish time.
-
-        *target* can be an expression that targets this event. For example,
-        a reply event would target the event to the sender of the source event.
-        The expression will be tested against subscriber information only.
-
-        *in_reply_to_event* is used when replying to an event and should contain
-        the unique id of the event being replied to.
- - ''' - super(Event, self).__init__() - self._data = dict( - id=id or uuid.uuid4().hex, - data=data or {}, - topic=topic, - sent=sent, - source=source or {}, - target=target, - in_reply_to_event=in_reply_to_event - ) - self._stopped = False - - def stop(self): - '''Stop further processing of this event.''' - self._stopped = True - - def is_stopped(self): - '''Return whether event has been stopped.''' - return self._stopped - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/pype/vendor/ftrack_api_old/event/expression.py b/pype/vendor/ftrack_api_old/event/expression.py deleted file mode 100644 index 8de4be0d71..0000000000 --- a/pype/vendor/ftrack_api_old/event/expression.py +++ /dev/null @@ -1,282 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from operator import eq, ne, ge, le, gt, lt - -from pyparsing import (Group, Word, CaselessKeyword, Forward, - FollowedBy, Suppress, oneOf, OneOrMore, Optional, - alphanums, quotedString, removeQuotes) - -import ftrack_api_old.exception - -# Do not enable packrat since it is not thread-safe and will result in parsing -# exceptions in a multi threaded environment. -# ParserElement.enablePackrat() - - -class Parser(object): - '''Parse string based expression into :class:`Expression` instance.''' - - def __init__(self): - '''Initialise parser.''' - self._operators = { - '=': eq, - '!=': ne, - '>=': ge, - '<=': le, - '>': gt, - '<': lt - } - self._parser = self._construct_parser() - super(Parser, self).__init__() - - def _construct_parser(self): - '''Construct and return parser.''' - field = Word(alphanums + '_.') - operator = oneOf(self._operators.keys()) - value = Word(alphanums + '-_,./*@+') - quoted_value = quotedString('quoted_value').setParseAction(removeQuotes) - - condition = Group( - field + operator + (quoted_value | value) - )('condition') - - not_ = Optional(Suppress(CaselessKeyword('not')))('not') - and_ = Suppress(CaselessKeyword('and'))('and') - or_ = Suppress(CaselessKeyword('or'))('or') - - expression = Forward() - parenthesis = Suppress('(') + expression + Suppress(')') - previous = condition | parenthesis - - for conjunction in (not_, and_, or_): - current = Forward() - - if conjunction in (and_, or_): - conjunction_expression = ( - FollowedBy(previous + conjunction + previous) - + Group( - previous + OneOrMore(conjunction + previous) - )(conjunction.resultsName) - ) - - elif conjunction in (not_, ): - conjunction_expression = ( - FollowedBy(conjunction.expr + current) - + Group(conjunction + current)(conjunction.resultsName) - ) - - else: # pragma: no cover - raise ValueError('Unrecognised conjunction.') - - current <<= (conjunction_expression | previous) - previous = current - - expression <<= previous - return expression('expression') - - def parse(self, expression): - '''Parse string *expression* into :class:`Expression`. - - Raise :exc:`ftrack_api_old.exception.ParseError` if *expression* could - not be parsed. 
- - ''' - result = None - expression = expression.strip() - if expression: - try: - result = self._parser.parseString( - expression, parseAll=True - ) - except Exception as error: - raise ftrack_api_old.exception.ParseError( - 'Failed to parse: {0}. {1}'.format(expression, error) - ) - - return self._process(result) - - def _process(self, result): - '''Process *result* using appropriate method. - - Method called is determined by the name of the result. - - ''' - method_name = '_process_{0}'.format(result.getName()) - method = getattr(self, method_name) - return method(result) - - def _process_expression(self, result): - '''Process *result* as expression.''' - return self._process(result[0]) - - def _process_not(self, result): - '''Process *result* as NOT operation.''' - return Not(self._process(result[0])) - - def _process_and(self, result): - '''Process *result* as AND operation.''' - return All([self._process(entry) for entry in result]) - - def _process_or(self, result): - '''Process *result* as OR operation.''' - return Any([self._process(entry) for entry in result]) - - def _process_condition(self, result): - '''Process *result* as condition.''' - key, operator, value = result - return Condition(key, self._operators[operator], value) - - def _process_quoted_value(self, result): - '''Process *result* as quoted value.''' - return result - - -class Expression(object): - '''Represent a structured expression to test candidates against.''' - - def __str__(self): - '''Return string representation.''' - return '<{0}>'.format(self.__class__.__name__) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return True - - -class All(Expression): - '''Match candidate that matches all of the specified expressions. - - .. note:: - - If no expressions are supplied then will always match. - - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(All, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return all([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Any(Expression): - '''Match candidate that matches any of the specified expressions. - - .. note:: - - If no expressions are supplied then will never match. 
- - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(Any, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return any([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Not(Expression): - '''Negate expression.''' - - def __init__(self, expression): - '''Initialise with *expression* to negate.''' - self._expression = expression - super(Not, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, - self._expression - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return not self._expression.match(candidate) - - -class Condition(Expression): - '''Represent condition.''' - - def __init__(self, key, operator, value): - '''Initialise condition. - - *key* is the key to check on the data when matching. It can be a nested - key represented by dots. For example, 'data.eventType' would attempt to - match candidate['data']['eventType']. If the candidate is missing any - of the requested keys then the match fails immediately. - - *operator* is the operator function to use to perform the match between - the retrieved candidate value and the conditional *value*. - - If *value* is a string, it can use a wildcard '*' at the end to denote - that any values matching the substring portion are valid when matching - equality only. - - ''' - self._key = key - self._operator = operator - self._value = value - self._wildcard = '*' - self._operatorMapping = { - eq: '=', - ne: '!=', - ge: '>=', - le: '<=', - gt: '>', - lt: '<' - } - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}{2}{3}>'.format( - self.__class__.__name__, - self._key, - self._operatorMapping.get(self._operator, self._operator), - self._value - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - key_parts = self._key.split('.') - - try: - value = candidate - for keyPart in key_parts: - value = value[keyPart] - except (KeyError, TypeError): - return False - - if ( - self._operator is eq - and isinstance(self._value, basestring) - and self._value[-1] == self._wildcard - ): - return self._value[:-1] in value - else: - return self._operator(value, self._value) diff --git a/pype/vendor/ftrack_api_old/event/hub.py b/pype/vendor/ftrack_api_old/event/hub.py deleted file mode 100644 index 3ffbd38056..0000000000 --- a/pype/vendor/ftrack_api_old/event/hub.py +++ /dev/null @@ -1,1091 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -from __future__ import absolute_import - -import collections -import urlparse -import threading -import Queue as queue -import logging -import time -import uuid -import operator -import functools -import json -import socket -import warnings - -import requests -import requests.exceptions -import websocket - -import ftrack_api_old.exception -import ftrack_api_old.event.base -import ftrack_api_old.event.subscriber -import ftrack_api_old.event.expression -from ftrack_api_old.logging import LazyLogMessage as L - - -SocketIoSession = collections.namedtuple('SocketIoSession', [ - 'id', - 'heartbeatTimeout', - 'supportedTransports', -]) - - -ServerDetails = 
collections.namedtuple('ServerDetails', [
-    'scheme',
-    'hostname',
-    'port',
-])
-
-
-class EventHub(object):
-    '''Manage routing of events.'''
-
-    _future_signature_warning = (
-        'When constructing your Session object you did not explicitly define '
-        'auto_connect_event_hub as True even though you appear to be publishing '
-        'and / or subscribing to asynchronous events. In version 2.0 of '
-        'the ftrack-python-api the default behavior will change from True '
-        'to False. Please make sure to update your tools. You can read more at '
-        'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html'
-    )
-
-    def __init__(self, server_url, api_user, api_key):
-        '''Initialise hub, connecting to ftrack *server_url*.
-
-        *api_user* is the user to authenticate as and *api_key* is the API key
-        to authenticate with.
-
-        '''
-        super(EventHub, self).__init__()
-        self.logger = logging.getLogger(
-            __name__ + '.' + self.__class__.__name__
-        )
-        self.id = uuid.uuid4().hex
-        self._connection = None
-
-        self._unique_packet_id = 0
-        self._packet_callbacks = {}
-        self._lock = threading.RLock()
-
-        self._wait_timeout = 4
-
-        self._subscribers = []
-        self._reply_callbacks = {}
-        self._intentional_disconnect = False
-
-        self._event_queue = queue.Queue()
-        self._event_namespace = 'ftrack.event'
-        self._expression_parser = ftrack_api_old.event.expression.Parser()
-
-        # Default values for auto reconnection timeout on unintentional
-        # disconnection. Equates to 5 minutes.
-        self._auto_reconnect_attempts = 30
-        self._auto_reconnect_delay = 10
-
-        self._deprecation_warning_auto_connect = False
-
-        # Mapping of Socket.IO codes to meaning.
-        self._code_name_mapping = {
-            '0': 'disconnect',
-            '1': 'connect',
-            '2': 'heartbeat',
-            '3': 'message',
-            '4': 'json',
-            '5': 'event',
-            '6': 'acknowledge',
-            '7': 'error'
-        }
-        self._code_name_mapping.update(
-            dict((name, code) for code, name in self._code_name_mapping.items())
-        )
-
-        self._server_url = server_url
-        self._api_user = api_user
-        self._api_key = api_key
-
-        # Parse server URL and store server details.
-        url_parse_result = urlparse.urlparse(self._server_url)
-        if not url_parse_result.scheme:
-            raise ValueError('Could not determine scheme from server url.')
-
-        if not url_parse_result.hostname:
-            raise ValueError('Could not determine hostname from server url.')
-
-        self.server = ServerDetails(
-            url_parse_result.scheme,
-            url_parse_result.hostname,
-            url_parse_result.port
-        )
-
-    def get_server_url(self):
-        '''Return URL to server.'''
-        return '{0}://{1}'.format(
-            self.server.scheme, self.get_network_location()
-        )
-
-    def get_network_location(self):
-        '''Return network location part of url (hostname with optional port).'''
-        if self.server.port:
-            return '{0}:{1}'.format(self.server.hostname, self.server.port)
-        else:
-            return self.server.hostname
-
-    @property
-    def secure(self):
-        '''Return whether secure connection used.'''
-        return self.server.scheme == 'https'
-
-    def connect(self):
-        '''Initialise connection to server.
-
-        Raise :exc:`ftrack_api_old.exception.EventHubConnectionError` if already
-        connected or connection fails.
-
-        '''
-
-        self._deprecation_warning_auto_connect = False
-
-        if self.connected:
-            raise ftrack_api_old.exception.EventHubConnectionError(
-                'Already connected.'
-            )
-
-        # Reset flag tracking whether disconnection was intentional.
-        self._intentional_disconnect = False
-
-        try:
-            # Connect to socket.io server using websocket transport.
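The URL validation in `__init__` above maps directly onto the standard library. A runnable Python 3 equivalent of the parse-and-validate step (`urlparse` moved to `urllib.parse`; the function name is illustrative):

```python
import collections
from urllib.parse import urlparse

ServerDetails = collections.namedtuple(
    'ServerDetails', ['scheme', 'hostname', 'port']
)


def parse_server_url(server_url):
    '''Validate *server_url* and return its scheme, hostname and port.'''
    result = urlparse(server_url)
    if not result.scheme:
        raise ValueError('Could not determine scheme from server url.')
    if not result.hostname:
        raise ValueError('Could not determine hostname from server url.')
    return ServerDetails(result.scheme, result.hostname, result.port)


print(parse_server_url('https://example.ftrackapp.com'))
# ServerDetails(scheme='https', hostname='example.ftrackapp.com', port=None)
```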
- session = self._get_socket_io_session() - - if 'websocket' not in session.supportedTransports: - raise ValueError( - 'Server does not support websocket sessions.' - ) - - scheme = 'wss' if self.secure else 'ws' - url = '{0}://{1}/socket.io/1/websocket/{2}'.format( - scheme, self.get_network_location(), session.id - ) - - # timeout is set to 60 seconds to avoid the issue where the socket - # ends up in a bad state where it is reported as connected but the - # connection has been closed. The issue happens often when connected - # to a secure socket and the computer goes to sleep. - # More information on how the timeout works can be found here: - # https://docs.python.org/2/library/socket.html#socket.socket.setblocking - self._connection = websocket.create_connection(url, timeout=60) - - except Exception as error: - error_message = ( - 'Failed to connect to event server at {server_url} with ' - 'error: "{error}".' - ) - - error_details = { - 'error': unicode(error), - 'server_url': self.get_server_url() - } - - self.logger.debug( - L( - error_message, **error_details - ), - exc_info=1 - ) - raise ftrack_api_old.exception.EventHubConnectionError( - error_message, - details=error_details - ) - - # Start background processing thread. - self._processor_thread = _ProcessorThread(self) - self._processor_thread.start() - - # Subscribe to reply events if not already. Note: Only adding the - # subscriber locally as the following block will notify server of all - # existing subscribers, which would cause the server to report a - # duplicate subscriber error if EventHub.subscribe was called here. - try: - self._add_subscriber( - 'topic=ftrack.meta.reply', - self._handle_reply, - subscriber=dict( - id=self.id - ) - ) - except ftrack_api_old.exception.NotUniqueError: - pass - - # Now resubscribe any existing stored subscribers. This can happen when - # reconnecting automatically for example. - for subscriber in self._subscribers[:]: - self._notify_server_about_subscriber(subscriber) - - @property - def connected(self): - '''Return if connected.''' - return self._connection is not None and self._connection.connected - - def disconnect(self, unsubscribe=True): - '''Disconnect from server. - - Raise :exc:`ftrack_api_old.exception.EventHubConnectionError` if not - currently connected. - - If *unsubscribe* is True then unsubscribe all current subscribers - automatically before disconnecting. - - ''' - if not self.connected: - raise ftrack_api_old.exception.EventHubConnectionError( - 'Not currently connected.' - ) - - else: - # Set flag to indicate disconnection was intentional. - self._intentional_disconnect = True - - # Set blocking to true on socket to make sure unsubscribe events - # are emitted before closing the connection. - self._connection.sock.setblocking(1) - - # Unsubscribe all subscribers. - if unsubscribe: - for subscriber in self._subscribers[:]: - self.unsubscribe(subscriber.metadata['id']) - - # Now disconnect. - self._connection.close() - self._connection = None - - # Shutdown background processing thread. - self._processor_thread.cancel() - - # Join to it if it is not current thread to help ensure a clean - # shutdown. - if threading.current_thread() != self._processor_thread: - self._processor_thread.join(self._wait_timeout) - - def reconnect(self, attempts=10, delay=5): - '''Reconnect to server. - - Make *attempts* number of attempts with *delay* in seconds between each - attempt. - - .. note:: - - All current subscribers will be automatically resubscribed after - successful reconnection. 
- - Raise :exc:`ftrack_api_old.exception.EventHubConnectionError` if fail to - reconnect. - - ''' - try: - self.disconnect(unsubscribe=False) - except ftrack_api_old.exception.EventHubConnectionError: - pass - - for attempt in range(attempts): - self.logger.debug(L( - 'Reconnect attempt {0} of {1}', attempt, attempts - )) - - # Silence logging temporarily to avoid lots of failed connection - # related information. - try: - logging.disable(logging.CRITICAL) - - try: - self.connect() - except ftrack_api_old.exception.EventHubConnectionError: - time.sleep(delay) - else: - break - - finally: - logging.disable(logging.NOTSET) - - if not self.connected: - raise ftrack_api_old.exception.EventHubConnectionError( - 'Failed to reconnect to event server at {0} after {1} attempts.' - .format(self.get_server_url(), attempts) - ) - - def wait(self, duration=None): - '''Wait for events and handle as they arrive. - - If *duration* is specified, then only process events until duration is - reached. *duration* is in seconds though float values can be used for - smaller values. - - ''' - started = time.time() - - while True: - try: - event = self._event_queue.get(timeout=0.1) - except queue.Empty: - pass - else: - self._handle(event) - - # Additional special processing of events. - if event['topic'] == 'ftrack.meta.disconnected': - break - - if duration is not None: - if (time.time() - started) > duration: - break - - def get_subscriber_by_identifier(self, identifier): - '''Return subscriber with matching *identifier*. - - Return None if no subscriber with *identifier* found. - - ''' - for subscriber in self._subscribers[:]: - if subscriber.metadata.get('id') == identifier: - return subscriber - - return None - - def subscribe(self, subscription, callback, subscriber=None, priority=100): - '''Register *callback* for *subscription*. - - A *subscription* is a string that can specify in detail which events the - callback should receive. The filtering is applied against each event - object. Nested references are supported using '.' separators. - For example, 'topic=foo and data.eventType=Shot' would match the - following event:: - - - - The *callback* should accept an instance of - :class:`ftrack_api_old.event.base.Event` as its sole argument. - - Callbacks are called in order of *priority*. The lower the priority - number the sooner it will be called, with 0 being the first. The - default priority is 100. Note that priority only applies against other - callbacks registered with this hub and not as a global priority. - - An earlier callback can prevent processing of subsequent callbacks by - calling :meth:`Event.stop` on the passed `event` before - returning. - - .. warning:: - - Handlers block processing of other received events. For long - running callbacks it is advisable to delegate the main work to - another process or thread. - - A *callback* can be attached to *subscriber* information that details - the subscriber context. A subscriber context will be generated - automatically if not supplied. - - .. note:: - - The subscription will be stored locally, but until the server - receives notification of the subscription it is possible the - callback will not be called. - - Return subscriber identifier. - - Raise :exc:`ftrack_api_old.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - # Add subscriber locally. - subscriber = self._add_subscriber( - subscription, callback, subscriber, priority - ) - - # Notify server now if possible. 
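The priority and stop semantics described in the `subscribe` docstring above can be demonstrated without a server. A self-contained sketch of the dispatch rule, using simplified stand-ins rather than the vendored classes (lower priority values run first, and a stopped event halts later callbacks):

```python
import operator


class StubEvent(dict):
    '''Minimal stand-in for the hub's event object.'''

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        self._stopped = False

    def stop(self):
        '''Prevent later subscribers from seeing this event.'''
        self._stopped = True

    def is_stopped(self):
        return self._stopped


class StubSubscriber(object):
    '''Callback plus priority, mirroring the attributes sorted on above.'''

    def __init__(self, callback, priority=100):
        self.callback = callback
        self.priority = priority


def dispatch(subscribers, event):
    '''Call callbacks lowest-priority-first, honouring event.stop().'''
    for subscriber in sorted(subscribers, key=operator.attrgetter('priority')):
        subscriber.callback(event)
        if event.is_stopped():
            break


first = StubSubscriber(lambda event: event.stop(), priority=0)
second = StubSubscriber(lambda event: print('never reached'), priority=100)
dispatch([first, second], StubEvent(topic='ftrack.update'))  # prints nothing
```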
- try: - self._notify_server_about_subscriber(subscriber) - except ftrack_api_old.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server about new subscriber {0} ' - 'as server not currently reachable.', subscriber.metadata['id'] - )) - - return subscriber.metadata['id'] - - def _add_subscriber( - self, subscription, callback, subscriber=None, priority=100 - ): - '''Add subscriber locally. - - See :meth:`subscribe` for argument descriptions. - - Return :class:`ftrack_api_old.event.subscriber.Subscriber` instance. - - Raise :exc:`ftrack_api_old.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - if subscriber is None: - subscriber = {} - - subscriber.setdefault('id', uuid.uuid4().hex) - - # Check subscriber not already subscribed. - existing_subscriber = self.get_subscriber_by_identifier( - subscriber['id'] - ) - - if existing_subscriber is not None: - raise ftrack_api_old.exception.NotUniqueError( - 'Subscriber with identifier {0} already exists.' - .format(subscriber['id']) - ) - - subscriber = ftrack_api_old.event.subscriber.Subscriber( - subscription=subscription, - callback=callback, - metadata=subscriber, - priority=priority - ) - - self._subscribers.append(subscriber) - - return subscriber - - def _notify_server_about_subscriber(self, subscriber): - '''Notify server of new *subscriber*.''' - subscribe_event = ftrack_api_old.event.base.Event( - topic='ftrack.meta.subscribe', - data=dict( - subscriber=subscriber.metadata, - subscription=str(subscriber.subscription) - ) - ) - - self._publish( - subscribe_event, - callback=functools.partial(self._on_subscribed, subscriber) - ) - - def _on_subscribed(self, subscriber, response): - '''Handle acknowledgement of subscription.''' - if response.get('success') is False: - self.logger.warning(L( - 'Server failed to subscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def unsubscribe(self, subscriber_identifier): - '''Unsubscribe subscriber with *subscriber_identifier*. - - .. note:: - - If the server is not reachable then it won't be notified of the - unsubscription. However, the subscriber will be removed locally - regardless. - - ''' - subscriber = self.get_subscriber_by_identifier(subscriber_identifier) - - if subscriber is None: - raise ftrack_api_old.exception.NotFoundError( - 'Cannot unsubscribe missing subscriber with identifier {0}' - .format(subscriber_identifier) - ) - - self._subscribers.pop(self._subscribers.index(subscriber)) - - # Notify the server if possible. 
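`_add_subscriber` above boils down to defaulting a uuid-based identifier and rejecting duplicates before the server is notified. A standalone sketch of that bookkeeping with a plain dict in place of the subscriber list (names are illustrative):

```python
import uuid

_registry = {}


def add_subscriber(callback, metadata=None):
    '''Register *callback*, generating an identifier when none is supplied.'''
    metadata = dict(metadata or {})
    metadata.setdefault('id', uuid.uuid4().hex)
    if metadata['id'] in _registry:
        # Mirrors the NotUniqueError raised by the vendored code.
        raise ValueError(
            'Subscriber with identifier {0} already exists.'.format(
                metadata['id']
            )
        )
    _registry[metadata['id']] = (callback, metadata)
    return metadata['id']


identifier = add_subscriber(print)
print(identifier in _registry)  # True
```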
- unsubscribe_event = ftrack_api_old.event.base.Event( - topic='ftrack.meta.unsubscribe', - data=dict(subscriber=subscriber.metadata) - ) - - try: - self._publish( - unsubscribe_event, - callback=functools.partial(self._on_unsubscribed, subscriber) - ) - except ftrack_api_old.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server to unsubscribe subscriber {0} as ' - 'server not currently reachable.', subscriber.metadata['id'] - )) - - def _on_unsubscribed(self, subscriber, response): - '''Handle acknowledgement of unsubscribing *subscriber*.''' - if response.get('success') is not True: - self.logger.warning(L( - 'Server failed to unsubscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def _prepare_event(self, event): - '''Prepare *event* for sending.''' - event['source'].setdefault('id', self.id) - event['source'].setdefault('user', { - 'username': self._api_user - }) - - def _prepare_reply_event(self, event, source_event, source=None): - '''Prepare *event* as a reply to another *source_event*. - - Modify *event*, setting appropriate values to target event correctly as - a reply. - - ''' - event['target'] = 'id={0}'.format(source_event['source']['id']) - event['in_reply_to_event'] = source_event['id'] - if source is not None: - event['source'] = source - - def publish( - self, event, synchronous=False, on_reply=None, on_error='raise' - ): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. - - .. note:: - - Will not be called when *synchronous* is True. - - If *on_error* is set to 'ignore' then errors raised during publish of - event will be caught by this method and ignored. - - ''' - if self._deprecation_warning_auto_connect and not synchronous: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - try: - return self._publish( - event, synchronous=synchronous, on_reply=on_reply - ) - except Exception: - if on_error == 'ignore': - pass - else: - raise - - def publish_reply(self, source_event, data, source=None): - '''Publish a reply event to *source_event* with supplied *data*. - - If *source* is specified it will be used for the source value of the - sent event. - - ''' - reply_event = ftrack_api_old.event.base.Event( - 'ftrack.meta.reply', - data=data - ) - self._prepare_reply_event(reply_event, source_event, source=source) - self.publish(reply_event) - - def _publish(self, event, synchronous=False, callback=None, on_reply=None): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - A *callback* can also be specified. This callback will be called once - the server acknowledges receipt of the sent event. A default callback - that checks for errors from the server will be used if not specified. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. 
Note that there is no - guarantee that a reply will be sent. - - Raise :exc:`ftrack_api_old.exception.EventHubConnectionError` if not - currently connected. - - ''' - # Prepare event adding any relevant additional information. - self._prepare_event(event) - - if synchronous: - # Bypass emitting event to server and instead call locally - # registered handlers directly, collecting and returning results. - return self._handle(event, synchronous=synchronous) - - if not self.connected: - raise ftrack_api_old.exception.EventHubConnectionError( - 'Cannot publish event asynchronously as not connected to ' - 'server.' - ) - - # Use standard callback if none specified. - if callback is None: - callback = functools.partial(self._on_published, event) - - # Emit event to central server for asynchronous processing. - try: - # Register on reply callback if specified. - if on_reply is not None: - # TODO: Add cleanup process that runs after a set duration to - # garbage collect old reply callbacks and prevent dictionary - # growing too large. - self._reply_callbacks[event['id']] = on_reply - - try: - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except ftrack_api_old.exception.EventHubConnectionError: - # Connection may have dropped temporarily. Wait a few moments to - # see if background thread reconnects automatically. - time.sleep(15) - - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except: - raise - - except Exception: - # Failure to send event should not cause caller to fail. - # TODO: This behaviour is inconsistent with the failing earlier on - # lack of connection and also with the error handling parameter of - # EventHub.publish. Consider refactoring. - self.logger.exception(L('Error sending event {0}.', event)) - - def _on_published(self, event, response): - '''Handle acknowledgement of published event.''' - if response.get('success', False) is False: - self.logger.error(L( - 'Server responded with error while publishing event {0}. ' - 'Error was: {1}', event, response.get('message') - )) - - def _handle(self, event, synchronous=False): - '''Handle *event*. - - If *synchronous* is True, do not send any automatic reply events. - - ''' - # Sort by priority, lower is higher. - # TODO: Use a sorted list to avoid sorting each time in order to improve - # performance. - subscribers = sorted( - self._subscribers, key=operator.attrgetter('priority') - ) - - results = [] - - target = event.get('target', None) - target_expression = None - if target: - try: - target_expression = self._expression_parser.parse(target) - except Exception: - self.logger.exception(L( - 'Cannot handle event as failed to parse event target ' - 'information: {0}', event - )) - return - - for subscriber in subscribers: - # Check if event is targeted to the subscriber. - if ( - target_expression is not None - and not target_expression.match(subscriber.metadata) - ): - continue - - # Check if subscriber interested in the event. - if not subscriber.interested_in(event): - continue - - response = None - - try: - response = subscriber.callback(event) - results.append(response) - except Exception: - self.logger.exception(L( - 'Error calling subscriber {0} for event {1}.', - subscriber, event - )) - - # Automatically publish a non None response as a reply when not in - # synchronous mode. 
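Reply routing in this hub is a simple correlation table: `_publish` stores `on_reply` under the outgoing event's id, and `_handle_reply` (below) looks the callback up via the reply's `in_reply_to_event` field. A minimal sketch of that mechanism using plain dicts for events:

```python
import uuid

reply_callbacks = {}


def publish(event, on_reply=None):
    '''Record *on_reply* so a later reply can be routed back to it.'''
    event.setdefault('id', uuid.uuid4().hex)
    if on_reply is not None:
        reply_callbacks[event['id']] = on_reply
    return event


def handle_reply(reply_event):
    '''Route *reply_event* to the callback registered for its source.'''
    callback = reply_callbacks.get(reply_event['in_reply_to_event'])
    if callback is not None:
        callback(reply_event)


request = publish({'topic': 'demo'}, on_reply=lambda reply: print(reply['data']))
handle_reply({'in_reply_to_event': request['id'], 'data': 'pong'})  # pong
```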
-            if not synchronous:
-                if self._deprecation_warning_auto_connect:
-                    warnings.warn(
-                        self._future_signature_warning, FutureWarning
-                    )
-
-                if response is not None:
-                    try:
-                        self.publish_reply(
-                            event, data=response, source=subscriber.metadata
-                        )
-
-                    except Exception:
-                        self.logger.exception(L(
-                            'Error publishing response {0} from subscriber {1} '
-                            'for event {2}.', response, subscriber, event
-                        ))
-
-            # Check whether to continue processing topic event.
-            if event.is_stopped():
-                self.logger.debug(L(
-                    'Subscriber {0} stopped event {1}. Will not process '
-                    'subsequent subscriber callbacks for this event.',
-                    subscriber, event
-                ))
-                break
-
-        return results
-
-    def _handle_reply(self, event):
-        '''Handle reply *event*, passing it to any registered callback.'''
-        callback = self._reply_callbacks.get(event['in_reply_to_event'], None)
-        if callback is not None:
-            callback(event)
-
-    def subscription(self, subscription, callback, subscriber=None,
-                     priority=100):
-        '''Return context manager with *callback* subscribed to *subscription*.
-
-        The subscribed callback will be automatically unsubscribed on exit
-        of the context manager.
-
-        '''
-        return _SubscriptionContext(
-            self, subscription, callback, subscriber=subscriber,
-            priority=priority,
-        )
-
-    # Socket.IO interface.
-    #
-
-    def _get_socket_io_session(self):
-        '''Connect to server and retrieve session information.'''
-        socket_io_url = (
-            '{0}://{1}/socket.io/1/?api_user={2}&api_key={3}'
-        ).format(
-            self.server.scheme,
-            self.get_network_location(),
-            self._api_user,
-            self._api_key
-        )
-        try:
-            response = requests.get(
-                socket_io_url,
-                timeout=60  # 60 second timeout to receive errors faster.
-            )
-        except requests.exceptions.Timeout as error:
-            raise ftrack_api_old.exception.EventHubConnectionError(
-                'Timed out connecting to server: {0}.'.format(error)
-            )
-        except requests.exceptions.SSLError as error:
-            raise ftrack_api_old.exception.EventHubConnectionError(
-                'Failed to negotiate SSL with server: {0}.'.format(error)
-            )
-        except requests.exceptions.ConnectionError as error:
-            raise ftrack_api_old.exception.EventHubConnectionError(
-                'Failed to connect to server: {0}.'.format(error)
-            )
-        else:
-            status = response.status_code
-            if status != 200:
-                raise ftrack_api_old.exception.EventHubConnectionError(
-                    'Received unexpected status code {0}.'.format(status)
-                )
-
-            # Parse result and return session information.
-            parts = response.text.split(':')
-            return SocketIoSession(
-                parts[0],
-                parts[1],
-                parts[3].split(',')
-            )
-
-    def _add_packet_callback(self, callback):
-        '''Store callback against a new unique packet ID.
-
-        Return the unique packet ID.
-
-        '''
-        with self._lock:
-            self._unique_packet_id += 1
-            unique_identifier = self._unique_packet_id
-
-        self._packet_callbacks[unique_identifier] = callback
-
-        return '{0}+'.format(unique_identifier)
-
-    def _pop_packet_callback(self, packet_identifier):
-        '''Pop and return callback for *packet_identifier*.'''
-        return self._packet_callbacks.pop(packet_identifier)
-
-    def _emit_event_packet(self, namespace, event, callback):
-        '''Send *event* packet under *namespace*.'''
-        data = self._encode(
-            dict(name=namespace, args=[event])
-        )
-        self._send_packet(
-            self._code_name_mapping['event'], data=data, callback=callback
-        )
-
-    def _acknowledge_packet(self, packet_identifier, *args):
-        '''Send acknowledgement of packet with *packet_identifier*.'''
-        packet_identifier = packet_identifier.rstrip('+')
-        data = str(packet_identifier)
-        if args:
-            data += '+{0}'.format(self._encode(args))
-
-        self._send_packet(self._code_name_mapping['acknowledge'], data=data)
-
-    def _send_packet(self, code, data='', callback=None):
-        '''Send packet via connection.'''
-        path = ''
-        packet_identifier = (
-            self._add_packet_callback(callback) if callback else ''
-        )
-        packet_parts = (str(code), packet_identifier, path, data)
-        packet = ':'.join(packet_parts)
-
-        try:
-            self._connection.send(packet)
-            self.logger.debug(L(u'Sent packet: {0}', packet))
-        except socket.error as error:
-            raise ftrack_api_old.exception.EventHubConnectionError(
-                'Failed to send packet: {0}'.format(error)
-            )
-
-    def _receive_packet(self):
-        '''Receive and return packet via connection.'''
-        try:
-            packet = self._connection.recv()
-        except Exception as error:
-            raise ftrack_api_old.exception.EventHubConnectionError(
-                'Error receiving packet: {0}'.format(error)
-            )
-
-        try:
-            parts = packet.split(':', 3)
-        except AttributeError:
-            raise ftrack_api_old.exception.EventHubPacketError(
-                'Received invalid packet {0}'.format(packet)
-            )
-
-        code, packet_identifier, path, data = None, None, None, None
-
-        count = len(parts)
-        if count == 4:
-            code, packet_identifier, path, data = parts
-        elif count == 3:
-            code, packet_identifier, path = parts
-        elif count == 1:
-            code = parts[0]
-        else:
-            raise ftrack_api_old.exception.EventHubPacketError(
-                'Received invalid packet {0}'.format(packet)
-            )
-
-        self.logger.debug(L('Received packet: {0}', packet))
-        return code, packet_identifier, path, data
-
-    def _handle_packet(self, code, packet_identifier, path, data):
-        '''Handle packet received from server.'''
-        code_name = self._code_name_mapping[code]
-
-        if code_name == 'connect':
-            self.logger.debug('Connected to event server.')
-            event = ftrack_api_old.event.base.Event('ftrack.meta.connected')
-            self._prepare_event(event)
-            self._event_queue.put(event)
-
-        elif code_name == 'disconnect':
-            self.logger.debug('Disconnected from event server.')
-            if not self._intentional_disconnect:
-                self.logger.debug(
-                    'Disconnected unexpectedly. Attempting to reconnect.'
-                )
-                try:
-                    self.reconnect(
-                        attempts=self._auto_reconnect_attempts,
-                        delay=self._auto_reconnect_delay
-                    )
-                except ftrack_api_old.exception.EventHubConnectionError:
-                    self.logger.debug('Failed to reconnect automatically.')
-                else:
-                    self.logger.debug('Reconnected successfully.')
-
-            if not self.connected:
-                event = ftrack_api_old.event.base.Event('ftrack.meta.disconnected')
-                self._prepare_event(event)
-                self._event_queue.put(event)
-
-        elif code_name == 'heartbeat':
-            # Reply with heartbeat.
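The wire format handled by `_send_packet` and `_receive_packet` above is the Socket.IO 0.9 frame: up to four colon-separated fields, of which only the code is mandatory. A simplified standalone round trip (the padding shortcut here accepts any field count, whereas the vendored parser only accepts one, three, or four):

```python
def frame_packet(code, packet_identifier='', path='', data=''):
    '''Join packet parts into a Socket.IO 0.9 style frame.'''
    return ':'.join((str(code), packet_identifier, path, data))


def parse_packet(packet):
    '''Split a frame back into (code, packet_identifier, path, data).'''
    parts = packet.split(':', 3)
    parts += [''] * (4 - len(parts))  # pad optional trailing fields
    return tuple(parts)


packet = frame_packet('5', '1+', '', '{"name": "ftrack.event", "args": []}')
print(packet)                # 5:1+::{"name": "ftrack.event", "args": []}
print(parse_packet(packet))  # ('5', '1+', '', '{"name": "ftrack.event", ...')
```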
- self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == 'message': - self.logger.debug(L('Message received: {0}', data)) - - elif code_name == 'event': - payload = self._decode(data) - args = payload.get('args', []) - - if len(args) == 1: - event_payload = args[0] - if isinstance(event_payload, collections.Mapping): - try: - event = ftrack_api_old.event.base.Event(**event_payload) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_payload - )) - return - - self._event_queue.put(event) - - elif code_name == 'acknowledge': - parts = data.split('+', 1) - acknowledged_packet_identifier = int(parts[0]) - args = [] - if len(parts) == 2: - args = self._decode(parts[1]) - - try: - callback = self._pop_packet_callback( - acknowledged_packet_identifier - ) - except KeyError: - pass - else: - callback(*args) - - elif code_name == 'error': - self.logger.error(L('Event server reported error: {0}.', data)) - - else: - self.logger.debug(L('{0}: {1}', code_name, data)) - - def _encode(self, data): - '''Return *data* encoded as JSON formatted string.''' - return json.dumps( - data, - default=self._encode_object_hook, - ensure_ascii=False - ) - - def _encode_object_hook(self, item): - '''Return *item* transformed for encoding.''' - if isinstance(item, ftrack_api_old.event.base.Event): - # Convert to dictionary for encoding. - item = dict(**item) - - if 'in_reply_to_event' in item: - # Convert keys to server convention. - item['inReplyToEvent'] = item.pop('in_reply_to_event') - - return item - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def _decode(self, string): - '''Return decoded JSON *string* as Python object.''' - return json.loads(string, object_hook=self._decode_object_hook) - - def _decode_object_hook(self, item): - '''Return *item* transformed.''' - if isinstance(item, collections.Mapping): - if 'inReplyToEvent' in item: - item['in_reply_to_event'] = item.pop('inReplyToEvent') - - return item - - -class _SubscriptionContext(object): - '''Context manager for a one-off subscription.''' - - def __init__(self, hub, subscription, callback, subscriber, priority): - '''Initialise context.''' - self._hub = hub - self._subscription = subscription - self._callback = callback - self._subscriber = subscriber - self._priority = priority - self._subscriberIdentifier = None - - def __enter__(self): - '''Enter context subscribing callback to topic.''' - self._subscriberIdentifier = self._hub.subscribe( - self._subscription, self._callback, subscriber=self._subscriber, - priority=self._priority - ) - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context unsubscribing callback from topic.''' - self._hub.unsubscribe(self._subscriberIdentifier) - - -class _ProcessorThread(threading.Thread): - '''Process messages from server.''' - - daemon = True - - def __init__(self, client): - '''Initialise thread with Socket.IO *client* instance.''' - super(_ProcessorThread, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self.client = client - self.done = threading.Event() - - def run(self): - '''Perform work in thread.''' - while not self.done.is_set(): - try: - code, packet_identifier, path, data = self.client._receive_packet() - self.client._handle_packet(code, packet_identifier, path, data) - - except ftrack_api_old.exception.EventHubPacketError as error: - self.logger.debug(L('Ignoring invalid packet: {0}', error)) - continue - - except ftrack_api_old.exception.EventHubConnectionError: - self.cancel() - - # Fake a disconnection event in order to trigger reconnection - # when necessary. - self.client._handle_packet('0', '', '', '') - - break - - except Exception as error: - self.logger.debug(L('Aborting processor thread: {0}', error)) - self.cancel() - break - - def cancel(self): - '''Cancel work as soon as possible.''' - self.done.set() diff --git a/pype/vendor/ftrack_api_old/event/subscriber.py b/pype/vendor/ftrack_api_old/event/subscriber.py deleted file mode 100644 index 522cf3d86f..0000000000 --- a/pype/vendor/ftrack_api_old/event/subscriber.py +++ /dev/null @@ -1,27 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api_old.event.subscription - - -class Subscriber(object): - '''Represent event subscriber.''' - - def __init__(self, subscription, callback, metadata, priority): - '''Initialise subscriber.''' - self.subscription = ftrack_api_old.event.subscription.Subscription( - subscription - ) - self.callback = callback - self.metadata = metadata - self.priority = priority - - def __str__(self): - '''Return string representation.''' - return '<{0} metadata={1} subscription="{2}">'.format( - self.__class__.__name__, self.metadata, self.subscription - ) - - def interested_in(self, event): - '''Return whether subscriber interested in *event*.''' - return self.subscription.includes(event) diff --git a/pype/vendor/ftrack_api_old/event/subscription.py b/pype/vendor/ftrack_api_old/event/subscription.py deleted file mode 100644 index 87b777c412..0000000000 --- a/pype/vendor/ftrack_api_old/event/subscription.py +++ /dev/null @@ -1,23 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api_old.event.expression - - -class Subscription(object): - '''Represent a subscription.''' - - parser = ftrack_api_old.event.expression.Parser() - - def __init__(self, subscription): - '''Initialise with *subscription*.''' - self._subscription = subscription - self._expression = self.parser.parse(subscription) - - def __str__(self): - '''Return string representation.''' - return self._subscription - - def includes(self, event): - '''Return whether subscription includes *event*.''' - return self._expression.match(event) diff --git a/pype/vendor/ftrack_api_old/exception.py b/pype/vendor/ftrack_api_old/exception.py deleted file mode 100644 index bc1c87c060..0000000000 --- a/pype/vendor/ftrack_api_old/exception.py +++ /dev/null @@ -1,392 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import sys -import traceback - -import ftrack_api_old.entity.base - - -class Error(Exception): - '''ftrack specific error.''' - - default_message = 'Unspecified error occurred.' - - def __init__(self, message=None, details=None): - '''Initialise exception with *message*. - - If *message* is None, the class 'default_message' will be used. - - *details* should be a mapping of extra information that can be used in - the message and also to provide more context. 
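
The pattern used by every subclass below: override `default_message`, pass context through `details`, and `__str__` interpolates the details into the message. A small sketch (the subclass and its values are hypothetical, not part of the module):

    class ExampleLockedError(Error):
        '''Raise when a hypothetical resource is locked.'''

        default_message = 'Resource {resource} is locked by {user}.'

    error = ExampleLockedError(
        details={'resource': '/tmp/shot010.nk', 'user': 'jana'}
    )
    str(error)  # -> 'Resource /tmp/shot010.nk is locked by jana.'
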
- - ''' - if message is None: - message = self.default_message - - self.message = message - self.details = details - if self.details is None: - self.details = {} - - self.traceback = traceback.format_exc() - - def __str__(self): - '''Return string representation.''' - keys = {} - for key, value in self.details.iteritems(): - if isinstance(value, unicode): - value = value.encode(sys.getfilesystemencoding()) - keys[key] = value - - return str(self.message.format(**keys)) - - -class AuthenticationError(Error): - '''Raise when an authentication error occurs.''' - - default_message = 'Authentication error.' - - -class ServerError(Error): - '''Raise when the server reports an error.''' - - default_message = 'Server reported error processing request.' - - -class ServerCompatibilityError(ServerError): - '''Raise when server appears incompatible.''' - - default_message = 'Server incompatible.' - - -class NotFoundError(Error): - '''Raise when something that should exist is not found.''' - - default_message = 'Not found.' - - -class NotUniqueError(Error): - '''Raise when unique value required and duplicate detected.''' - - default_message = 'Non-unique value detected.' - - -class IncorrectResultError(Error): - '''Raise when a result is incorrect.''' - - default_message = 'Incorrect result detected.' - - -class NoResultFoundError(IncorrectResultError): - '''Raise when a result was expected but no result was found.''' - - default_message = 'Expected result, but no result was found.' - - -class MultipleResultsFoundError(IncorrectResultError): - '''Raise when a single result expected, but multiple results found.''' - - default_message = 'Expected single result, but received multiple results.' - - -class EntityTypeError(Error): - '''Raise when an entity type error occurs.''' - - default_message = 'Entity type error.' - - -class UnrecognisedEntityTypeError(EntityTypeError): - '''Raise when an unrecognised entity type detected.''' - - default_message = 'Entity type "{entity_type}" not recognised.' - - def __init__(self, entity_type, **kw): - '''Initialise with *entity_type* that is unrecognised.''' - kw.setdefault('details', {}).update(dict( - entity_type=entity_type - )) - super(UnrecognisedEntityTypeError, self).__init__(**kw) - - -class OperationError(Error): - '''Raise when an operation error occurs.''' - - default_message = 'Operation error.' - - -class InvalidStateError(Error): - '''Raise when an invalid state detected.''' - - default_message = 'Invalid state.' - - -class InvalidStateTransitionError(InvalidStateError): - '''Raise when an invalid state transition detected.''' - - default_message = ( - 'Invalid transition from {current_state!r} to {target_state!r} state ' - 'for entity {entity!r}' - ) - - def __init__(self, current_state, target_state, entity, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - current_state=current_state, - target_state=target_state, - entity=entity - )) - super(InvalidStateTransitionError, self).__init__(**kw) - - -class AttributeError(Error): - '''Raise when an error related to an attribute occurs.''' - - default_message = 'Attribute error.' - - -class ImmutableAttributeError(AttributeError): - '''Raise when modification of immutable attribute attempted.''' - - default_message = ( - 'Cannot modify value of immutable {attribute.name!r} attribute.' 
- ) - - def __init__(self, attribute, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - attribute=attribute - )) - super(ImmutableAttributeError, self).__init__(**kw) - - -class CollectionError(Error): - '''Raise when an error related to collections occurs.''' - - default_message = 'Collection error.' - - def __init__(self, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - collection=collection - )) - super(CollectionError, self).__init__(**kw) - - -class ImmutableCollectionError(CollectionError): - '''Raise when modification of immutable collection attempted.''' - - default_message = ( - 'Cannot modify value of immutable collection {collection!r}.' - ) - - -class DuplicateItemInCollectionError(CollectionError): - '''Raise when duplicate item in collection detected.''' - - default_message = ( - 'Item {item!r} already exists in collection {collection!r}.' - ) - - def __init__(self, item, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - item=item - )) - super(DuplicateItemInCollectionError, self).__init__(collection, **kw) - - -class ParseError(Error): - '''Raise when a parsing error occurs.''' - - default_message = 'Failed to parse.' - - -class EventHubError(Error): - '''Raise when issues related to event hub occur.''' - - default_message = 'Event hub error occurred.' - - -class EventHubConnectionError(EventHubError): - '''Raise when event hub encounters connection problem.''' - - default_message = 'Event hub is not connected.' - - -class EventHubPacketError(EventHubError): - '''Raise when event hub encounters an issue with a packet.''' - - default_message = 'Invalid packet.' - - -class PermissionDeniedError(Error): - '''Raise when permission is denied.''' - - default_message = 'Permission denied.' - - -class LocationError(Error): - '''Base for errors associated with locations.''' - - default_message = 'Unspecified location error' - - -class ComponentNotInAnyLocationError(LocationError): - '''Raise when component not available in any location.''' - - default_message = 'Component not available in any location.' - - -class ComponentNotInLocationError(LocationError): - '''Raise when component(s) not in location.''' - - default_message = ( - 'Component(s) {formatted_components} not found in location {location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api_old.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentNotInLocationError, self).__init__(**kw) - - -class ComponentInLocationError(LocationError): - '''Raise when component(s) already exists in location.''' - - default_message = ( - 'Component(s) {formatted_components} already exist in location ' - '{location}.' 
- ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api_old.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentInLocationError, self).__init__(**kw) - - -class AccessorError(Error): - '''Base for errors associated with accessors.''' - - default_message = 'Unspecified accessor error' - - -class AccessorOperationFailedError(AccessorError): - '''Base for failed operations on accessors.''' - - default_message = 'Operation {operation} failed: {error}' - - def __init__( - self, operation='', resource_identifier=None, error=None, **kw - ): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier, - error=error - )) - super(AccessorOperationFailedError, self).__init__(**kw) - - -class AccessorUnsupportedOperationError(AccessorOperationFailedError): - '''Raise when operation is unsupported.''' - - default_message = 'Operation {operation} unsupported.' - - -class AccessorPermissionDeniedError(AccessorOperationFailedError): - '''Raise when permission denied.''' - - default_message = ( - 'Cannot {operation} {resource_identifier}. Permission denied.' - ) - - -class AccessorResourceIdentifierError(AccessorError): - '''Raise when a error related to a resource_identifier occurs.''' - - default_message = 'Resource identifier is invalid: {resource_identifier}.' - - def __init__(self, resource_identifier, **kw): - kw.setdefault('details', {}).update(dict( - resource_identifier=resource_identifier - )) - super(AccessorResourceIdentifierError, self).__init__(**kw) - - -class AccessorFilesystemPathError(AccessorResourceIdentifierError): - '''Raise when a error related to an accessor filesystem path occurs.''' - - default_message = ( - 'Could not determine filesystem path from resource identifier: ' - '{resource_identifier}.' 
- ) - - -class AccessorResourceError(AccessorError): - '''Base for errors associated with specific resource.''' - - default_message = 'Unspecified resource error: {resource_identifier}' - - def __init__(self, operation='', resource_identifier=None, error=None, - **kw): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier - )) - super(AccessorResourceError, self).__init__(**kw) - - -class AccessorResourceNotFoundError(AccessorResourceError): - '''Raise when a required resource is not found.''' - - default_message = 'Resource not found: {resource_identifier}' - - -class AccessorParentResourceNotFoundError(AccessorResourceError): - '''Raise when a parent resource (such as directory) is not found.''' - - default_message = 'Parent resource is missing: {resource_identifier}' - - -class AccessorResourceInvalidError(AccessorResourceError): - '''Raise when a resource is not the right type.''' - - default_message = 'Resource invalid: {resource_identifier}' - - -class AccessorContainerNotEmptyError(AccessorResourceError): - '''Raise when container is not empty.''' - - default_message = 'Container is not empty: {resource_identifier}' - - -class StructureError(Error): - '''Base for errors associated with structures.''' - - default_message = 'Unspecified structure error' - - -class ConnectionClosedError(Error): - '''Raise when attempt to use closed connection detected.''' - - default_message = "Connection closed." diff --git a/pype/vendor/ftrack_api_old/formatter.py b/pype/vendor/ftrack_api_old/formatter.py deleted file mode 100644 index 543b9ef04e..0000000000 --- a/pype/vendor/ftrack_api_old/formatter.py +++ /dev/null @@ -1,131 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import termcolor - -import ftrack_api_old.entity.base -import ftrack_api_old.collection -import ftrack_api_old.symbol -import ftrack_api_old.inspection - - -#: Useful filters to pass to :func:`format`.` -FILTER = { - 'ignore_unset': ( - lambda entity, name, value: value is not ftrack_api_old.symbol.NOT_SET - ) -} - - -def format( - entity, formatters=None, attribute_filter=None, recursive=False, - indent=0, indent_first_line=True, _seen=None -): - '''Return formatted string representing *entity*. - - *formatters* can be used to customise formatting of elements. It should be a - mapping with one or more of the following keys: - - * header - Used to format entity type. - * label - Used to format attribute names. - - Specify an *attribute_filter* to control which attributes to include. By - default all attributes are included. The *attribute_filter* should be a - callable that accepts `(entity, attribute_name, attribute_value)` and - returns True if the attribute should be included in the output. For example, - to filter out all unset values:: - - attribute_filter=ftrack_api_old.formatter.FILTER['ignore_unset'] - - If *recursive* is True then recurse into Collections and format each entity - present. - - *indent* specifies the overall indentation in spaces of the formatted text, - whilst *indent_first_line* determines whether to apply that indent to the - first generated line. - - .. warning:: - - Iterates over all *entity* attributes which may cause multiple queries - to the server. Turn off auto populating in the session to prevent this. - - ''' - # Initialise default formatters. 
- if formatters is None: - formatters = dict() - - formatters.setdefault( - 'header', lambda text: termcolor.colored( - text, 'white', 'on_blue', attrs=['bold'] - ) - ) - formatters.setdefault( - 'label', lambda text: termcolor.colored( - text, 'blue', attrs=['bold'] - ) - ) - - # Determine indents. - spacer = ' ' * indent - if indent_first_line: - first_line_spacer = spacer - else: - first_line_spacer = '' - - # Avoid infinite recursion on circular references. - if _seen is None: - _seen = set() - - identifier = str(ftrack_api_old.inspection.identity(entity)) - if identifier in _seen: - return ( - first_line_spacer + - formatters['header'](entity.entity_type) + '{...}' - ) - - _seen.add(identifier) - information = list() - - information.append( - first_line_spacer + formatters['header'](entity.entity_type) - ) - for key, value in sorted(entity.items()): - if attribute_filter is not None: - if not attribute_filter(entity, key, value): - continue - - child_indent = indent + len(key) + 3 - - if isinstance(value, ftrack_api_old.entity.base.Entity): - value = format( - value, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=False, - _seen=_seen.copy() - ) - - if isinstance(value, ftrack_api_old.collection.Collection): - if recursive: - child_values = [] - for index, child in enumerate(value): - child_value = format( - child, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=index != 0, - _seen=_seen.copy() - ) - child_values.append(child_value) - - value = '\n'.join(child_values) - - information.append( - spacer + u' {0}: {1}'.format(formatters['label'](key), value) - ) - - return '\n'.join(information) diff --git a/pype/vendor/ftrack_api_old/inspection.py b/pype/vendor/ftrack_api_old/inspection.py deleted file mode 100644 index fbef8a5873..0000000000 --- a/pype/vendor/ftrack_api_old/inspection.py +++ /dev/null @@ -1,135 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections - -import ftrack_api_old.symbol -import ftrack_api_old.operation - - -def identity(entity): - '''Return unique identity of *entity*.''' - return ( - str(entity.entity_type), - primary_key(entity).values() - ) - - -def primary_key(entity): - '''Return primary key of *entity* as an ordered mapping of {field: value}. - - To get just the primary key values:: - - primary_key(entity).values() - - ''' - primary_key = collections.OrderedDict() - for name in entity.primary_key_attributes: - value = entity[name] - if value is ftrack_api_old.symbol.NOT_SET: - raise KeyError( - 'Missing required value for primary key attribute "{0}" on ' - 'entity {1!r}.'.format(name, entity) - ) - - primary_key[str(name)] = str(value) - - return primary_key - - -def _state(operation, state): - '''Return state following *operation* against current *state*.''' - if ( - isinstance( - operation, ftrack_api_old.operation.CreateEntityOperation - ) - and state is ftrack_api_old.symbol.NOT_SET - ): - state = ftrack_api_old.symbol.CREATED - - elif ( - isinstance( - operation, ftrack_api_old.operation.UpdateEntityOperation - ) - and state is ftrack_api_old.symbol.NOT_SET - ): - state = ftrack_api_old.symbol.MODIFIED - - elif isinstance( - operation, ftrack_api_old.operation.DeleteEntityOperation - ): - state = ftrack_api_old.symbol.DELETED - - return state - - -def state(entity): - '''Return current *entity* state. - - .. seealso:: :func:`ftrack_api_old.inspection.states`. 
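
A usage sketch for the `format` function above, assuming an existing `session`; the entity and its id are hypothetical:

    import ftrack_api_old.formatter

    task = session.get('Task', 'some-task-id')  # hypothetical id
    print(ftrack_api_old.formatter.format(
        task,
        attribute_filter=ftrack_api_old.formatter.FILTER['ignore_unset'],
        recursive=False
    ))
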
- - ''' - value = ftrack_api_old.symbol.NOT_SET - - for operation in entity.session.recorded_operations: - # Determine if operation refers to an entity and whether that entity - # is *entity*. - if ( - isinstance( - operation, - ( - ftrack_api_old.operation.CreateEntityOperation, - ftrack_api_old.operation.UpdateEntityOperation, - ftrack_api_old.operation.DeleteEntityOperation - ) - ) - and operation.entity_type == entity.entity_type - and operation.entity_key == primary_key(entity) - ): - value = _state(operation, value) - - return value - - -def states(entities): - '''Return current states of *entities*. - - An optimised function for determining states of multiple entities in one - go. - - .. note:: - - All *entities* should belong to the same session. - - .. seealso:: :func:`ftrack_api_old.inspection.state`. - - ''' - if not entities: - return [] - - session = entities[0].session - - entities_by_identity = collections.OrderedDict() - for entity in entities: - key = (entity.entity_type, str(primary_key(entity).values())) - entities_by_identity[key] = ftrack_api_old.symbol.NOT_SET - - for operation in session.recorded_operations: - if ( - isinstance( - operation, - ( - ftrack_api_old.operation.CreateEntityOperation, - ftrack_api_old.operation.UpdateEntityOperation, - ftrack_api_old.operation.DeleteEntityOperation - ) - ) - ): - key = (operation.entity_type, str(operation.entity_key.values())) - if key not in entities_by_identity: - continue - - value = _state(operation, entities_by_identity[key]) - entities_by_identity[key] = value - - return entities_by_identity.values() diff --git a/pype/vendor/ftrack_api_old/logging.py b/pype/vendor/ftrack_api_old/logging.py deleted file mode 100644 index 41969c5b2a..0000000000 --- a/pype/vendor/ftrack_api_old/logging.py +++ /dev/null @@ -1,43 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -import functools -import warnings - - -def deprecation_warning(message): - def decorator(function): - @functools.wraps(function) - def wrapper(*args, **kwargs): - warnings.warn( - message, - PendingDeprecationWarning - ) - return function(*args, **kwargs) - return wrapper - - return decorator - - -class LazyLogMessage(object): - '''A log message that can be evaluated lazily for improved performance. - - Example:: - - # Formatting of string will not occur unless debug logging enabled. 
- logger.debug(LazyLogMessage( - 'Hello {0}', 'world' - )) - - ''' - - def __init__(self, message, *args, **kwargs): - '''Initialise with *message* format string and arguments.''' - self.message = message - self.args = args - self.kwargs = kwargs - - def __str__(self): - '''Return string representation.''' - return self.message.format(*self.args, **self.kwargs) - diff --git a/pype/vendor/ftrack_api_old/operation.py b/pype/vendor/ftrack_api_old/operation.py deleted file mode 100644 index ebf5ce46fe..0000000000 --- a/pype/vendor/ftrack_api_old/operation.py +++ /dev/null @@ -1,115 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import copy - - -class Operations(object): - '''Stack of operations.''' - - def __init__(self): - '''Initialise stack.''' - self._stack = [] - super(Operations, self).__init__() - - def clear(self): - '''Clear all operations.''' - del self._stack[:] - - def push(self, operation): - '''Push *operation* onto stack.''' - self._stack.append(operation) - - def pop(self): - '''Pop and return most recent operation from stack.''' - return self._stack.pop() - - def __len__(self): - '''Return count of operations.''' - return len(self._stack) - - def __iter__(self): - '''Return iterator over operations.''' - return iter(self._stack) - - -class Operation(object): - '''Represent an operation.''' - - -class CreateEntityOperation(Operation): - '''Represent create entity operation.''' - - def __init__(self, entity_type, entity_key, entity_data): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api_old.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api_old.inspection.primary_key`. - - *entity_data* should be a mapping of the initial data to populate the - entity with when creating. - - .. note:: - - Shallow copies will be made of each value in *entity_data*. - - ''' - super(CreateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.entity_data = {} - for key, value in entity_data.items(): - self.entity_data[key] = copy.copy(value) - - -class UpdateEntityOperation(Operation): - '''Represent update entity operation.''' - - def __init__( - self, entity_type, entity_key, attribute_name, old_value, new_value - ): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api_old.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api_old.inspection.primary_key`. - - *attribute_name* should be the string name of the attribute being - modified and *old_value* and *new_value* should reflect the change in - value. - - .. note:: - - Shallow copies will be made of both *old_value* and *new_value*. - - ''' - super(UpdateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.attribute_name = attribute_name - self.old_value = copy.copy(old_value) - self.new_value = copy.copy(new_value) - - -class DeleteEntityOperation(Operation): - '''Represent delete entity operation.''' - - def __init__(self, entity_type, entity_key): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api_old.entity.base.Entity.entity_type`). 
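
To make the operation classes below concrete, here is how a session-style attribute change would be recorded and inspected (entity key and values hypothetical; in practice the key comes from `ftrack_api_old.inspection.primary_key`):

    operations = Operations()
    operations.push(
        UpdateEntityOperation(
            'Task',            # entity_type
            {'id': '1234'},    # entity_key (hypothetical)
            'name',            # attribute_name
            'old name',        # old_value
            'new name'         # new_value
        )
    )
    len(operations)   # -> 1
    operations.pop()  # -> the UpdateEntityOperation instance
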
- - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api_old.inspection.primary_key`. - - ''' - super(DeleteEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - diff --git a/pype/vendor/ftrack_api_old/plugin.py b/pype/vendor/ftrack_api_old/plugin.py deleted file mode 100644 index 2c7a9a4500..0000000000 --- a/pype/vendor/ftrack_api_old/plugin.py +++ /dev/null @@ -1,121 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import os -import uuid -import imp -import inspect - - -def discover(paths, positional_arguments=None, keyword_arguments=None): - '''Find and load plugins in search *paths*. - - Each discovered module should implement a register function that accepts - *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs - respectively. - - If a register function does not accept variable arguments, then attempt to - only pass accepted arguments to the function by inspecting its signature. - - ''' - logger = logging.getLogger(__name__ + '.discover') - - if positional_arguments is None: - positional_arguments = [] - - if keyword_arguments is None: - keyword_arguments = {} - - for path in paths: - # Ignore empty paths that could resolve to current directory. - path = path.strip() - if not path: - continue - - for base, directories, filenames in os.walk(path): - for filename in filenames: - name, extension = os.path.splitext(filename) - if extension != '.py': - continue - - module_path = os.path.join(base, filename) - unique_name = uuid.uuid4().hex - - try: - module = imp.load_source(unique_name, module_path) - except Exception as error: - logger.warning( - 'Failed to load plugin from "{0}": {1}' - .format(module_path, error) - ) - continue - - try: - module.register - except AttributeError: - logger.warning( - 'Failed to load plugin that did not define a ' - '"register" function at the module level: {0}' - .format(module_path) - ) - else: - # Attempt to only pass arguments that are accepted by the - # register function. - specification = inspect.getargspec(module.register) - - selected_positional_arguments = positional_arguments - selected_keyword_arguments = keyword_arguments - - if ( - not specification.varargs and - len(positional_arguments) > len(specification.args) - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' - ) - - selected_positional_arguments = positional_arguments[ - len(specification.args): - ] - selected_keyword_arguments = {} - - elif not specification.keywords: - # Remove arguments that have been passed as positionals. - remainder = specification.args[ - len(positional_arguments): - ] - - # Determine remaining available keyword arguments. - defined_keyword_arguments = [] - if specification.defaults: - defined_keyword_arguments = specification.args[ - -len(specification.defaults): - ] - - remaining_keyword_arguments = set([ - keyword_argument for keyword_argument - in defined_keyword_arguments - if keyword_argument in remainder - ]) - - if not set(keyword_arguments.keys()).issubset( - remaining_keyword_arguments - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' 
-                        )
-                        selected_keyword_arguments = {
-                            key: value
-                            for key, value in keyword_arguments.items()
-                            if key in remaining_keyword_arguments
-                        }
-
-                module.register(
-                    *selected_positional_arguments,
-                    **selected_keyword_arguments
-                )
diff --git a/pype/vendor/ftrack_api_old/query.py b/pype/vendor/ftrack_api_old/query.py
deleted file mode 100644
index 03b33f20c4..0000000000
--- a/pype/vendor/ftrack_api_old/query.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-import re
-import collections
-
-import ftrack_api_old.exception
-
-
-class QueryResult(collections.Sequence):
-    '''Results from a query.'''
-
-    OFFSET_EXPRESSION = re.compile('(?P<offset>offset (?P<value>\d+))')
-    LIMIT_EXPRESSION = re.compile('(?P<limit>limit (?P<value>\d+))')
-
-    def __init__(self, session, expression, page_size=500):
-        '''Initialise result set.
-
-        *session* should be an instance of :class:`ftrack_api_old.session.Session`
-        that will be used for executing the query *expression*.
-
-        *page_size* should be an integer specifying the maximum number of
-        records to fetch in one request allowing the results to be fetched
-        incrementally in a transparent manner for optimal performance. Any
-        offset or limit specified in *expression* are honoured for final result
-        set, but intermediate queries may be issued with different offsets and
-        limits in order to fetch pages. When an embedded limit is smaller than
-        the given *page_size* it will be used instead and no paging will take
-        place.
-
-        .. warning::
-
-            Setting *page_size* to a very large amount may negatively impact
-            performance of not only the caller, but the server in general.
-
-        '''
-        super(QueryResult, self).__init__()
-        self._session = session
-        self._results = []
-
-        (
-            self._expression,
-            self._offset,
-            self._limit
-        ) = self._extract_offset_and_limit(expression)
-
-        self._page_size = page_size
-        if self._limit is not None and self._limit < self._page_size:
-            # Optimise case where embedded limit is less than fetching a
-            # single page.
-            self._page_size = self._limit
-
-        self._next_offset = self._offset
-        if self._next_offset is None:
-            # Initialise with zero offset.
-            self._next_offset = 0
-
-    def _extract_offset_and_limit(self, expression):
-        '''Process *expression* extracting offset and limit.
-
-        Return (expression, offset, limit).
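
An illustration of what the extraction returns and how paging then proceeds (expression hypothetical; the inspected attributes are private implementation details):

    result = QueryResult(session, 'Task offset 10 limit 5', page_size=500)
    result._expression  # -> 'Task'
    result._offset      # -> 10
    result._limit       # -> 5; also caps the effective page size at 5
    # Iteration then reissues 'Task offset 10 limit 5' style sub-queries
    # until the limit is reached or the server reports no further offset.
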
- - ''' - offset = None - match = self.OFFSET_EXPRESSION.search(expression) - if match: - offset = int(match.group('value')) - expression = ( - expression[:match.start('offset')] + - expression[match.end('offset'):] - ) - - limit = None - match = self.LIMIT_EXPRESSION.search(expression) - if match: - limit = int(match.group('value')) - expression = ( - expression[:match.start('limit')] + - expression[match.end('limit'):] - ) - - return expression.strip(), offset, limit - - def __getitem__(self, index): - '''Return value at *index*.''' - while self._can_fetch_more() and index >= len(self._results): - self._fetch_more() - - return self._results[index] - - def __len__(self): - '''Return number of items.''' - while self._can_fetch_more(): - self._fetch_more() - - return len(self._results) - - def _can_fetch_more(self): - '''Return whether more results are available to fetch.''' - return self._next_offset is not None - - def _fetch_more(self): - '''Fetch next page of results if available.''' - if not self._can_fetch_more(): - return - - expression = '{0} offset {1} limit {2}'.format( - self._expression, self._next_offset, self._page_size - ) - records, metadata = self._session._query(expression) - self._results.extend(records) - - if self._limit is not None and (len(self._results) >= self._limit): - # Original limit reached. - self._next_offset = None - del self._results[self._limit:] - else: - # Retrieve next page offset from returned metadata. - self._next_offset = metadata.get('next', {}).get('offset', None) - - def all(self): - '''Fetch and return all data.''' - return list(self) - - def one(self): - '''Return exactly one single result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - Raise :exc:`ValueError` if an existing offset is already present in the - expression as offset is inappropriate when expecting a single item. - - Raise :exc:`~ftrack_api_old.exception.MultipleResultsFoundError` if more - than one result was available or - :exc:`~ftrack_api_old.exception.NoResultFoundError` if no results were - available. - - .. note:: - - Both errors subclass - :exc:`~ftrack_api_old.exception.IncorrectResultError` if you want to - catch only one error type. - - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - if self._offset is not None: - raise ValueError( - 'Expression contains an offset clause which does not make ' - 'sense when selecting a single item.' - ) - - # Apply custom limit as optimisation. A limit of 2 is used rather than - # 1 so that it is possible to test for multiple matching entries - # case. - expression += ' limit 2' - - results, metadata = self._session._query(expression) - - if not results: - raise ftrack_api_old.exception.NoResultFoundError() - - if len(results) != 1: - raise ftrack_api_old.exception.MultipleResultsFoundError() - - return results[0] - - def first(self): - '''Return first matching result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - If no matching result available return None. - - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - # Apply custom offset if present. - if self._offset is not None: - expression += ' offset {0}'.format(self._offset) - - # Apply custom limit as optimisation. 
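
The difference between the two convenience methods, sketched with hypothetical queries:

    # one(): exactly one match required; raises NoResultFoundError or
    # MultipleResultsFoundError otherwise (both IncorrectResultError).
    user = session.query('User where username is "jana"').one()

    # first(): first match or None; an expression that already contains a
    # 'limit' clause raises ValueError for both methods.
    maybe_user = session.query('User where username is "nobody"').first()
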
- expression += ' limit 1' - - results, metadata = self._session._query(expression) - - if results: - return results[0] - - return None diff --git a/pype/vendor/ftrack_api_old/resource_identifier_transformer/__init__.py b/pype/vendor/ftrack_api_old/resource_identifier_transformer/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/pype/vendor/ftrack_api_old/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/pype/vendor/ftrack_api_old/resource_identifier_transformer/base.py b/pype/vendor/ftrack_api_old/resource_identifier_transformer/base.py deleted file mode 100644 index c344d51049..0000000000 --- a/pype/vendor/ftrack_api_old/resource_identifier_transformer/base.py +++ /dev/null @@ -1,50 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - - -class ResourceIdentifierTransformer(object): - '''Transform resource identifiers. - - Provide ability to modify resource identifier before it is stored centrally - (:meth:`encode`), or after it has been retrieved, but before it is used - locally (:meth:`decode`). - - For example, you might want to decompose paths into a set of key, value - pairs to store centrally and then compose a path from those values when - reading back. - - .. note:: - - This is separate from any transformations an - :class:`ftrack_api_old.accessor.base.Accessor` may perform and is targeted - towards common transformations. - - ''' - - def __init__(self, session): - '''Initialise resource identifier transformer. - - *session* should be the :class:`ftrack_api_old.session.Session` instance - to use for communication with the server. - - ''' - self.session = session - super(ResourceIdentifierTransformer, self).__init__() - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return resource_identifier - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. 
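
A hypothetical subclass showing the intended symmetry of `encode` and `decode`, here stripping a studio mount prefix before central storage and restoring it on the way back:

    class PrefixTransformer(ResourceIdentifierTransformer):
        '''Strip a mount prefix when storing, restore it when reading.'''

        prefix = '/mnt/projects/'  # hypothetical mount point

        def encode(self, resource_identifier, context=None):
            return resource_identifier.replace(self.prefix, '', 1)

        def decode(self, resource_identifier, context=None):
            return self.prefix + resource_identifier
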
- - ''' - return resource_identifier diff --git a/pype/vendor/ftrack_api_old/session.py b/pype/vendor/ftrack_api_old/session.py deleted file mode 100644 index 0986962ca4..0000000000 --- a/pype/vendor/ftrack_api_old/session.py +++ /dev/null @@ -1,2515 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import json -import logging -import collections -import datetime -import os -import getpass -import functools -import itertools -import distutils.version -import hashlib -import tempfile -import threading -import atexit -import warnings - -import requests -import requests.auth -import arrow -import clique - -import ftrack_api_old -import ftrack_api_old.exception -import ftrack_api_old.entity.factory -import ftrack_api_old.entity.base -import ftrack_api_old.entity.location -import ftrack_api_old.cache -import ftrack_api_old.symbol -import ftrack_api_old.query -import ftrack_api_old.attribute -import ftrack_api_old.collection -import ftrack_api_old.event.hub -import ftrack_api_old.event.base -import ftrack_api_old.plugin -import ftrack_api_old.inspection -import ftrack_api_old.operation -import ftrack_api_old.accessor.disk -import ftrack_api_old.structure.origin -import ftrack_api_old.structure.entity_id -import ftrack_api_old.accessor.server -import ftrack_api_old._centralized_storage_scenario -import ftrack_api_old.logging -from ftrack_api_old.logging import LazyLogMessage as L - -try: - from weakref import WeakMethod -except ImportError: - from ftrack_api_old._weakref import WeakMethod - - -class SessionAuthentication(requests.auth.AuthBase): - '''Attach ftrack session authentication information to requests.''' - - def __init__(self, api_key, api_user): - '''Initialise with *api_key* and *api_user*.''' - self.api_key = api_key - self.api_user = api_user - super(SessionAuthentication, self).__init__() - - def __call__(self, request): - '''Modify *request* to have appropriate headers.''' - request.headers.update({ - 'ftrack-api-key': self.api_key, - 'ftrack-user': self.api_user - }) - return request - - -class Session(object): - '''An isolated session for interaction with an ftrack server.''' - - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`ftrack_api_old_KEY` and *api_user* from - :envvar:`ftrack_api_old_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api_old.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. 
The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api_old.cache.MemoryCache`. Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api_old.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api_old.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. - If not set, :envvar:`ftrack_api_old_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. 
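
Given the lookup order described above, typical construction relies purely on environment variables; a minimal sketch with placeholder values:

    import os

    os.environ['FTRACK_SERVER'] = 'https://example.ftrackapp.com'
    os.environ['FTRACK_API_KEY'] = '00000000-aaaa-bbbb-cccc-dddddddddddd'
    os.environ['FTRACK_API_USER'] = 'jana'

    # Skip the background event hub connection; publishing and subscribing
    # to local events still works without it.
    session = Session(auto_connect_event_hub=False)
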
- self.recorded_operations = ftrack_api_old.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api_old.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api_old.cache.LayeredCache([ - ftrack_api_old.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ftrack_api_old.event.hub.EventHub( - self._server_url, - self._api_user, - self._api_key - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(WeakMethod(self.close)) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? - if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'ftrack_api_old_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_old_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api_old._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api_old.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def __enter__(self): - '''Return session as context manager.''' - return self - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit session context, closing session in process.''' - self.close() - - @property - def _request(self): - '''Return request session. - - Raise :exc:`ftrack_api_old.exception.ConnectionClosedError` if session has - been closed and connection unavailable. 
- - ''' - if self._managed_request is None: - raise ftrack_api_old.exception.ConnectionClosedError() - - return self._managed_request - - @_request.setter - def _request(self, value): - '''Set request session to *value*.''' - self._managed_request = value - - @property - def closed(self): - '''Return whether session has been closed.''' - return self._closed - - @property - def server_information(self): - '''Return server information such as server version.''' - return self._server_information.copy() - - @property - def server_url(self): - '''Return server ulr used for session.''' - return self._server_url - - @property - def api_user(self): - '''Return username used for session.''' - return self._api_user - - @property - def api_key(self): - '''Return API key used for session.''' - return self._api_key - - @property - def event_hub(self): - '''Return event hub.''' - return self._event_hub - - @property - def _local_cache(self): - '''Return top level memory cache.''' - return self.cache.caches[0] - - def check_server_compatibility(self): - '''Check compatibility with connected server.''' - server_version = self.server_information.get('version') - if server_version is None: - raise ftrack_api_old.exception.ServerCompatibilityError( - 'Could not determine server version.' - ) - - # Perform basic version check. - if server_version != 'dev': - min_server_version = '3.3.11' - if ( - distutils.version.LooseVersion(min_server_version) - > distutils.version.LooseVersion(server_version) - ): - raise ftrack_api_old.exception.ServerCompatibilityError( - 'Server version {0} incompatible with this version of the ' - 'API which requires a server version >= {1}'.format( - server_version, - min_server_version - ) - ) - - def close(self): - '''Close session. - - Close connections to server. Clear any pending operations and local - cache. - - Use this to ensure that session is cleaned up properly after use. - - ''' - if self.closed: - self.logger.debug('Session already closed.') - return - - self._closed = True - - self.logger.debug('Closing session.') - if self.recorded_operations: - self.logger.warning( - 'Closing session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Close connections. - self._request.close() - self._request = None - - try: - self.event_hub.disconnect() - if self._auto_connect_event_hub_thread: - self._auto_connect_event_hub_thread.join() - except ftrack_api_old.exception.EventHubConnectionError: - pass - - self.logger.debug('Session closed.') - - def reset(self): - '''Reset session clearing local state. - - Clear all pending operations and expunge all entities from session. - - Also clear the local cache. If the cache used by the session is a - :class:`~ftrack_api_old.cache.LayeredCache` then only clear top level cache. - Otherwise, clear the entire cache. - - Plugins are not rediscovered or reinitialised, but certain plugin events - are re-emitted to properly configure session aspects that are dependant - on cache (such as location plugins). - - .. warning:: - - Previously attached entities are not reset in memory and will retain - their state, but should not be used. Doing so will cause errors. - - ''' - if self.recorded_operations: - self.logger.warning( - 'Resetting session with pending operations not persisted.' - ) - - # Clear pending operations. 
- self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Re-configure certain session aspects that may be dependant on cache. - self._configure_locations() - - self.event_hub.publish( - ftrack_api_old.event.base.Event( - topic='ftrack.api.session.reset', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def auto_populating(self, auto_populate): - '''Temporarily set auto populate to *auto_populate*. - - The current setting will be restored automatically when done. - - Example:: - - with session.auto_populating(False): - print entity['name'] - - ''' - return AutoPopulatingContext(self, auto_populate) - - def operation_recording(self, record_operations): - '''Temporarily set operation recording to *record_operations*. - - The current setting will be restored automatically when done. - - Example:: - - with session.operation_recording(False): - entity['name'] = 'change_not_recorded' - - ''' - return OperationRecordingContext(self, record_operations) - - @property - def created(self): - '''Return list of newly created entities.''' - entities = self._local_cache.values() - states = ftrack_api_old.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api_old.symbol.CREATED - ] - - @property - def modified(self): - '''Return list of locally modified entities.''' - entities = self._local_cache.values() - states = ftrack_api_old.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api_old.symbol.MODIFIED - ] - - @property - def deleted(self): - '''Return list of deleted entities.''' - entities = self._local_cache.values() - states = ftrack_api_old.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api_old.symbol.DELETED - ] - - def reset_remote(self, reset_type, entity=None): - '''Perform a server side reset. - - *reset_type* is a server side supported reset type, - passing the optional *entity* to perform the option upon. - - Please refer to ftrack documentation for a complete list of - supported server side reset types. - ''' - - payload = { - 'action': 'reset_remote', - 'reset_type': reset_type - } - - if entity is not None: - payload.update({ - 'entity_type': entity.entity_type, - 'entity_key': entity.get('id') - }) - - result = self.call( - [payload] - ) - - return result[0]['data'] - - def create(self, entity_type, data=None, reconstructing=False): - '''Create and return an entity of *entity_type* with initial *data*. - - If specified, *data* should be a dictionary of key, value pairs that - should be used to populate attributes on the entity. - - If *reconstructing* is False then create a new entity setting - appropriate defaults for missing data. If True then reconstruct an - existing entity. - - Constructed entity will be automatically :meth:`merged ` - into the session. 
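
The two context managers combine naturally when an attribute should be touched without server round trips or recorded operations; a sketch assuming an already attached `entity`:

    with session.auto_populating(False):
        with session.operation_recording(False):
            # Neither fetches unset attributes nor records the change.
            entity['name'] = 'scratch value'
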
- - ''' - entity = self._create(entity_type, data, reconstructing=reconstructing) - entity = self.merge(entity) - return entity - - def _create(self, entity_type, data, reconstructing): - '''Create and return an entity of *entity_type* with initial *data*.''' - try: - EntityTypeClass = self.types[entity_type] - except KeyError: - raise ftrack_api_old.exception.UnrecognisedEntityTypeError(entity_type) - - return EntityTypeClass(self, data=data, reconstructing=reconstructing) - - def ensure(self, entity_type, data, identifying_keys=None): - '''Retrieve entity of *entity_type* with *data*, creating if necessary. - - *data* should be a dictionary of the same form passed to :meth:`create`. - - By default, check for an entity that has matching *data*. If - *identifying_keys* is specified as a list of keys then only consider the - values from *data* for those keys when searching for existing entity. If - *data* is missing an identifying key then raise :exc:`KeyError`. - - If no *identifying_keys* specified then use all of the keys from the - passed *data*. Raise :exc:`ValueError` if no *identifying_keys* can be - determined. - - Each key should be a string. - - .. note:: - - Currently only top level scalars supported. To ensure an entity by - looking at relationships, manually issue the :meth:`query` and - :meth:`create` calls. - - If more than one entity matches the determined filter criteria then - raise :exc:`~ftrack_api_old.exception.MultipleResultsFoundError`. - - If no matching entity found then create entity using supplied *data*. - - If a matching entity is found, then update it if necessary with *data*. - - .. note:: - - If entity created or updated then a :meth:`commit` will be issued - automatically. If this behaviour is undesired, perform the - :meth:`query` and :meth:`create` calls manually. - - Return retrieved or created entity. - - Example:: - - # First time, a new entity with `username=martin` is created. - entity = session.ensure('User', {'username': 'martin'}) - - # After that, the existing entity is retrieved. - entity = session.ensure('User', {'username': 'martin'}) - - # When existing entity retrieved, entity may also be updated to - # match supplied data. - entity = session.ensure( - 'User', {'username': 'martin', 'email': 'martin@example.com'} - ) - - ''' - if not identifying_keys: - identifying_keys = data.keys() - - self.logger.debug(L( - 'Ensuring entity {0!r} with data {1!r} using identifying keys ' - '{2!r}', entity_type, data, identifying_keys - )) - - if not identifying_keys: - raise ValueError( - 'Could not determine any identifying data to check against ' - 'when ensuring {0!r} with data {1!r}. Identifying keys: {2!r}' - .format(entity_type, data, identifying_keys) - ) - - expression = '{0} where'.format(entity_type) - criteria = [] - for identifying_key in identifying_keys: - value = data[identifying_key] - - if isinstance(value, basestring): - value = '"{0}"'.format(value) - - elif isinstance( - value, (arrow.Arrow, datetime.datetime, datetime.date) - ): - # Server does not store microsecond or timezone currently so - # need to strip from query. - # TODO: When datetime handling improved, update this logic. 
- value = ( - arrow.get(value).naive.replace(microsecond=0).isoformat() - ) - value = '"{0}"'.format(value) - - criteria.append('{0} is {1}'.format(identifying_key, value)) - - expression = '{0} {1}'.format( - expression, ' and '.join(criteria) - ) - - try: - entity = self.query(expression).one() - - except ftrack_api_old.exception.NoResultFoundError: - self.logger.debug('Creating entity as did not already exist.') - - # Create entity. - entity = self.create(entity_type, data) - self.commit() - - else: - self.logger.debug('Retrieved matching existing entity.') - - # Update entity if required. - updated = False - for key, target_value in data.items(): - if entity[key] != target_value: - entity[key] = target_value - updated = True - - if updated: - self.logger.debug('Updating existing entity to match new data.') - self.commit() - - return entity - - def delete(self, entity): - '''Mark *entity* for deletion.''' - if self.record_operations: - self.recorded_operations.push( - ftrack_api_old.operation.DeleteEntityOperation( - entity.entity_type, - ftrack_api_old.inspection.primary_key(entity) - ) - ) - - def get(self, entity_type, entity_key): - '''Return entity of *entity_type* with unique *entity_key*. - - First check for an existing entry in the configured cache, otherwise - issue a query to the server. - - If no matching entity found, return None. - - ''' - self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key)) - - primary_key_definition = self.types[entity_type].primary_key_attributes - if isinstance(entity_key, basestring): - entity_key = [entity_key] - - if len(entity_key) != len(primary_key_definition): - raise ValueError( - 'Incompatible entity_key {0!r} supplied. Entity type {1} ' - 'expects a primary key composed of {2} values ({3}).' - .format( - entity_key, entity_type, len(primary_key_definition), - ', '.join(primary_key_definition) - ) - ) - - entity = None - try: - entity = self._get(entity_type, entity_key) - - - except KeyError: - - # Query for matching entity. - self.logger.debug( - 'Entity not present in cache. Issuing new query.' - ) - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - expression = '{0} where ({1})'.format( - entity_type, ' and '.join(condition) - ) - - results = self.query(expression).all() - if results: - entity = results[0] - - return entity - - def _get(self, entity_type, entity_key): - '''Return cached entity of *entity_type* with unique *entity_key*. - - Raise :exc:`KeyError` if no such entity in the cache. - - ''' - # Check cache for existing entity emulating - # ftrack_api_old.inspection.identity result object to pass to key maker. - cache_key = self.cache_key_maker.key( - (str(entity_type), map(str, entity_key)) - ) - self.logger.debug(L( - 'Checking cache for entity with key {0}', cache_key - )) - entity = self.cache.get(cache_key) - self.logger.debug(L( - 'Retrieved existing entity from cache: {0} at {1}', - entity, id(entity) - )) - - return entity - - def query(self, expression, page_size=500): - '''Query against remote data according to *expression*. - - *expression* is not executed directly. Instead return an - :class:`ftrack_api_old.query.QueryResult` instance that will execute remote - call on access. - - *page_size* specifies the maximum page size that the returned query - result object should be configured with. - - .. 
seealso:: :ref:`querying`
-
-        '''
-        self.logger.debug(L('Query {0!r}', expression))
-
-        # Add in sensible projections if none specified. Note that this is
-        # done here rather than on the server to allow local modification of the
-        # schema setting to include commonly used custom attributes for example.
-        # TODO: Use a proper parser perhaps?
-        if not expression.startswith('select'):
-            entity_type = expression.split(' ', 1)[0]
-            EntityTypeClass = self.types[entity_type]
-            projections = EntityTypeClass.default_projections
-
-            expression = 'select {0} from {1}'.format(
-                ', '.join(projections),
-                expression
-            )
-
-        query_result = ftrack_api_old.query.QueryResult(
-            self, expression, page_size=page_size
-        )
-        return query_result
-
-    def _query(self, expression):
-        '''Execute *expression* and return (records, metadata).
-
-        Records will be a list of entities retrieved via the query and metadata
-        a dictionary of accompanying information about the result set.
-
-        '''
-        # TODO: Actually support batching several queries together.
-        # TODO: Should batches have unique ids to match them up later.
-        batch = [{
-            'action': 'query',
-            'expression': expression
-        }]
-
-        # TODO: When should this execute? How to handle background=True?
-        results = self.call(batch)
-
-        # Merge entities into local cache and return merged entities.
-        data = []
-        merged = dict()
-        for entity in results[0]['data']:
-            data.append(self._merge_recursive(entity, merged))
-
-        return data, results[0]['metadata']
-
-    def merge(self, value, merged=None):
-        '''Merge *value* into session and return merged value.
-
-        *merged* should be a mapping to record merges during run and should be
-        used to avoid infinite recursion. If not set, it will default to a
-        dictionary.
-
-        '''
-        if merged is None:
-            merged = {}
-
-        with self.operation_recording(False):
-            return self._merge(value, merged)
-
-    def _merge(self, value, merged):
-        '''Return merged *value*.'''
-        log_debug = self.logger.isEnabledFor(logging.DEBUG)
-
-        if isinstance(value, ftrack_api_old.entity.base.Entity):
-            log_debug and self.logger.debug(
-                'Merging entity into session: {0} at {1}'
-                .format(value, id(value))
-            )
-
-            return self._merge_entity(value, merged=merged)
-
-        elif isinstance(value, ftrack_api_old.collection.Collection):
-            log_debug and self.logger.debug(
-                'Merging collection into session: {0!r} at {1}'
-                .format(value, id(value))
-            )
-
-            merged_collection = []
-            for entry in value:
-                merged_collection.append(
-                    self._merge(entry, merged=merged)
-                )
-
-            return merged_collection
-
-        elif isinstance(value, ftrack_api_old.collection.MappedCollectionProxy):
-            log_debug and self.logger.debug(
-                'Merging mapped collection into session: {0!r} at {1}'
-                .format(value, id(value))
-            )
-
-            merged_collection = []
-            for entry in value.collection:
-                merged_collection.append(
-                    self._merge(entry, merged=merged)
-                )
-
-            return merged_collection
-
-        else:
-            return value
-
-    def _merge_recursive(self, entity, merged=None):
-        '''Merge *entity* and all its attributes recursively.'''
-        log_debug = self.logger.isEnabledFor(logging.DEBUG)
-
-        if merged is None:
-            merged = {}
-
-        attached = self.merge(entity, merged)
-
-        for attribute in entity.attributes:
-            # Remote attributes.
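-            # Only remote (server-side) values are walked below; local,
-            # uncommitted modifications are not touched by the recursive
-            # merge.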
- remote_value = attribute.get_remote_value(entity) - - if isinstance( - remote_value, - ( - ftrack_api_old.entity.base.Entity, - ftrack_api_old.collection.Collection, - ftrack_api_old.collection.MappedCollectionProxy - ) - ): - log_debug and self.logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - if isinstance(remote_value, ftrack_api_old.entity.base.Entity): - self._merge_recursive(remote_value, merged=merged) - - elif isinstance( - remote_value, ftrack_api_old.collection.Collection - ): - for entry in remote_value: - self._merge_recursive(entry, merged=merged) - - elif isinstance( - remote_value, ftrack_api_old.collection.MappedCollectionProxy - ): - for entry in remote_value.collection: - self._merge_recursive(entry, merged=merged) - - return attached - - def _merge_entity(self, entity, merged=None): - '''Merge *entity* into session returning merged entity. - - Merge is recursive so any references to other entities will also be - merged. - - *entity* will never be modified in place. Ensure that the returned - merged entity instance is used. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - with self.auto_populating(False): - entity_key = self.cache_key_maker.key( - ftrack_api_old.inspection.identity(entity) - ) - - # Check whether this entity has already been processed. - attached_entity = merged.get(entity_key) - if attached_entity is not None: - log_debug and self.logger.debug( - 'Entity already processed for key {0} as {1} at {2}' - .format(entity_key, attached_entity, id(attached_entity)) - ) - - return attached_entity - else: - log_debug and self.logger.debug( - 'Entity not already processed for key {0}.' - .format(entity_key) - ) - - # Check for existing instance of entity in cache. - log_debug and self.logger.debug( - 'Checking for entity in cache with key {0}'.format(entity_key) - ) - try: - attached_entity = self.cache.get(entity_key) - - log_debug and self.logger.debug( - 'Retrieved existing entity from cache: {0} at {1}' - .format(attached_entity, id(attached_entity)) - ) - - except KeyError: - # Construct new minimal instance to store in cache. - attached_entity = self._create( - entity.entity_type, {}, reconstructing=True - ) - - log_debug and self.logger.debug( - 'Entity not present in cache. Constructed new instance: ' - '{0} at {1}'.format(attached_entity, id(attached_entity)) - ) - - # Mark entity as seen to avoid infinite loops. - merged[entity_key] = attached_entity - - changes = attached_entity.merge(entity, merged=merged) - if changes: - self.cache.set(entity_key, attached_entity) - self.logger.debug('Cache updated with merged entity.') - - else: - self.logger.debug( - 'Cache not updated with merged entity as no differences ' - 'detected.' - ) - - return attached_entity - - def populate(self, entities, projections): - '''Populate *entities* with attributes specified by *projections*. - - Any locally set values included in the *projections* will not be - overwritten with the retrieved remote value. If this 'synchronise' - behaviour is required, first clear the relevant values on the entity by - setting them to :attr:`ftrack_api_old.symbol.NOT_SET`. Deleting the key will - have the same effect:: - - >>> print(user['username']) - martin - >>> del user['username'] - >>> print(user['username']) - Symbol(NOT_SET) - - .. note:: - - Entities that have been created and not yet persisted will be - skipped as they have no remote values to fetch. 
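-
-        Example (illustrative sketch; the attribute names are assumed)::
-
-            >>> users = session.query('User').all()
-            >>> session.populate(users, 'username, email')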
- - ''' - self.logger.debug(L( - 'Populate {0!r} projections for {1}.', projections, entities - )) - - if not isinstance( - entities, (list, tuple, ftrack_api_old.query.QueryResult) - ): - entities = [entities] - - # TODO: How to handle a mixed collection of different entity types - # Should probably fail, but need to consider handling hierarchies such - # as User and Group both deriving from Resource. Actually, could just - # proceed and ignore projections that are not present in entity type. - - entities_to_process = [] - - for entity in entities: - if ftrack_api_old.inspection.state(entity) is ftrack_api_old.symbol.CREATED: - # Created entities that are not yet persisted have no remote - # values. Don't raise an error here as it is reasonable to - # iterate over an entities properties and see that some of them - # are NOT_SET. - self.logger.debug(L( - 'Skipping newly created entity {0!r} for population as no ' - 'data will exist in the remote for this entity yet.', entity - )) - continue - - entities_to_process.append(entity) - - if entities_to_process: - reference_entity = entities_to_process[0] - entity_type = reference_entity.entity_type - query = 'select {0} from {1}'.format(projections, entity_type) - - primary_key_definition = reference_entity.primary_key_attributes - entity_keys = [ - ftrack_api_old.inspection.primary_key(entity).values() - for entity in entities_to_process - ] - - if len(primary_key_definition) > 1: - # Composite keys require full OR syntax unfortunately. - conditions = [] - for entity_key in entity_keys: - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - conditions.append('({0})'.format('and '.join(condition))) - - query = '{0} where {1}'.format(query, ' or '.join(conditions)) - - else: - primary_key = primary_key_definition[0] - - if len(entity_keys) > 1: - query = '{0} where {1} in ({2})'.format( - query, primary_key, - ','.join([ - str(entity_key[0]) for entity_key in entity_keys - ]) - ) - else: - query = '{0} where {1} is {2}'.format( - query, primary_key, str(entity_keys[0][0]) - ) - - result = self.query(query) - - # Fetch all results now. Doing so will cause them to populate the - # relevant entities in the cache. - result.all() - - # TODO: Should we check that all requested attributes were - # actually populated? If some weren't would we mark that to avoid - # repeated calls or perhaps raise an error? - - # TODO: Make atomic. - def commit(self): - '''Commit all local changes to the server.''' - batch = [] - - with self.auto_populating(False): - for operation in self.recorded_operations: - - # Convert operation to payload. - if isinstance( - operation, ftrack_api_old.operation.CreateEntityOperation - ): - # At present, data payload requires duplicating entity - # type in data and also ensuring primary key added. - entity_data = { - '__entity_type__': operation.entity_type, - } - entity_data.update(operation.entity_key) - entity_data.update(operation.entity_data) - - payload = OperationPayload({ - 'action': 'create', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api_old.operation.UpdateEntityOperation - ): - entity_data = { - # At present, data payload requires duplicating entity - # type. 
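-                    # For illustration only (entity type, key and attribute
-                    # below are assumed examples), the resulting payload has
-                    # the shape:
-                    #   {'action': 'update',
-                    #    'entity_type': 'Task',
-                    #    'entity_key': ['<task id>'],
-                    #    'entity_data': {'__entity_type__': 'Task',
-                    #                    '<attribute>': '<new value>'}}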
- '__entity_type__': operation.entity_type, - operation.attribute_name: operation.new_value - } - - payload = OperationPayload({ - 'action': 'update', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api_old.operation.DeleteEntityOperation - ): - payload = OperationPayload({ - 'action': 'delete', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values() - }) - - else: - raise ValueError( - 'Cannot commit. Unrecognised operation type {0} ' - 'detected.'.format(type(operation)) - ) - - batch.append(payload) - - # Optimise batch. - # TODO: Might be better to perform these on the operations list instead - # so all operation contextual information available. - - # If entity was created and deleted in one batch then remove all - # payloads for that entity. - created = set() - deleted = set() - - for payload in batch: - if payload['action'] == 'create': - created.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - elif payload['action'] == 'delete': - deleted.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - created_then_deleted = deleted.intersection(created) - if created_then_deleted: - optimised_batch = [] - for payload in batch: - entity_type = payload.get('entity_type') - entity_key = str(payload.get('entity_key')) - - if (entity_type, entity_key) in created_then_deleted: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Remove early update operations so that only last operation on - # attribute is applied server side. - updates_map = set() - for payload in reversed(batch): - if payload['action'] in ('update', ): - for key, value in payload['entity_data'].items(): - if key == '__entity_type__': - continue - - identity = ( - payload['entity_type'], str(payload['entity_key']), key - ) - if identity in updates_map: - del payload['entity_data'][key] - else: - updates_map.add(identity) - - # Remove NOT_SET values from entity_data. - for payload in batch: - entity_data = payload.get('entity_data', {}) - for key, value in entity_data.items(): - if value is ftrack_api_old.symbol.NOT_SET: - del entity_data[key] - - # Remove payloads with redundant entity_data. - optimised_batch = [] - for payload in batch: - entity_data = payload.get('entity_data') - if entity_data is not None: - keys = entity_data.keys() - if not keys or keys == ['__entity_type__']: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Collapse updates that are consecutive into one payload. Also, collapse - # updates that occur immediately after creation into the create payload. - optimised_batch = [] - previous_payload = None - - for payload in batch: - if ( - previous_payload is not None - and payload['action'] == 'update' - and previous_payload['action'] in ('create', 'update') - and previous_payload['entity_type'] == payload['entity_type'] - and previous_payload['entity_key'] == payload['entity_key'] - ): - previous_payload['entity_data'].update(payload['entity_data']) - continue - - else: - optimised_batch.append(payload) - previous_payload = payload - - batch = optimised_batch - - # Process batch. - if batch: - result = self.call(batch) - - # Clear recorded operations. - self.recorded_operations.clear() - - # As optimisation, clear local values which are not primary keys to - # avoid redundant merges when merging references. 
Note: primary keys - # remain as needed for cache retrieval on new entities. - with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - for attribute in entity: - if attribute not in entity.primary_key_attributes: - del entity[attribute] - - # Process results merging into cache relevant data. - for entry in result: - - if entry['action'] in ('create', 'update'): - # Merge returned entities into local cache. - self.merge(entry['data']) - - elif entry['action'] == 'delete': - # TODO: Detach entity - need identity returned? - # TODO: Expunge entity from cache. - pass - # Clear remaining local state, including local values for primary - # keys on entities that were merged. - with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - entity.clear() - - def rollback(self): - '''Clear all recorded operations and local state. - - Typically this would be used following a failed :meth:`commit` in order - to revert the session to a known good state. - - Newly created entities not yet persisted will be detached from the - session / purged from cache and no longer contribute, but the actual - objects are not deleted from memory. They should no longer be used and - doing so could cause errors. - - ''' - with self.auto_populating(False): - with self.operation_recording(False): - - # Detach all newly created entities and remove from cache. This - # is done because simply clearing the local values of newly - # created entities would result in entities with no identity as - # primary key was local while not persisted. In addition, it - # makes no sense for failed created entities to exist in session - # or cache. - for operation in self.recorded_operations: - if isinstance( - operation, ftrack_api_old.operation.CreateEntityOperation - ): - entity_key = str(( - str(operation.entity_type), - operation.entity_key.values() - )) - try: - self.cache.remove(entity_key) - except KeyError: - pass - - # Clear locally stored modifications on remaining entities. - for entity in self._local_cache.values(): - entity.clear() - - self.recorded_operations.clear() - - def _fetch_server_information(self): - '''Return server information.''' - result = self.call([{'action': 'query_server_information'}]) - return result[0] - - def _discover_plugins(self, plugin_arguments=None): - '''Find and load plugins in search paths. - - Each discovered module should implement a register function that - accepts this session as first argument. Typically the function should - register appropriate event listeners against the session's event hub. - - def register(session): - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) - - *plugin_arguments* should be an optional mapping of keyword arguments - and values to pass to plugin register functions upon discovery. - - ''' - plugin_arguments = plugin_arguments or {} - ftrack_api_old.plugin.discover( - self._plugin_paths, [self], plugin_arguments - ) - - def _read_schemas_from_cache(self, schema_cache_path): - '''Return schemas and schema hash from *schema_cache_path*. - - *schema_cache_path* should be the path to the file containing the - schemas in JSON format. 
- - ''' - self.logger.debug(L( - 'Reading schemas from cache {0!r}', schema_cache_path - )) - - if not os.path.exists(schema_cache_path): - self.logger.info(L( - 'Cache file not found at {0!r}.', schema_cache_path - )) - - return [], None - - with open(schema_cache_path, 'r') as schema_file: - schemas = json.load(schema_file) - hash_ = hashlib.md5( - json.dumps(schemas, sort_keys=True) - ).hexdigest() - - return schemas, hash_ - - def _write_schemas_to_cache(self, schemas, schema_cache_path): - '''Write *schemas* to *schema_cache_path*. - - *schema_cache_path* should be a path to a file that the schemas can be - written to in JSON format. - - ''' - self.logger.debug(L( - 'Updating schema cache {0!r} with new schemas.', schema_cache_path - )) - - with open(schema_cache_path, 'w') as local_cache_file: - json.dump(schemas, local_cache_file, indent=4) - - def _load_schemas(self, schema_cache_path): - '''Load schemas. - - First try to load schemas from cache at *schema_cache_path*. If the - cache is not available or the cache appears outdated then load schemas - from server and store fresh copy in cache. - - If *schema_cache_path* is set to `False`, always load schemas from - server bypassing cache. - - ''' - local_schema_hash = None - schemas = [] - - if schema_cache_path: - try: - schemas, local_schema_hash = self._read_schemas_from_cache( - schema_cache_path - ) - except (IOError, TypeError, AttributeError, ValueError): - # Catch any known exceptions when trying to read the local - # schema cache to prevent API from being unusable. - self.logger.exception(L( - 'Schema cache could not be loaded from {0!r}', - schema_cache_path - )) - - # Use `dictionary.get` to retrieve hash to support older version of - # ftrack server not returning a schema hash. - server_hash = self._server_information.get( - 'schema_hash', False - ) - if local_schema_hash != server_hash: - self.logger.debug(L( - 'Loading schemas from server due to hash not matching.' - 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash - )) - schemas = self.call([{'action': 'query_schemas'}])[0] - - if schema_cache_path: - try: - self._write_schemas_to_cache(schemas, schema_cache_path) - except (IOError, TypeError): - self.logger.exception(L( - 'Failed to update schema cache {0!r}.', - schema_cache_path - )) - - else: - self.logger.debug(L( - 'Using cached schemas from {0!r}', schema_cache_path - )) - - return schemas - - def _build_entity_type_classes(self, schemas): - '''Build default entity type classes.''' - fallback_factory = ftrack_api_old.entity.factory.StandardFactory() - classes = {} - - for schema in schemas: - results = self.event_hub.publish( - ftrack_api_old.event.base.Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ), - synchronous=True - ) - - results = [result for result in results if result is not None] - - if not results: - self.logger.debug(L( - 'Using default StandardFactory to construct entity type ' - 'class for "{0}"', schema['id'] - )) - entity_type_class = fallback_factory.create(schema) - - elif len(results) > 1: - raise ValueError( - 'Expected single entity type to represent schema "{0}" but ' - 'received {1} entity types instead.' - .format(schema['id'], len(results)) - ) - - else: - entity_type_class = results[0] - - classes[entity_type_class.entity_type] = entity_type_class - - return classes - - def _configure_locations(self): - '''Configure locations.''' - # First configure builtin locations, by injecting them into local cache. 
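-        # Each builtin location below is reconstructed with a fixed id from
-        # ftrack_api_old.symbol and assigned a numeric priority;
-        # _get_locations() sorts ascending, so a lower number is preferred
-        # when pick_location() chooses between accessible locations.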
-
-        # Origin.
-        location = self.create(
-            'Location',
-            data=dict(
-                name='ftrack.origin',
-                id=ftrack_api_old.symbol.ORIGIN_LOCATION_ID
-            ),
-            reconstructing=True
-        )
-        ftrack_api_old.mixin(
-            location, ftrack_api_old.entity.location.OriginLocationMixin,
-            name='OriginLocation'
-        )
-        location.accessor = ftrack_api_old.accessor.disk.DiskAccessor(prefix='')
-        location.structure = ftrack_api_old.structure.origin.OriginStructure()
-        location.priority = 100
-
-        # Unmanaged.
-        location = self.create(
-            'Location',
-            data=dict(
-                name='ftrack.unmanaged',
-                id=ftrack_api_old.symbol.UNMANAGED_LOCATION_ID
-            ),
-            reconstructing=True
-        )
-        ftrack_api_old.mixin(
-            location, ftrack_api_old.entity.location.UnmanagedLocationMixin,
-            name='UnmanagedLocation'
-        )
-        location.accessor = ftrack_api_old.accessor.disk.DiskAccessor(prefix='')
-        location.structure = ftrack_api_old.structure.origin.OriginStructure()
-        # location.resource_identifier_transformer = (
-        #     ftrack_api_old.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session)
-        # )
-        location.priority = 90
-
-        # Review.
-        location = self.create(
-            'Location',
-            data=dict(
-                name='ftrack.review',
-                id=ftrack_api_old.symbol.REVIEW_LOCATION_ID
-            ),
-            reconstructing=True
-        )
-        ftrack_api_old.mixin(
-            location, ftrack_api_old.entity.location.UnmanagedLocationMixin,
-            name='UnmanagedLocation'
-        )
-        location.accessor = ftrack_api_old.accessor.disk.DiskAccessor(prefix='')
-        location.structure = ftrack_api_old.structure.origin.OriginStructure()
-        location.priority = 110
-
-        # Server.
-        location = self.create(
-            'Location',
-            data=dict(
-                name='ftrack.server',
-                id=ftrack_api_old.symbol.SERVER_LOCATION_ID
-            ),
-            reconstructing=True
-        )
-        ftrack_api_old.mixin(
-            location, ftrack_api_old.entity.location.ServerLocationMixin,
-            name='ServerLocation'
-        )
-        location.accessor = ftrack_api_old.accessor.server._ServerAccessor(
-            session=self
-        )
-        location.structure = ftrack_api_old.structure.entity_id.EntityIdStructure()
-        location.priority = 150
-
-        # Master location based on server scenario.
-        storage_scenario = self.server_information.get('storage_scenario')
-
-        if (
-            storage_scenario and
-            storage_scenario.get('scenario')
-        ):
-            self.event_hub.publish(
-                ftrack_api_old.event.base.Event(
-                    topic='ftrack.storage-scenario.activate',
-                    data=dict(
-                        storage_scenario=storage_scenario
-                    )
-                ),
-                synchronous=True
-            )
-
-        # Next, allow further configuration of locations via events.
-        self.event_hub.publish(
-            ftrack_api_old.event.base.Event(
-                topic='ftrack.api.session.configure-location',
-                data=dict(
-                    session=self
-                )
-            ),
-            synchronous=True
-        )
-
-    @ftrack_api_old.logging.deprecation_warning(
-        'Session._call is now available as public method Session.call. The '
-        'private method will be removed in version 2.0.'
-    )
-    def _call(self, data):
-        '''Make request to server with *data* batch describing the actions.
-
-        .. note::
-
-            This private method is now available as public method
-            :meth:`call`. This alias remains for backwards
-            compatibility, but will be removed in version 2.0.
- - ''' - return self.call(data) - - def call(self, data): - '''Make request to server with *data* batch describing the actions.''' - url = self._server_url + '/api' - headers = { - 'content-type': 'application/json', - 'accept': 'application/json' - } - data = self.encode(data, entity_attribute_strategy='modified_only') - - self.logger.debug(L('Calling server {0} with {1!r}', url, data)) - - response = self._request.post( - url, - headers=headers, - data=data - ) - - self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) - - self.logger.debug(L('Response: {0!r}', response.text)) - try: - result = self.decode(response.text) - - except Exception: - error_message = ( - 'Server reported error in unexpected format. Raw error was: {0}' - .format(response.text) - ) - self.logger.exception(error_message) - raise ftrack_api_old.exception.ServerError(error_message) - - else: - if 'exception' in result: - # Handle exceptions. - error_message = 'Server reported error: {0}({1})'.format( - result['exception'], result['content'] - ) - self.logger.exception(error_message) - raise ftrack_api_old.exception.ServerError(error_message) - - return result - - def encode(self, data, entity_attribute_strategy='set_only'): - '''Return *data* encoded as JSON formatted string. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. The following strategies are available: - - * *all* - Encode all attributes, loading any that are currently NOT_SET. - * *set_only* - Encode only attributes that are currently set without - loading any from the remote. - * *modified_only* - Encode only attributes that have been modified - locally. - * *persisted_only* - Encode only remote (persisted) attribute values. - - ''' - entity_attribute_strategies = ( - 'all', 'set_only', 'modified_only', 'persisted_only' - ) - if entity_attribute_strategy not in entity_attribute_strategies: - raise ValueError( - 'Unsupported entity_attribute_strategy "{0}". Must be one of ' - '{1}'.format( - entity_attribute_strategy, - ', '.join(entity_attribute_strategies) - ) - ) - - return json.dumps( - data, - sort_keys=True, - default=functools.partial( - self._encode, - entity_attribute_strategy=entity_attribute_strategy - ) - ) - - def _encode(self, item, entity_attribute_strategy='set_only'): - '''Return JSON encodable version of *item*. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. See :meth:`Session.encode` for available strategies. 
-
-        '''
-        if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)):
-            return {
-                '__type__': 'datetime',
-                'value': item.isoformat()
-            }
-
-        if isinstance(item, OperationPayload):
-            data = dict(item.items())
-            if "entity_data" in data:
-                for key, value in data["entity_data"].items():
-                    if isinstance(value, ftrack_api_old.entity.base.Entity):
-                        data["entity_data"][key] = self.entity_reference(value)
-
-            return data
-
-        if isinstance(item, ftrack_api_old.entity.base.Entity):
-            data = self.entity_reference(item)
-
-            with self.auto_populating(True):
-
-                for attribute in item.attributes:
-                    value = ftrack_api_old.symbol.NOT_SET
-
-                    if entity_attribute_strategy == 'all':
-                        value = attribute.get_value(item)
-
-                    elif entity_attribute_strategy == 'set_only':
-                        if attribute.is_set(item):
-                            value = attribute.get_local_value(item)
-                            if value is ftrack_api_old.symbol.NOT_SET:
-                                value = attribute.get_remote_value(item)
-
-                    elif entity_attribute_strategy == 'modified_only':
-                        if attribute.is_modified(item):
-                            value = attribute.get_local_value(item)
-
-                    elif entity_attribute_strategy == 'persisted_only':
-                        if not attribute.computed:
-                            value = attribute.get_remote_value(item)
-
-                    if value is not ftrack_api_old.symbol.NOT_SET:
-                        if isinstance(
-                            attribute, ftrack_api_old.attribute.ReferenceAttribute
-                        ):
-                            if isinstance(value, ftrack_api_old.entity.base.Entity):
-                                value = self.entity_reference(value)
-
-                        data[attribute.name] = value
-
-            return data
-
-        if isinstance(
-            item, ftrack_api_old.collection.MappedCollectionProxy
-        ):
-            # Use proxied collection for serialisation.
-            item = item.collection
-
-        if isinstance(item, ftrack_api_old.collection.Collection):
-            data = []
-            for entity in item:
-                data.append(self.entity_reference(entity))
-
-            return data
-
-        raise TypeError('{0!r} is not JSON serializable'.format(item))
-
-    def entity_reference(self, entity):
-        '''Return entity reference that uniquely identifies *entity*.
-
-        Return a mapping containing the __entity_type__ of the entity along with
-        the key, value pairs that make up its primary key.
-
-        '''
-        reference = {
-            '__entity_type__': entity.entity_type
-        }
-        with self.auto_populating(False):
-            reference.update(ftrack_api_old.inspection.primary_key(entity))
-
-        return reference
-
-    @ftrack_api_old.logging.deprecation_warning(
-        'Session._entity_reference is now available as public method '
-        'Session.entity_reference. The private method will be removed '
-        'in version 2.0.'
-    )
-    def _entity_reference(self, entity):
-        '''Return entity reference that uniquely identifies *entity*.
-
-        Return a mapping containing the __entity_type__ of the entity along
-        with the key, value pairs that make up its primary key.
-
-        .. note::
-
-            This private method is now available as public method
-            :meth:`entity_reference`. This alias remains for backwards
-            compatibility, but will be removed in version 2.0.
-
-        '''
-        return self.entity_reference(entity)
-
-    def decode(self, string):
-        '''Return decoded JSON *string* as Python object.'''
-        with self.operation_recording(False):
-            return json.loads(string, object_hook=self._decode)
-
-    def _decode(self, item):
-        '''Return *item* transformed into appropriate representation.'''
-        if isinstance(item, collections.Mapping):
-            if '__type__' in item:
-                if item['__type__'] == 'datetime':
-                    item = arrow.get(item['value'])
-
-            elif '__entity_type__' in item:
-                item = self._create(
-                    item['__entity_type__'], item, reconstructing=True
-                )
-
-        return item
-
-    def _get_locations(self, filter_inaccessible=True):
-        '''Helper to return locations ordered by priority.
-
-        If *filter_inaccessible* is True then only accessible locations will be
-        included in the result.
-
-        '''
-        # Optimise this call.
-        locations = self.query('Location')
-
-        # Filter.
-        if filter_inaccessible:
-            locations = filter(
-                lambda location: location.accessor,
-                locations
-            )
-
-        # Sort by priority.
-        locations = sorted(
-            locations, key=lambda location: location.priority
-        )
-
-        return locations
-
-    def pick_location(self, component=None):
-        '''Return suitable location to use.
-
-        If no *component* specified then return highest priority accessible
-        location. Otherwise, return highest priority accessible location that
-        *component* is available in.
-
-        Return None if no suitable location could be picked.
-
-        '''
-        if component:
-            return self.pick_locations([component])[0]
-
-        else:
-            locations = self._get_locations()
-            if locations:
-                return locations[0]
-            else:
-                return None
-
-    def pick_locations(self, components):
-        '''Return suitable locations for *components*.
-
-        Return list of locations corresponding to *components* where each
-        picked location is the highest priority accessible location for that
-        component. If a component has no location available then its
-        corresponding entry will be None.
-
-        '''
-        candidate_locations = self._get_locations()
-        availabilities = self.get_component_availabilities(
-            components, locations=candidate_locations
-        )
-
-        locations = []
-        for component, availability in zip(components, availabilities):
-            location = None
-
-            for candidate_location in candidate_locations:
-                if availability.get(candidate_location['id']) > 0.0:
-                    location = candidate_location
-                    break
-
-            locations.append(location)
-
-        return locations
-
-    def create_component(
-        self, path, data=None, location='auto'
-    ):
-        '''Create a new component from *path* with additional *data*.
-
-        .. note::
-
-            This is a helper method. To create components manually use the
-            standard :meth:`Session.create` method.
-
-        *path* can be a string representing a filesystem path to the data to
-        use for the component. The *path* can also be specified as a sequence
-        string, in which case a sequence component with child components for
-        each item in the sequence will be created automatically. The accepted
-        format for a sequence is '{head}{padding}{tail} [{ranges}]'. For
-        example::
-
-            '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]'
-
-        .. seealso::
-
-            `Clique documentation `_
-
-        *data* should be a dictionary of any additional data to construct the
-        component with (as passed to :meth:`Session.create`).
-
-        If *location* is specified then automatically add component to that
-        location. The default of 'auto' will automatically pick a suitable
-        location to add the component to if one is available. To not add to
-        any location, specify *location* as None.
-
-        ..
note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration in the - location. - ''' - if data is None: - data = {} - - if location == 'auto': - # Check if the component name matches one of the ftrackreview - # specific names. Add the component to the ftrack.review location if - # so. This is used to not break backwards compatibility. - if data.get('name') in ( - 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' - ): - location = self.get( - 'Location', ftrack_api_old.symbol.REVIEW_LOCATION_ID - ) - - else: - location = self.pick_location() - - try: - collection = clique.parse(path) - - except ValueError: - # Assume is a single file. - if 'size' not in data: - data['size'] = self._get_filesystem_size(path) - - data.setdefault('file_type', os.path.splitext(path)[-1]) - - return self._create_component( - 'FileComponent', path, data, location - ) - - else: - # Calculate size of container and members. - member_sizes = {} - container_size = data.get('size') - - if container_size is not None: - if len(collection.indexes) > 0: - member_size = int( - round(container_size / len(collection.indexes)) - ) - for item in collection: - member_sizes[item] = member_size - - else: - container_size = 0 - for item in collection: - member_sizes[item] = self._get_filesystem_size(item) - container_size += member_sizes[item] - - # Create sequence component - container_path = collection.format('{head}{padding}{tail}') - data.setdefault('padding', collection.padding) - data.setdefault('file_type', os.path.splitext(container_path)[-1]) - data.setdefault('size', container_size) - - container = self._create_component( - 'SequenceComponent', container_path, data, location=None - ) - - # Create member components for sequence. - for member_path in collection: - member_data = { - 'name': collection.match(member_path).group('index'), - 'container': container, - 'size': member_sizes[member_path], - 'file_type': os.path.splitext(member_path)[-1] - } - - component = self._create_component( - 'FileComponent', member_path, member_data, location=None - ) - container['members'].append(component) - - if location: - origin_location = self.get( - 'Location', ftrack_api_old.symbol.ORIGIN_LOCATION_ID - ) - location.add_component( - container, origin_location, recursive=True - ) - - return container - - def _create_component(self, entity_type, path, data, location): - '''Create and return component. - - See public function :py:func:`createComponent` for argument details. - - ''' - component = self.create(entity_type, data) - - # Add to special origin location so that it is possible to add to other - # locations. - origin_location = self.get( - 'Location', ftrack_api_old.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component(component, path, recursive=False) - - if location: - location.add_component(component, origin_location, recursive=False) - - return component - - def _get_filesystem_size(self, path): - '''Return size from *path*''' - try: - size = os.path.getsize(path) - except OSError: - size = 0 - - return size - - def get_component_availability(self, component, locations=None): - '''Return availability of *component*. - - If *locations* is set then limit result to availability of *component* - in those *locations*. 
- - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.get_component_availabilities( - [component], locations=locations - )[0] - - def get_component_availabilities(self, components, locations=None): - '''Return availabilities of *components*. - - If *locations* is set then limit result to availabilities of - *components* in those *locations*. - - Return a list of dictionaries of {location_id:percentage_availability}. - The list indexes correspond to those of *components*. - - ''' - availabilities = [] - - if locations is None: - locations = self.query('Location') - - # Separate components into two lists, those that are containers and - # those that are not, so that queries can be optimised. - standard_components = [] - container_components = [] - - for component in components: - if 'members' in component.keys(): - container_components.append(component) - else: - standard_components.append(component) - - # Perform queries. - if standard_components: - self.populate( - standard_components, 'component_locations.location_id' - ) - - if container_components: - self.populate( - container_components, - 'members, component_locations.location_id' - ) - - base_availability = {} - for location in locations: - base_availability[location['id']] = 0.0 - - for component in components: - availability = base_availability.copy() - availabilities.append(availability) - - is_container = 'members' in component.keys() - if is_container and len(component['members']): - member_availabilities = self.get_component_availabilities( - component['members'], locations=locations - ) - multiplier = 1.0 / len(component['members']) - for member, member_availability in zip( - component['members'], member_availabilities - ): - for location_id, ratio in member_availability.items(): - availability[location_id] += ( - ratio * multiplier - ) - else: - for component_location in component['component_locations']: - location_id = component_location['location_id'] - if location_id in availability: - availability[location_id] = 100.0 - - for location_id, percentage in availability.items(): - # Avoid quantization error by rounding percentage and clamping - # to range 0-100. - adjusted_percentage = round(percentage, 9) - adjusted_percentage = max(0.0, min(adjusted_percentage, 100.0)) - availability[location_id] = adjusted_percentage - - return availabilities - - @ftrack_api_old.logging.deprecation_warning( - 'Session.delayed_job has been deprecated in favour of session.call. ' - 'Please refer to the release notes for more information.' - ) - def delayed_job(self, job_type): - '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. - - *job_type* should be one of the allowed job types. There is currently - only one remote job type "SYNC_USERS_LDAP". - ''' - if job_type not in (ftrack_api_old.symbol.JOB_SYNC_USERS_LDAP, ): - raise ValueError( - u'Invalid Job type: {0}.'.format(job_type) - ) - - operation = { - 'action': 'delayed_job', - 'job_type': job_type.name - } - - try: - result = self.call( - [operation] - )[0] - - except ftrack_api_old.exception.ServerError as error: - raise - - return result['data'] - - def get_widget_url(self, name, entity=None, theme=None): - '''Return an authenticated URL for widget with *name* and given options. - - The returned URL will be authenticated using a token which will expire - after 6 minutes. - - *name* should be the name of the widget to return and should be one of - 'info', 'tasks' or 'tasks_browser'. 
-
-        Certain widgets require an entity to be specified. If so, specify it by
-        setting *entity* to a valid entity instance.
-
-        *theme* sets the theme of the widget and can be either 'light' or 'dark'
-        (defaulting to 'dark' if an invalid option is given).
-
-        '''
-        operation = {
-            'action': 'get_widget_url',
-            'name': name,
-            'theme': theme
-        }
-        if entity:
-            operation['entity_type'] = entity.entity_type
-            operation['entity_key'] = (
-                ftrack_api_old.inspection.primary_key(entity).values()
-            )
-
-        try:
-            result = self.call([operation])
-
-        except ftrack_api_old.exception.ServerError as error:
-            # Raise informative error if the action is not supported.
-            if 'Invalid action u\'get_widget_url\'' in error.message:
-                raise ftrack_api_old.exception.ServerCompatibilityError(
-                    'Server version {0!r} does not support "get_widget_url", '
-                    'please update server and try again.'.format(
-                        self.server_information.get('version')
-                    )
-                )
-            else:
-                raise
-
-        else:
-            return result[0]['widget_url']
-
-    def encode_media(self, media, version_id=None, keep_original='auto'):
-        '''Return a new Job that encodes *media* to make it playable in browsers.
-
-        *media* can be a path to a file or a FileComponent in the ftrack.server
-        location.
-
-        The job will encode *media* based on the file type, and the job data
-        contains information about the encoding in the following format::
-
-            {
-                'output': [{
-                    'format': 'video/mp4',
-                    'component_id': 'e2dc0524-b576-11d3-9612-080027331d74'
-                }, {
-                    'format': 'image/jpeg',
-                    'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b'
-                }],
-                'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294',
-                'keep_original': True
-            }
-
-        The output components are associated with the job via the job_components
-        relation.
-
-        An image component that can be used as a thumbnail will always be
-        generated if possible.
-
-        If *media* is a file path, a new source component will be created and
-        added to the ftrack server location and a call to :meth:`commit` will be
-        issued. If *media* is a FileComponent, it will be assumed to be
-        available in the ftrack.server location.
-
-        If *version_id* is specified, the new components will automatically be
-        associated with the AssetVersion. Otherwise, the components will not
-        be associated to a version even if the supplied *media* belongs to one.
-        A server version of 3.3.32 or higher is required for the version_id
-        argument to function properly.
-
-        If *keep_original* is not set, the original media will be kept if it
-        is a FileComponent, and deleted if it is a file path. You can specify
-        True or False to change this behavior.
-        '''
-        if isinstance(media, basestring):
-            # Media is a path to a file.
-            server_location = self.get(
-                'Location', ftrack_api_old.symbol.SERVER_LOCATION_ID
-            )
-            if keep_original == 'auto':
-                keep_original = False
-
-            component_data = None
-            if keep_original:
-                component_data = dict(version_id=version_id)
-
-            component = self.create_component(
-                path=media,
-                data=component_data,
-                location=server_location
-            )
-
-            # Auto commit to ensure component exists when sent to server.
-            self.commit()
-
-        elif (
-            hasattr(media, 'entity_type') and
-            media.entity_type in ('FileComponent',)
-        ):
-            # Existing file component.
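-            # As noted in the docstring, an existing FileComponent is
-            # assumed to already be available in the ftrack.server location,
-            # so no upload or commit is required here.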
-            component = media
-            if keep_original == 'auto':
-                keep_original = True
-
-        else:
-            raise ValueError(
-                'Unable to encode media of type: {0}'.format(type(media))
-            )
-
-        operation = {
-            'action': 'encode_media',
-            'component_id': component['id'],
-            'version_id': version_id,
-            'keep_original': keep_original
-        }
-
-        try:
-            result = self.call([operation])
-
-        except ftrack_api_old.exception.ServerError as error:
-            # Raise informative error if the action is not supported.
-            if 'Invalid action u\'encode_media\'' in error.message:
-                raise ftrack_api_old.exception.ServerCompatibilityError(
-                    'Server version {0!r} does not support "encode_media", '
-                    'please update server and try again.'.format(
-                        self.server_information.get('version')
-                    )
-                )
-            else:
-                raise
-
-        return self.get('Job', result[0]['job_id'])
-
-    def get_upload_metadata(
-        self, component_id, file_name, file_size, checksum=None
-    ):
-        '''Return URL and headers used to upload data for *component_id*.
-
-        *file_name* and *file_size* should match the component's details.
-
-        The returned URL should be requested using HTTP PUT with the specified
-        headers.
-
-        The *checksum* is used as the Content-MD5 header and should contain
-        the base64-encoded 128-bit MD5 digest of the message (without the
-        headers) according to RFC 1864. This can be used as a message integrity
-        check to verify that the data is the same data that was originally sent.
-        '''
-        operation = {
-            'action': 'get_upload_metadata',
-            'component_id': component_id,
-            'file_name': file_name,
-            'file_size': file_size,
-            'checksum': checksum
-        }
-
-        try:
-            result = self.call([operation])
-
-        except ftrack_api_old.exception.ServerError as error:
-            # Raise informative error if the action is not supported.
-            if 'Invalid action u\'get_upload_metadata\'' in error.message:
-                raise ftrack_api_old.exception.ServerCompatibilityError(
-                    'Server version {0!r} does not support '
-                    '"get_upload_metadata", please update server and try '
-                    'again.'.format(
-                        self.server_information.get('version')
-                    )
-                )
-            else:
-                raise
-
-        return result[0]
-
-    def send_user_invite(self, user):
-        '''Send an invitation to the provided *user*.
-
-        *user* is a User instance
-
-        '''
-
-        self.send_user_invites(
-            [user]
-        )
-
-    def send_user_invites(self, users):
-        '''Send an invitation to each of the provided *users*.
-
-        *users* is a list of User instances
-
-        '''
-
-        operations = []
-
-        for user in users:
-            operations.append(
-                {
-                    'action': 'send_user_invite',
-                    'user_id': user['id']
-                }
-            )
-
-        try:
-            self.call(operations)
-
-        except ftrack_api_old.exception.ServerError as error:
-            # Raise informative error if the action is not supported.
-            if 'Invalid action u\'send_user_invite\'' in error.message:
-                raise ftrack_api_old.exception.ServerCompatibilityError(
-                    'Server version {0!r} does not support '
-                    '"send_user_invite", please update server and '
-                    'try again.'.format(
-                        self.server_information.get('version')
-                    )
-                )
-            else:
-                raise
-
-    def send_review_session_invite(self, invitee):
-        '''Send an invite to a review session to *invitee*.
-
-        *invitee* is an instance of ReviewSessionInvitee.
-
-        .. note::
-
-            The *invitee* must be committed.
-
-        '''
-        self.send_review_session_invites([invitee])
-
-    def send_review_session_invites(self, invitees):
-        '''Send an invite to a review session to a list of *invitees*.
-
-        *invitees* is a list of ReviewSessionInvitee objects.
-
-        .. note::
-
-            All *invitees* must be committed.
- - ''' - operations = [] - - for invitee in invitees: - operations.append( - { - 'action': 'send_review_session_invite', - 'review_session_invitee_id': invitee['id'] - } - ) - - try: - self.call(operations) - except ftrack_api_old.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_review_session_invite\'' in error.message: - raise ftrack_api_old.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_review_session_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - -class AutoPopulatingContext(object): - '''Context manager for temporary change of session auto_populate value.''' - - def __init__(self, session, auto_populate): - '''Initialise context.''' - super(AutoPopulatingContext, self).__init__() - self._session = session - self._auto_populate = auto_populate - self._current_auto_populate = None - - def __enter__(self): - '''Enter context switching to desired auto populate setting.''' - self._current_auto_populate = self._session.auto_populate - self._session.auto_populate = self._auto_populate - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context resetting auto populate to original setting.''' - self._session.auto_populate = self._current_auto_populate - - -class OperationRecordingContext(object): - '''Context manager for temporary change of session record_operations.''' - - def __init__(self, session, record_operations): - '''Initialise context.''' - super(OperationRecordingContext, self).__init__() - self._session = session - self._record_operations = record_operations - self._current_record_operations = None - - def __enter__(self): - '''Enter context.''' - self._current_record_operations = self._session.record_operations - self._session.record_operations = self._record_operations - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context.''' - self._session.record_operations = self._current_record_operations - - -class OperationPayload(collections.MutableMapping): - '''Represent operation payload.''' - - def __init__(self, *args, **kwargs): - '''Initialise payload.''' - super(OperationPayload, self).__init__() - self._data = dict() - self.update(dict(*args, **kwargs)) - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/pype/vendor/ftrack_api_old/structure/__init__.py b/pype/vendor/ftrack_api_old/structure/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/pype/vendor/ftrack_api_old/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/pype/vendor/ftrack_api_old/structure/base.py b/pype/vendor/ftrack_api_old/structure/base.py deleted file mode 100644 index eae3784dc2..0000000000 --- a/pype/vendor/ftrack_api_old/structure/base.py +++ /dev/null @@ -1,38 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from abc import ABCMeta, 
abstractmethod - - -class Structure(object): - '''Structure plugin interface. - - A structure plugin should compute appropriate paths for data. - - ''' - - __metaclass__ = ABCMeta - - def __init__(self, prefix=''): - '''Initialise structure.''' - self.prefix = prefix - self.path_separator = '/' - super(Structure, self).__init__() - - @abstractmethod - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - - def _get_sequence_expression(self, sequence): - '''Return a sequence expression for *sequence* component.''' - padding = sequence['padding'] - if padding: - expression = '%0{0}d'.format(padding) - else: - expression = '%d' - - return expression diff --git a/pype/vendor/ftrack_api_old/structure/entity_id.py b/pype/vendor/ftrack_api_old/structure/entity_id.py deleted file mode 100644 index 9759e7795e..0000000000 --- a/pype/vendor/ftrack_api_old/structure/entity_id.py +++ /dev/null @@ -1,12 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api_old.structure.base - - -class EntityIdStructure(ftrack_api_old.structure.base.Structure): - '''Entity id pass-through structure.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a *resourceIdentifier* for supplied *entity*.''' - return entity['id'] diff --git a/pype/vendor/ftrack_api_old/structure/id.py b/pype/vendor/ftrack_api_old/structure/id.py deleted file mode 100644 index 1051f55aad..0000000000 --- a/pype/vendor/ftrack_api_old/structure/id.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os - -import ftrack_api_old.symbol -import ftrack_api_old.structure.base - - -class IdStructure(ftrack_api_old.structure.base.Structure): - '''Id based structure supporting Components only. - - A components unique id will be used to form a path to store the data at. - To avoid millions of entries in one directory each id is chunked into four - prefix directories with the remainder used to name the file:: - - /prefix/1/2/3/4/56789 - - If the component has a defined filetype it will be added to the path:: - - /prefix/1/2/3/4/56789.exr - - Components that are children of container components will be placed inside - the id structure of their parent:: - - /prefix/1/2/3/4/56789/355827648d.exr - /prefix/1/2/3/4/56789/ajf24215b5.exr - - However, sequence children will be named using their label as an index and - a common prefix of 'file.':: - - /prefix/1/2/3/4/56789/file.0001.exr - /prefix/1/2/3/4/56789/file.0002.exr - - ''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - if entity.entity_type in ('FileComponent',): - # When in a container, place the file inside a directory named - # after the container. - container = entity['container'] - if container and container is not ftrack_api_old.symbol.NOT_SET: - path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Label doubles as index for now. 
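-                    # For illustration: a member named '0001' with file type
-                    # '.exr' becomes 'file.0001.exr' inside its parent
-                    # sequence directory, matching the layout shown in the
-                    # class docstring.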
- name = 'file.{0}{1}'.format( - entity['name'], entity['file_type'] - ) - parts = [os.path.dirname(path), name] - - else: - # Just place uniquely identified file into directory - name = entity['id'] + entity['file_type'] - parts = [path, name] - - else: - name = entity['id'][4:] + entity['file_type'] - parts = ([self.prefix] + list(entity['id'][:4]) + [name]) - - elif entity.entity_type in ('SequenceComponent',): - name = 'file' - - # Add a sequence identifier. - sequence_expression = self._get_sequence_expression(entity) - name += '.{0}'.format(sequence_expression) - - if ( - entity['file_type'] and - entity['file_type'] is not ftrack_api_old.symbol.NOT_SET - ): - name += entity['file_type'] - - parts = ([self.prefix] + list(entity['id'][:4]) - + [entity['id'][4:]] + [name]) - - elif entity.entity_type in ('ContainerComponent',): - # Just an id directory - parts = ([self.prefix] + - list(entity['id'][:4]) + [entity['id'][4:]]) - - else: - raise NotImplementedError('Cannot generate path for unsupported ' - 'entity {0}'.format(entity)) - - return self.path_separator.join(parts).strip('/') diff --git a/pype/vendor/ftrack_api_old/structure/origin.py b/pype/vendor/ftrack_api_old/structure/origin.py deleted file mode 100644 index 0d4d3a57f5..0000000000 --- a/pype/vendor/ftrack_api_old/structure/origin.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from .base import Structure - - -class OriginStructure(Structure): - '''Origin structure that passes through existing resource identifier.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* should be a mapping that includes at least a - 'source_resource_identifier' key that refers to the resource identifier - to pass through. - - ''' - if context is None: - context = {} - - resource_identifier = context.get('source_resource_identifier') - if resource_identifier is None: - raise ValueError( - 'Could not generate resource identifier as no source resource ' - 'identifier found in passed context.' - ) - - return resource_identifier diff --git a/pype/vendor/ftrack_api_old/structure/standard.py b/pype/vendor/ftrack_api_old/structure/standard.py deleted file mode 100644 index f58608afb8..0000000000 --- a/pype/vendor/ftrack_api_old/structure/standard.py +++ /dev/null @@ -1,217 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import re -import unicodedata - -import ftrack_api_old.symbol -import ftrack_api_old.structure.base - - -class StandardStructure(ftrack_api_old.structure.base.Structure): - '''Project hierarchy based structure that only supports Components. - - The resource identifier is generated from the project code, the name - of objects in the project structure, asset name and version number:: - - my_project/folder_a/folder_b/asset_name/v003 - - If the component is a `FileComponent` then the name of the component and the - file type are used as filename in the resource_identifier:: - - my_project/folder_a/folder_b/asset_name/v003/foo.jpg - - If the component is a `SequenceComponent` then a sequence expression, - `%04d`, is used. E.g. a component with the name `foo` yields:: - - my_project/folder_a/folder_b/asset_name/v003/foo.%04d.jpg - - For the member components their index in the sequence is used:: - - my_project/folder_a/folder_b/asset_name/v003/foo.0042.jpg - - The name of the component is added to the resource identifier if the - component is a `ContainerComponent`. E.g. 
a container component with the - name `bar` yields:: - - my_project/folder_a/folder_b/asset_name/v003/bar - - For a member of that container the file name is based on the component name - and file type:: - - my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf - - ''' - - def __init__( - self, project_versions_prefix=None, illegal_character_substitute='_' - ): - '''Initialise structure. - - If *project_versions_prefix* is defined, insert after the project code - for versions published directly under the project:: - - my_project//v001/foo.jpg - - Replace illegal characters with *illegal_character_substitute* if - defined. - - .. note:: - - Nested component containers/sequences are not supported. - - ''' - super(StandardStructure, self).__init__() - self.project_versions_prefix = project_versions_prefix - self.illegal_character_substitute = illegal_character_substitute - - def _get_parts(self, entity): - '''Return resource identifier parts from *entity*.''' - session = entity.session - - version = entity['version'] - - if version is ftrack_api_old.symbol.NOT_SET and entity['version_id']: - version = session.get('AssetVersion', entity['version_id']) - - error_message = ( - 'Component {0!r} must be attached to a committed ' - 'version and a committed asset with a parent context.'.format( - entity - ) - ) - - if ( - version is ftrack_api_old.symbol.NOT_SET or - version in session.created - ): - raise ftrack_api_old.exception.StructureError(error_message) - - link = version['link'] - - if not link: - raise ftrack_api_old.exception.StructureError(error_message) - - structure_names = [ - item['name'] - for item in link[1:-1] - ] - - project_id = link[0]['id'] - project = session.get('Project', project_id) - asset = version['asset'] - - version_number = self._format_version(version['version']) - - parts = [] - parts.append(project['name']) - - if structure_names: - parts.extend(structure_names) - elif self.project_versions_prefix: - # Add *project_versions_prefix* if configured and the version is - # published directly under the project. - parts.append(self.project_versions_prefix) - - parts.append(asset['name']) - parts.append(version_number) - - return [self.sanitise_for_filesystem(part) for part in parts] - - def _format_version(self, number): - '''Return a formatted string representing version *number*.''' - return 'v{0:03d}'.format(number) - - def sanitise_for_filesystem(self, value): - '''Return *value* with illegal filesystem characters replaced. - - An illegal character is one that is not typically valid for filesystem - usage, such as non ascii characters, or can be awkward to use in a - filesystem, such as spaces. Replace these characters with - the character specified by *illegal_character_substitute* on - initialisation. If no character was specified as substitute then return - *value* unmodified. - - ''' - if self.illegal_character_substitute is None: - return value - - if isinstance(value, str): - value = value.decode('utf-8') - - value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') - value = re.sub('[^\w\.-]', self.illegal_character_substitute, value) - return unicode(value.strip().lower()) - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information, but - is unused in this implementation. 
diff --git a/pype/vendor/ftrack_api_old/symbol.py b/pype/vendor/ftrack_api_old/symbol.py
deleted file mode 100644
index f46760f634..0000000000
--- a/pype/vendor/ftrack_api_old/symbol.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-import os
-
-
-class Symbol(object):
-    '''A constant symbol.'''
-
-    def __init__(self, name, value=True):
-        '''Initialise symbol with unique *name* and *value*.
-
-        *value* is used for nonzero testing.
-
-        '''
-        self.name = name
-        self.value = value
-
-    def __str__(self):
-        '''Return string representation.'''
-        return self.name
-
-    def __repr__(self):
-        '''Return representation.'''
-        return '{0}({1})'.format(self.__class__.__name__, self.name)
-
-    def __nonzero__(self):
-        '''Return whether symbol represents non-zero value.'''
-        return bool(self.value)
-
-    def __copy__(self):
-        '''Return shallow copy.
-
-        Overridden to always return same instance.
-
-        '''
-        return self
-
-
-#: Symbol representing that no value has been set or loaded.
-NOT_SET = Symbol('NOT_SET', False)
-
-#: Symbol representing created state.
-CREATED = Symbol('CREATED')
-
-#: Symbol representing modified state.
-MODIFIED = Symbol('MODIFIED')
-
-#: Symbol representing deleted state.
-DELETED = Symbol('DELETED')
-
-#: Topic published when component added to a location.
-COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added'
-
-#: Topic published when component removed from a location.
-COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed'
-
-#: Identifier of builtin origin location.
-ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b'
-
-#: Identifier of builtin unmanaged location.
-UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b'
-
-#: Identifier of builtin review location.
-REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b'
-
-#: Identifier of builtin connect location.
-CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b'
-
-#: Identifier of builtin server location.
-SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b'
-
-#: Chunk size used when working with data, default to 1Mb.
-CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024
-
-#: Symbol representing syncing users with ldap
-JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP')
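Symbol is the classic Python sentinel pattern: a named constant whose
truthiness is explicit, so NOT_SET can be told apart from legitimate falsy
values such as None or ''. A Python 3 sketch (where py2's __nonzero__
becomes __bool__)::

    class Symbol(object):
        '''A constant symbol.'''

        def __init__(self, name, value=True):
            self.name = name
            self.value = value

        def __repr__(self):
            return '{0}({1})'.format(self.__class__.__name__, self.name)

        def __bool__(self):
            # Lets falsy sentinels work in `if not value:` checks.
            return bool(self.value)

        def __copy__(self):
            # Always return the same instance so `is` comparisons stay valid.
            return self

    NOT_SET = Symbol('NOT_SET', False)

    def describe(file_type=NOT_SET):
        # `is NOT_SET` distinguishes "never loaded" from None or ''.
        return 'not loaded' if file_type is NOT_SET else repr(file_type)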
diff --git a/requirements.txt b/requirements.txt
index 658405e2fb..f7d46a8dcc 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,15 +8,19 @@ Click
 clique==1.5.0
 coverage
 cx_Freeze
+flake8
 ftrack-python-api==2.0.0
 ffmpeg-python
 google-api-python-client
 jsonschema
 keyring
 log4mongo
-OpenTimelineIO
+git+https://github.com/pypeclub/OpenTimelineIO.git@develop
 pathlib2
 Pillow
+pycodestyle
+pydocstyle
+pylint
 pynput
 pymongo
 pytest
@@ -24,6 +28,7 @@ pytest-cov
 pytest-print
 pyqt5
 Qt.py
+scandir
 speedcopy
 six
 Sphinx
diff --git a/schema/workfile-1.0.json b/schema/workfile-1.0.json
new file mode 100644
index 0000000000..15bfdc6ff7
--- /dev/null
+++ b/schema/workfile-1.0.json
@@ -0,0 +1,52 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+
+    "title": "pype:workfile-1.0",
+    "description": "Workfile additional information.",
+
+    "type": "object",
+
+    "additionalProperties": true,
+
+    "required": [
+        "schema",
+        "type",
+        "filename",
+        "task_name",
+        "parent"
+    ],
+
+    "properties": {
+        "schema": {
+            "description": "Schema identifier for payload",
+            "type": "string",
+            "enum": ["pype:workfile-1.0"],
+            "example": "pype:workfile-1.0"
+        },
+        "type": {
+            "description": "The type of document",
+            "type": "string",
+            "enum": ["workfile"],
+            "example": "workfile"
+        },
+        "parent": {
+            "description": "Unique identifier to parent document",
+            "example": "592c33475f8c1b064c4d1696"
+        },
+        "filename": {
+            "description": "Workfile's filename",
+            "type": "string",
+            "example": "kuba_each_case_Alpaca_01_animation_v001.ma"
+        },
+        "task_name": {
+            "description": "Task name",
+            "type": "string",
+            "example": "animation"
+        },
+        "data": {
+            "description": "Document metadata",
+            "type": "object",
+            "example": {"key": "value"}
+        }
+    }
+}
diff --git a/setup.cfg b/setup.cfg
index cc509b94d8..bc66f0c4d5 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -10,6 +10,9 @@ exclude =
 
 max-complexity = 30
 
+[pylint.'MESSAGES CONTROL']
+disable = no-member
+
 [pydocstyle]
 convention = google
 ignore = D107
@@ -20,3 +23,6 @@ omit =
     /tests
 
 [coverage:html]
 directory = ./coverage
+
+[tool:pytest]
+norecursedirs = repos/* pype/modules/ftrack/*
\ No newline at end of file
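The schema/workfile-1.0.json added above is a standard draft-04 JSON Schema,
so workfile documents can be checked with the jsonschema package already
listed in requirements.txt. A small sketch: the payload simply mirrors the
schema's own example values::

    import json

    import jsonschema

    workfile_doc = {
        "schema": "pype:workfile-1.0",
        "type": "workfile",
        "parent": "592c33475f8c1b064c4d1696",
        "filename": "kuba_each_case_Alpaca_01_animation_v001.ma",
        "task_name": "animation",
        "data": {"key": "value"},
    }

    with open("schema/workfile-1.0.json") as schema_file:
        schema = json.load(schema_file)

    # Raises jsonschema.ValidationError if a required key is missing.
    jsonschema.validate(workfile_doc, schema)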
diff --git a/setup/hiero/hiero_plugin_path/Python/Startup/otioexporter/OTIOExportTask.py b/setup/hiero/hiero_plugin_path/Python/Startup/otioexporter/OTIOExportTask.py
index 77dc9c45b3..90504ccd18 100644
--- a/setup/hiero/hiero_plugin_path/Python/Startup/otioexporter/OTIOExportTask.py
+++ b/setup/hiero/hiero_plugin_path/Python/Startup/otioexporter/OTIOExportTask.py
@@ -49,6 +49,9 @@ class OTIOExportTask(hiero.core.TaskBase):
         return str(type(self))
 
     def get_rate(self, item):
+        if not hasattr(item, 'framerate'):
+            item = item.sequence()
+
         num, den = item.framerate().toRational()
         rate = float(num) / float(den)
 
@@ -58,12 +61,12 @@ class OTIOExportTask(hiero.core.TaskBase):
         return round(rate, 2)
 
     def get_clip_ranges(self, trackitem):
-        # Is clip an audio file? Use sequence frame rate
-        if not trackitem.source().mediaSource().hasVideo():
-            rate_item = trackitem.sequence()
+        # Get rate from source or sequence
+        if trackitem.source().mediaSource().hasVideo():
+            rate_item = trackitem.source()
         else:
-            rate_item = trackitem.source()
+            rate_item = trackitem.sequence()
 
         source_rate = self.get_rate(rate_item)
 
@@ -88,9 +91,10 @@
             duration=source_duration
         )
 
-        available_range = None
         hiero_clip = trackitem.source()
-        if not hiero_clip.mediaSource().isOffline():
+
+        available_range = None
+        if hiero_clip.mediaSource().isMediaPresent():
             start_time = otio.opentime.RationalTime(
                 hiero_clip.mediaSource().startTime(),
                 source_rate
@@ -123,7 +127,7 @@
     def get_marker_color(self, tag):
         icon = tag.icon()
-        pat = 'icons:Tag(?P<color>\w+)\.\w+'
+        pat = r'icons:Tag(?P<color>\w+)\.\w+'
 
         res = re.search(pat, icon)
         if res:
@@ -155,13 +159,17 @@
                 )
             )
 
+            metadata = dict(
+                Hiero=tag.metadata().dict()
+            )
+            # Store the source item for future import assignment
+            metadata['Hiero']['source_type'] = hiero_item.__class__.__name__
+
             marker = otio.schema.Marker(
                 name=tag.name(),
                 color=self.get_marker_color(tag),
                 marked_range=marked_range,
-                metadata={
-                    'Hiero': tag.metadata().dict()
-                }
+                metadata=metadata
             )
 
             otio_item.markers.append(marker)
@@ -170,37 +178,44 @@
         hiero_clip = trackitem.source()
 
         # Add Gap if needed
-        prev_item = (
-            itemindex and trackitem.parent().items()[itemindex - 1] or
-            trackitem
-        )
+        if itemindex == 0:
+            prev_item = trackitem
 
-        if prev_item == trackitem and trackitem.timelineIn() > 0:
+        else:
+            prev_item = trackitem.parent().items()[itemindex - 1]
+
+        clip_diff = trackitem.timelineIn() - prev_item.timelineOut()
+
+        if itemindex == 0 and trackitem.timelineIn() > 0:
             self.add_gap(trackitem, otio_track, 0)
 
-        elif (
-            prev_item != trackitem and
-            prev_item.timelineOut() != trackitem.timelineIn()
-        ):
+        elif itemindex and clip_diff != 1:
             self.add_gap(trackitem, otio_track, prev_item.timelineOut())
 
         # Create Clip
         source_range, available_range = self.get_clip_ranges(trackitem)
-        otio_clip = otio.schema.Clip()
-        otio_clip.name = trackitem.name()
-        otio_clip.source_range = source_range
+        otio_clip = otio.schema.Clip(
+            name=trackitem.name(),
+            source_range=source_range
+        )
 
         # Add media reference
         media_reference = otio.schema.MissingReference()
-        if not hiero_clip.mediaSource().isOffline():
+        if hiero_clip.mediaSource().isMediaPresent():
             source = hiero_clip.mediaSource()
-            media_reference = otio.schema.ExternalReference()
-            media_reference.available_range = available_range
+            first_file = source.fileinfos()[0]
+            path = first_file.filename()
 
-            path, name = os.path.split(source.fileinfos()[0].filename())
-            media_reference.target_url = os.path.join(path, name)
-            media_reference.name = name
+            if "%" in path:
+                path = re.sub(r"%\d+d", "%d", path)
+            if "#" in path:
+                path = re.sub(r"#+", "%d", path)
+
+            media_reference = otio.schema.ExternalReference(
+                target_url=u'{}'.format(path),
+                available_range=available_range
+            )
 
         otio_clip.media_reference = media_reference
 
@@ -218,6 +233,7 @@
         # Add tags as markers
         if self._preset.properties()["includeTags"]:
+            self.add_markers(trackitem, otio_clip)
             self.add_markers(trackitem.source(), otio_clip)
 
         otio_track.append(otio_clip)
@@ -273,16 +289,16 @@
                 name=alignment,  # Consider placing Hiero name in metadata
                 transition_type=otio.schema.TransitionTypes.SMPTE_Dissolve,
                 in_offset=in_time,
-                out_offset=out_time,
-                metadata={}
+                out_offset=out_time
            )
 
             if alignment == 'kFadeIn':
-                otio_track.insert(-2, otio_transition)
+                otio_track.insert(-1, otio_transition)
             else:
                 otio_track.append(otio_transition)
+
     def add_tracks(self):
         for track in self._sequence.items():
             if isinstance(track, hiero.core.AudioTrack):
@@ -291,8 +307,7 @@
             else:
                 kind = otio.schema.TrackKind.Video
 
-            otio_track = otio.schema.Track(kind=kind)
-            otio_track.name = track.name()
+            otio_track = otio.schema.Track(name=track.name(), kind=kind)
 
             for itemindex, trackitem in enumerate(track):
                 if isinstance(trackitem.source(), hiero.core.Clip):
@@ -306,6 +321,12 @@
     def create_OTIO(self):
         self.otio_timeline = otio.schema.Timeline()
+
+        # Set global start time based on sequence
+        self.otio_timeline.global_start_time = otio.opentime.RationalTime(
+            self._sequence.timecodeStart(),
+            self._sequence.framerate().toFloat()
+        )
         self.otio_timeline.name = self._sequence.name()
         self.add_tracks()
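One detail worth a standalone look: before building the new ExternalReference
above, the exporter normalises Hiero's frame-number placeholders (printf-style
%04d padding or #### hashes) into the plain %d form written to target_url.
The same substitution in isolation, with hypothetical sequence paths::

    import re

    def normalize_frame_pattern(path):
        # Collapse %04d-style and ####-style padding into plain %d,
        # mirroring the re.sub calls in the hunk above.
        if "%" in path:
            path = re.sub(r"%\d+d", "%d", path)
        if "#" in path:
            path = re.sub(r"#+", "%d", path)
        return path

    print(normalize_frame_pattern("/shots/sq01/plate.%04d.exr"))
    # -> /shots/sq01/plate.%d.exr
    print(normalize_frame_pattern("/shots/sq01/plate.####.exr"))
    # -> /shots/sq01/plate.%d.exr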
diff --git a/setup/hiero/hiero_plugin_path/Python/StartupUI/otioimporter/OTIOImport.py b/setup/hiero/hiero_plugin_path/Python/StartupUI/otioimporter/OTIOImport.py
index f506333a67..7efb352ed2 100644
--- a/setup/hiero/hiero_plugin_path/Python/StartupUI/otioimporter/OTIOImport.py
+++ b/setup/hiero/hiero_plugin_path/Python/StartupUI/otioimporter/OTIOImport.py
@@ -202,7 +202,8 @@ marker_color_map = {
     "PURPLE": "Magenta",
     "MAGENTA": "Magenta",
     "BLACK": "Blue",
-    "WHITE": "Green"
+    "WHITE": "Green",
+    "MINT": "Cyan"
 }
 
 
@@ -259,7 +260,7 @@ def add_markers(otio_item, hiero_item, tagsbin):
             marker.marked_range.duration.value
         )
 
-        tag = hiero_item.addTagToRange(_tag, start, end)
+        tag = hiero_item.addTag(_tag)
         tag.setName(marker.name or marker_color_map[marker_color])
 
         # Add metadata
@@ -285,7 +286,7 @@ def create_track(otio_track, tracknum, track_kind):
     return track
 
 
-def create_clip(otio_clip, tagsbin):
+def create_clip(otio_clip):
     # Create MediaSource
     otio_media = otio_clip.media_reference
     if isinstance(otio_media, otio.schema.ExternalReference):
@@ -300,13 +301,10 @@ def create_clip(otio_clip, tagsbin):
     # Create Clip
     clip = hiero.core.Clip(media)
 
-    # Add markers
-    add_markers(otio_clip, clip, tagsbin)
-
     return clip
 
 
-def create_trackitem(playhead, track, otio_clip, clip):
+def create_trackitem(playhead, track, otio_clip, clip, tagsbin):
     source_range = otio_clip.source_range
 
     trackitem = track.createTrackItem(otio_clip.name)
@@ -352,22 +350,44 @@
     trackitem.setTimelineIn(timeline_in)
     trackitem.setTimelineOut(timeline_out)
 
+    # Add markers
+    add_markers(otio_clip, trackitem, tagsbin)
+
     return trackitem
 
 
-def build_sequence(otio_timeline, project=None, track_kind=None):
+def build_sequence(
+        otio_timeline, project=None, sequence=None, track_kind=None):
+
     if project is None:
-        # TODO: Find a proper way for active project
-        project = hiero.core.projects(hiero.core.Project.kUserProjects)[-1]
+        if sequence:
+            project = sequence.project()
 
-    # Create a Sequence
-    sequence = hiero.core.Sequence(otio_timeline.name or 'OTIOSequence')
+        else:
+            # Per version 12.1v2 there is no way of getting active project
+            project = hiero.core.projects(hiero.core.Project.kUserProjects)[-1]
 
-    # Create a Bin to hold clips
     projectbin = project.clipsBin()
-    projectbin.addItem(hiero.core.BinItem(sequence))
-    sequencebin = hiero.core.Bin(sequence.name())
-    projectbin.addItem(sequencebin)
+
+    if not sequence:
+        # Create a Sequence
+        sequence = hiero.core.Sequence(otio_timeline.name or 'OTIOSequence')
+
+        # Set sequence settings from otio timeline if available
+        if hasattr(otio_timeline, 'global_start_time'):
+            if otio_timeline.global_start_time:
+                start_time = otio_timeline.global_start_time
+                sequence.setFramerate(start_time.rate)
+                sequence.setTimecodeStart(start_time.value)
+
+        # Create a Bin to hold clips
+        projectbin.addItem(hiero.core.BinItem(sequence))
+
+        sequencebin = hiero.core.Bin(sequence.name())
+        projectbin.addItem(sequencebin)
+
+    else:
+        sequencebin = projectbin
 
     # Get tagsBin
     tagsbin = hiero.core.project("Tag Presets").tagsBin()
@@ -375,13 +395,11 @@
     # Add timeline markers
     add_markers(otio_timeline, sequence, tagsbin)
 
-    # TODO: Set sequence settings from otio timeline if available
     if isinstance(otio_timeline, otio.schema.Timeline):
         tracks = otio_timeline.tracks
 
     else:
-        # otio.schema.Stack
-        tracks = otio_timeline
+        tracks = [otio_timeline]
 
     for tracknum, otio_track in enumerate(tracks):
         playhead = 0
@@ -403,7 +421,7 @@
             elif isinstance(otio_clip, otio.schema.Clip):
 
                 # Create a Clip
-                clip = create_clip(otio_clip, tagsbin)
+                clip = create_clip(otio_clip)
 
                 # Add Clip to a Bin
                 sequencebin.addItem(hiero.core.BinItem(clip))
@@ -413,7 +431,8 @@
                     playhead,
                     track,
                     otio_clip,
-                    clip
+                    clip,
+                    tagsbin
                 )
 
                 # Add trackitem to track
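With the new keyword argument, existing importer callers keep working while a
caller can now merge a timeline into an existing sequence instead of always
creating a new one. A hypothetical usage sketch from a Hiero scripting
session (the .otio path and the existing_sequence variable are illustrative)::

    import opentimelineio as otio
    from otioimporter.OTIOImport import build_sequence

    timeline = otio.adapters.read_from_file('/tmp/my_timeline.otio')

    # Default behaviour: build a new sequence, falling back to the last
    # user project as before.
    build_sequence(timeline)

    # New in this patch: append the timeline's tracks onto an existing
    # hiero.core.Sequence; the project is then resolved from that sequence.
    # build_sequence(timeline, sequence=existing_sequence)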