mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-25 05:14:40 +01:00
Merge remote-tracking branch 'origin/release/2.2.0' into bugfix/PYPE-436-fix-collect-assumed-destination
This commit is contained in:
commit
aeeb081421
226 changed files with 7074 additions and 3741 deletions
79
changelog.md
Normal file
79
changelog.md
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
# Pype changelog #
|
||||
Welcome to pype changelog
|
||||
|
||||
## 2.2.0 ##
|
||||
_release date: 8 Sept 2019_
|
||||
|
||||
**new**:
|
||||
- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts
|
||||
- _(nuke)_ option to choose deadline chunk size on write nodes
|
||||
- _(nukestudio)_ added option to publish soft effects (subTrackItems) from NukeStudio as subsets including LUT files. these can then be loaded in nuke or NukeStudio
|
||||
- _(nuke)_ option to build nuke script from previously published latest versions of plate and render subsets.
|
||||
- _(nuke)_ nuke writes now have deadline tab.
|
||||
- _(ftrack)_ Prepare Project action can now be used for creating the base folder structure on disk and in ftrack, setting up all the initial project attributes and it automatically prepares `pype_project_config` folder for the given project.
|
||||
- _(clockify)_ Added support for time tracking in clockify. This currently in addition to ftrack time logs, but does not completely replace them.
|
||||
- _(pype)_ any attributes in Creator and Loader plugins can now be customised using pype preset system
|
||||
|
||||
**changed**:
|
||||
- nukestudio now uses workio API for workfiles
|
||||
- _(maya)_ "FIX FPS" prompt in maya now appears in the middle of the screen
|
||||
- _(muster)_ can now be configured with custom templates
|
||||
- _(pype)_ global publishing plugins can now be configured using presets as well as host specific ones
|
||||
|
||||
|
||||
**fix**:
|
||||
- wrong version retrieval from path in certain scenarios
|
||||
- nuke reset resolution wasn't working in certain scenarios
|
||||
|
||||
## 2.1.0 ##
|
||||
_release date: 6 Aug 2019_
|
||||
|
||||
A large cleanup release. Most of the changes are under the hood.
|
||||
|
||||
**new**:
|
||||
- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts
|
||||
- _(pype)_ Added configurable option to add burnins to any generated quicktimes
|
||||
- _(ftrack)_ Action that identifies what machines pype is running on.
|
||||
- _(system)_ unify subprocess calls
|
||||
- _(maya)_ add audio to review quicktimes
|
||||
- _(nuke)_ add crop before write node to prevent overscan problems in ffmpeg
|
||||
- **Nuke Studio** publishing and workfiles support
|
||||
- **Muster** render manager support
|
||||
- _(nuke)_ Framerange, FPS and Resolution are set automatically at startup
|
||||
- _(maya)_ Ability to load published sequences as image planes
|
||||
- _(system)_ Ftrack event that sets asset folder permissions based on task assignees in ftrack.
|
||||
- _(maya)_ Pyblish plugin that allow validation of maya attributes
|
||||
- _(system)_ added better startup logging to tray debug, including basic connection information
|
||||
- _(avalon)_ option to group published subsets to groups in the loader
|
||||
- _(avalon)_ loader family filters are working now
|
||||
|
||||
**changed**:
|
||||
- change multiple key attributes to unify their behaviour across the pipeline
|
||||
- `frameRate` to `fps`
|
||||
- `startFrame` to `frameStart`
|
||||
- `endFrame` to `frameEnd`
|
||||
- `fstart` to `frameStart`
|
||||
- `fend` to `frameEnd`
|
||||
- `handle_start` to `handleStart`
|
||||
- `handle_end` to `handleEnd`
|
||||
- `resolution_width` to `resolutionWidth`
|
||||
- `resolution_height` to `resolutionHeight`
|
||||
- `pixel_aspect` to `pixelAspect`
|
||||
|
||||
- _(nuke)_ write nodes are now created inside group with only some attributes editable by the artist
|
||||
- rendered frames are now deleted from temporary location after their publishing is finished.
|
||||
- _(ftrack)_ RV action can now be launched from any entity
|
||||
- after publishing only refresh button is now available in pyblish UI
|
||||
- added context instance pyblish-lite so that artist knows if context plugin fails
|
||||
- _(avalon)_ allow opening selected files using enter key
|
||||
- _(avalon)_ core updated to v5.2.9 with our forked changes on top
|
||||
|
||||
**fix**:
|
||||
- faster hierarchy retrieval from db
|
||||
- _(nuke)_ A lot of stability enhancements
|
||||
- _(nuke studio)_ A lot of stability enhancements
|
||||
- _(nuke)_ now only renders a single write node on farm
|
||||
- _(ftrack)_ pype would crash when launching a project level task
|
||||
- work directory was sometimes not being created correctly
|
||||
- major pype.lib cleanup. Removing of unused functions, merging those that were doing the same and general house cleaning.
|
||||
- _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner
|
||||
|
|
@ -3,10 +3,14 @@ import os
|
|||
from pyblish import api as pyblish
|
||||
from avalon import api as avalon
|
||||
from .lib import filter_pyblish_plugins
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__version__ = "2.1.0"
|
||||
|
||||
PACKAGE_DIR = os.path.dirname(__file__)
|
||||
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
|
||||
|
||||
|
|
@ -14,6 +18,51 @@ PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
|
|||
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "global", "publish")
|
||||
LOAD_PATH = os.path.join(PLUGINS_DIR, "global", "load")
|
||||
|
||||
# we are monkey patching `avalon.api.discover()` to allow us to load
|
||||
# plugin presets on plugins being discovered by avalon. Little bit of
|
||||
# hacking, but it allows us to add our own features without the need
|
||||
# to modify upstream code.
|
||||
|
||||
_original_discover = avalon.discover
|
||||
|
||||
|
||||
def patched_discover(superclass):
    """
    Monkey patched version of :func:`avalon.api.discover()`. It allows
    us to load presets on plugins being discovered.

    Args:
        superclass (type): plugin superclass whose implementations are
            being discovered (e.g. ``avalon.api.Loader``).

    Returns:
        list: discovered plugin classes with preset attributes applied.
    """
    # run original discover and get plugins
    plugins = _original_discover(superclass)

    # determine host application to use for finding presets
    host = avalon.registered_host().__name__.split(".")[-1]

    # map plugin superclass to preset json. Currently supported are load
    # and create (avalon.api.Loader and avalon.api.Creator)
    plugin_type = "undefined"
    if superclass.__name__.split(".")[-1] == "Loader":
        plugin_type = "load"
    elif superclass.__name__.split(".")[-1] == "Creator":
        plugin_type = "create"

    print(">>> trying to find presets for {}:{} ...".format(host, plugin_type))
    try:
        config_data = config.get_presets()['plugins'][host][plugin_type]
    except KeyError:
        # no preset section for this host/plugin type combination
        print("*** no presets found.")
    else:
        for plugin in plugins:
            if plugin.__name__ in config_data:
                print(">>> We have preset for {}".format(plugin.__name__))
                for option, value in config_data[plugin.__name__].items():
                    if option == "enabled" and value is False:
                        # a preset may disable the plugin entirely
                        setattr(plugin, "active", False)
                        print(" - is disabled by preset")
                    else:
                        setattr(plugin, option, value)
                        print(" - setting `{}`: `{}`".format(option, value))
    return plugins
|
||||
|
||||
|
||||
def install():
|
||||
log.info("Registering global plug-ins..")
|
||||
|
|
@ -21,6 +70,9 @@ def install():
|
|||
pyblish.register_discovery_filter(filter_pyblish_plugins)
|
||||
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
|
||||
# apply monkey patched discover to original one
|
||||
avalon.discover = patched_discover
|
||||
|
||||
|
||||
def uninstall():
|
||||
log.info("Deregistering global plug-ins..")
|
||||
|
|
@ -28,3 +80,6 @@ def uninstall():
|
|||
pyblish.deregister_discovery_filter(filter_pyblish_plugins)
|
||||
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
log.info("Global plug-ins unregistred")
|
||||
|
||||
# restore original discover
|
||||
avalon.discover = _original_discover
|
||||
|
|
|
|||
46
pype/api.py
46
pype/api.py
|
|
@ -18,31 +18,20 @@ from .action import (
|
|||
|
||||
from pypeapp import Logger
|
||||
|
||||
|
||||
from .templates import (
|
||||
get_project_name,
|
||||
get_project_code,
|
||||
get_hierarchy,
|
||||
get_asset,
|
||||
get_task,
|
||||
set_avalon_workdir,
|
||||
get_version_from_path,
|
||||
get_workdir_template,
|
||||
set_hierarchy,
|
||||
set_project_code
|
||||
)
|
||||
|
||||
from .lib import (
|
||||
version_up,
|
||||
get_handle_irregular,
|
||||
get_project_data,
|
||||
get_asset_data,
|
||||
get_asset,
|
||||
get_project,
|
||||
get_hierarchy,
|
||||
get_subsets,
|
||||
get_version_from_path,
|
||||
modified_environ,
|
||||
add_tool_to_environment,
|
||||
get_data_hierarchical_attr,
|
||||
get_avalon_project_template
|
||||
add_tool_to_environment
|
||||
)
|
||||
|
||||
# Special naming case for subprocess since its a built-in method.
|
||||
from .lib import _subprocess as subprocess
|
||||
|
||||
__all__ = [
|
||||
# plugin classes
|
||||
"Extractor",
|
||||
|
|
@ -54,28 +43,21 @@ __all__ = [
|
|||
# action
|
||||
"get_errored_instances_from_context",
|
||||
"RepairAction",
|
||||
"RepairContextAction",
|
||||
|
||||
"Logger",
|
||||
|
||||
"ValidationException",
|
||||
|
||||
# get contextual data
|
||||
"get_handle_irregular",
|
||||
"get_project_data",
|
||||
"get_asset_data",
|
||||
"get_project_name",
|
||||
"get_project_code",
|
||||
"version_up",
|
||||
"get_project",
|
||||
"get_hierarchy",
|
||||
"get_asset",
|
||||
"get_task",
|
||||
"set_avalon_workdir",
|
||||
"get_subsets",
|
||||
"get_version_from_path",
|
||||
"get_workdir_template",
|
||||
"modified_environ",
|
||||
"add_tool_to_environment",
|
||||
"set_hierarchy",
|
||||
"set_project_code",
|
||||
"get_data_hierarchical_attr",
|
||||
"get_avalon_project_template",
|
||||
|
||||
"subprocess"
|
||||
]
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ from pyblish import api as pyblish
|
|||
from pypeapp import execute, Logger
|
||||
|
||||
from .. import api
|
||||
from .lib import set_avalon_workdir
|
||||
|
||||
log = Logger().get_logger(__name__, "aport")
|
||||
|
||||
|
|
@ -33,7 +34,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")
|
|||
|
||||
|
||||
def install():
|
||||
api.set_avalon_workdir()
|
||||
set_avalon_workdir()
|
||||
|
||||
log.info("Registering Aport plug-ins..")
|
||||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
|
|
|
|||
|
|
@ -80,17 +80,23 @@ def publish(json_data_path, gui):
|
|||
|
||||
|
||||
@pico.expose()
|
||||
def context(project, asset, task, app):
|
||||
def context(project_name, asset, task, app):
|
||||
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
|
||||
|
||||
os.environ["AVALON_PROJECT"] = project
|
||||
os.environ["AVALON_PROJECT"] = project_name
|
||||
io.Session["AVALON_PROJECT"] = project_name
|
||||
|
||||
avalon.update_current_task(task, asset, app)
|
||||
|
||||
project_code = pype.get_project_code()
|
||||
pype.set_project_code(project_code)
|
||||
project_code = pype.get_project()["data"].get("code", '')
|
||||
|
||||
os.environ["AVALON_PROJECTCODE"] = project_code
|
||||
io.Session["AVALON_PROJECTCODE"] = project_code
|
||||
|
||||
hierarchy = pype.get_hierarchy()
|
||||
pype.set_hierarchy(hierarchy)
|
||||
os.environ["AVALON_HIERARCHY"] = hierarchy
|
||||
io.Session["AVALON_HIERARCHY"] = hierarchy
|
||||
|
||||
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
|
||||
if isinstance(v, str)}
|
||||
SESSION.update(fix_paths)
|
||||
|
|
|
|||
135
pype/aport/lib.py
Normal file
135
pype/aport/lib.py
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
from avalon import io, api as avalon, lib as avalonlib
|
||||
from pype import lib
|
||||
from pype import api as pype
|
||||
# from pypeapp.api import (Templates, Logger, format)
|
||||
from pypeapp import Logger, Anatomy
|
||||
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
|
||||
|
||||
|
||||
def get_asset():
    """
    Obtain asset name from the avalon session or environment variable.

    Returns:
        str: asset name

    Raises:
        AssertionError: when `AVALON_ASSET` is set neither in the avalon
            session nor in ``os.environ``.
    """
    lib.set_io_database()
    asset = io.Session.get("AVALON_ASSET", None) \
        or os.getenv("AVALON_ASSET", None)
    log.info("asset: {}".format(asset))
    if not asset:
        # The original used `assert asset, log.error(...)`, which is
        # stripped under `python -O` and carried `None` (the return value
        # of log.error) as the assertion message. Log and raise explicitly
        # instead, keeping the exception type callers may already expect.
        msg = "missing `AVALON_ASSET` in avalon session or os.environ!"
        log.error(msg)
        raise AssertionError(msg)
    return asset
|
||||
|
||||
|
||||
def get_context_data(
    project_name=None, hierarchy=None, asset=None, task_name=None
):
    """
    Collect all main contextual data.

    Args:
        project_name (str, optional): project name; when given it is
            written into ``os.environ`` and the avalon session.
        hierarchy (str, optional): hierarchy path; resolved from the
            current context when not given.
        asset (str, optional): asset name; resolved via
            :func:`get_asset` when not given.
        task_name (str, optional): task name; read from the avalon
            session / environment when not given.

    Returns:
        dict: contextual data (task, asset, project name and code,
            hierarchy and application directory).

    Raises:
        AssertionError: when no task name can be resolved.
    """
    if not task_name:
        lib.set_io_database()
        task_name = io.Session.get("AVALON_TASK", None) \
            or os.getenv("AVALON_TASK", None)
        if not task_name:
            # explicit raise instead of `assert` (stripped under -O)
            msg = "missing `AVALON_TASK` in avalon session or os.environ!"
            log.error(msg)
            raise AssertionError(msg)

    application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])

    # Only override the session when an explicit project was requested;
    # assigning None into os.environ would raise TypeError.
    if project_name:
        os.environ['AVALON_PROJECT'] = project_name
        io.Session['AVALON_PROJECT'] = project_name

    if not hierarchy:
        hierarchy = pype.get_hierarchy()

    project_doc = io.find_one({"type": "project"})

    data = {
        "task": task_name,
        "asset": asset or get_asset(),
        "project": {
            "name": project_doc["name"],
            "code": project_doc["data"].get("code", '')
        },
        "hierarchy": hierarchy,
        "app": application["application_dir"]
    }
    return data
|
||||
|
||||
|
||||
def set_avalon_workdir(
    project=None, hierarchy=None, asset=None, task=None
):
    """
    Updates os.environ and session with filled workdir.

    Args:
        project (str, optional): project name
        hierarchy (str, optional): hierarchy path
        asset (str, optional): asset name
        task (str, optional): task name

    Returns:
        None: the result is stored in ``os.environ["AVALON_WORKDIR"]``
            and ``io.Session["AVALON_WORKDIR"]``.
    """
    lib.set_io_database()
    awd = io.Session.get("AVALON_WORKDIR", None) or \
        os.getenv("AVALON_WORKDIR", None)

    data = get_context_data(project, hierarchy, asset, task)

    if (not awd) or ("{" not in awd):
        anatomy_filled = Anatomy(io.Session["AVALON_PROJECT"]).format(data)
        awd = anatomy_filled["work"]["folder"]

    # NOTE(review): the original called the builtin `format(awd, data)`
    # (the pypeapp `format` import is commented out at the top of this
    # module), which raises TypeError because a dict is not a valid
    # format spec. Fill the template placeholders from `data` instead.
    awd_filled = os.path.normpath(awd.format(**data))

    io.Session["AVALON_WORKDIR"] = awd_filled
    os.environ["AVALON_WORKDIR"] = awd_filled
    log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
|
||||
|
||||
|
||||
def get_workdir_template(data=None):
    """
    Obtain workdir templated path from Anatomy().

    Args:
        data (dict, optional): basic contextual data; collected via
            :func:`get_context_data` when not given.

    Returns:
        the ``work`` section of the filled anatomy.

    Raises:
        Exception: re-raised after logging when the ``work`` key is
            missing from the filled anatomy.
    """
    anatomy = Anatomy()
    anatomy_filled = anatomy.format(data or get_context_data())

    try:
        work = anatomy_filled["work"]
    except Exception as e:
        log.error(
            "{0} Error in get_workdir_template(): {1}".format(__name__, str(e))
        )
        # The original fell through to `return work` with `work` unbound,
        # which raised a confusing NameError. Re-raise the real error.
        raise

    return work
|
||||
|
|
@ -82,13 +82,19 @@ def context(project, asset, task, app):
|
|||
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
|
||||
|
||||
os.environ["AVALON_PROJECT"] = project
|
||||
io.Session["AVALON_PROJECT"] = project
|
||||
|
||||
avalon.update_current_task(task, asset, app)
|
||||
|
||||
project_code = pype.get_project_code()
|
||||
pype.set_project_code(project_code)
|
||||
project_code = pype.get_project()["data"].get("code", '')
|
||||
|
||||
os.environ["AVALON_PROJECTCODE"] = project_code
|
||||
io.Session["AVALON_PROJECTCODE"] = project_code
|
||||
|
||||
hierarchy = pype.get_hierarchy()
|
||||
pype.set_hierarchy(hierarchy)
|
||||
os.environ["AVALON_HIERARCHY"] = hierarchy
|
||||
io.Session["AVALON_HIERARCHY"] = hierarchy
|
||||
|
||||
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
|
||||
if isinstance(v, str)}
|
||||
SESSION.update(fix_paths)
|
||||
|
|
|
|||
|
|
@ -81,13 +81,19 @@ def context(project, asset, task, app):
|
|||
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
|
||||
|
||||
os.environ["AVALON_PROJECT"] = project
|
||||
io.Session["AVALON_PROJECT"] = project
|
||||
|
||||
avalon.update_current_task(task, asset, app)
|
||||
|
||||
project_code = pype.get_project_code()
|
||||
pype.set_project_code(project_code)
|
||||
project_code = pype.get_project()["data"].get("code", '')
|
||||
|
||||
os.environ["AVALON_PROJECTCODE"] = project_code
|
||||
io.Session["AVALON_PROJECTCODE"] = project_code
|
||||
|
||||
hierarchy = pype.get_hierarchy()
|
||||
pype.set_hierarchy(hierarchy)
|
||||
os.environ["AVALON_HIERARCHY"] = hierarchy
|
||||
io.Session["AVALON_HIERARCHY"] = hierarchy
|
||||
|
||||
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
|
||||
if isinstance(v, str)}
|
||||
SESSION.update(fix_paths)
|
||||
|
|
|
|||
|
|
@ -1,9 +1,14 @@
|
|||
from .clockify_api import ClockifyAPI
|
||||
from .widget_settings import ClockifySettings
|
||||
from .widget_message import MessageWidget
|
||||
from .clockify import ClockifyModule
|
||||
|
||||
__all__ = [
|
||||
'ClockifyAPI',
|
||||
'ClockifySettings',
|
||||
'ClockifyModule'
|
||||
"ClockifyAPI",
|
||||
"ClockifySettings",
|
||||
"ClockifyModule",
|
||||
"MessageWidget"
|
||||
]
|
||||
|
||||
def tray_init(tray_widget, main_widget):
|
||||
return ClockifyModule(main_widget, tray_widget)
|
||||
|
|
|
|||
|
|
@ -1,15 +1,19 @@
|
|||
import os
|
||||
import threading
|
||||
from pypeapp import style
|
||||
from pypeapp import style, Logger
|
||||
from Qt import QtWidgets
|
||||
from pype.clockify import ClockifySettings, ClockifyAPI
|
||||
from . import ClockifySettings, ClockifyAPI, MessageWidget
|
||||
|
||||
|
||||
class ClockifyModule:
|
||||
|
||||
def __init__(self, main_parent=None, parent=None):
|
||||
self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")
|
||||
|
||||
self.main_parent = main_parent
|
||||
self.parent = parent
|
||||
self.clockapi = ClockifyAPI()
|
||||
self.message_widget = None
|
||||
self.widget_settings = ClockifySettings(main_parent, self)
|
||||
self.widget_settings_required = None
|
||||
|
||||
|
|
@ -20,9 +24,10 @@ class ClockifyModule:
|
|||
self.bool_workspace_set = False
|
||||
self.bool_timer_run = False
|
||||
|
||||
def start_up(self):
|
||||
self.clockapi.set_master(self)
|
||||
self.bool_api_key_set = self.clockapi.set_api()
|
||||
|
||||
def tray_start(self):
|
||||
if self.bool_api_key_set is False:
|
||||
self.show_settings()
|
||||
return
|
||||
|
|
@ -41,7 +46,7 @@ class ClockifyModule:
|
|||
os.path.dirname(__file__),
|
||||
'ftrack_actions'
|
||||
])
|
||||
current = os.environ('FTRACK_ACTIONS_PATH', '')
|
||||
current = os.environ.get('FTRACK_ACTIONS_PATH', '')
|
||||
if current:
|
||||
current += os.pathsep
|
||||
os.environ['FTRACK_ACTIONS_PATH'] = current + actions_path
|
||||
|
|
@ -57,6 +62,25 @@ class ClockifyModule:
|
|||
current += os.pathsep
|
||||
os.environ['AVALON_ACTIONS'] = current + actions_path
|
||||
|
||||
if 'TimersManager' in modules:
|
||||
self.timer_manager = modules['TimersManager']
|
||||
self.timer_manager.add_module(self)
|
||||
|
||||
def start_timer_manager(self, data):
|
||||
self.start_timer(data)
|
||||
|
||||
def stop_timer_manager(self):
|
||||
self.stop_timer()
|
||||
|
||||
def timer_started(self, data):
|
||||
if hasattr(self, 'timer_manager'):
|
||||
self.timer_manager.start_timers(data)
|
||||
|
||||
def timer_stopped(self):
|
||||
self.bool_timer_run = False
|
||||
if hasattr(self, 'timer_manager'):
|
||||
self.timer_manager.stop_timers()
|
||||
|
||||
def start_timer_check(self):
|
||||
self.bool_thread_check_running = True
|
||||
if self.thread_timer_check is None:
|
||||
|
|
@ -75,21 +99,129 @@ class ClockifyModule:
|
|||
def check_running(self):
|
||||
import time
|
||||
while self.bool_thread_check_running is True:
|
||||
bool_timer_run = False
|
||||
if self.clockapi.get_in_progress() is not None:
|
||||
self.bool_timer_run = True
|
||||
else:
|
||||
self.bool_timer_run = False
|
||||
self.set_menu_visibility()
|
||||
bool_timer_run = True
|
||||
|
||||
if self.bool_timer_run != bool_timer_run:
|
||||
if self.bool_timer_run is True:
|
||||
self.timer_stopped()
|
||||
elif self.bool_timer_run is False:
|
||||
actual_timer = self.clockapi.get_in_progress()
|
||||
if not actual_timer:
|
||||
continue
|
||||
|
||||
actual_proj_id = actual_timer["projectId"]
|
||||
if not actual_proj_id:
|
||||
continue
|
||||
|
||||
project = self.clockapi.get_project_by_id(actual_proj_id)
|
||||
if project and project.get("code") == 501:
|
||||
continue
|
||||
|
||||
project_name = project["name"]
|
||||
|
||||
actual_timer_hierarchy = actual_timer["description"]
|
||||
hierarchy_items = actual_timer_hierarchy.split("/")
|
||||
# Each pype timer must have at least 2 items!
|
||||
if len(hierarchy_items) < 2:
|
||||
continue
|
||||
task_name = hierarchy_items[-1]
|
||||
hierarchy = hierarchy_items[:-1]
|
||||
|
||||
task_type = None
|
||||
if len(actual_timer.get("tags", [])) > 0:
|
||||
task_type = actual_timer["tags"][0].get("name")
|
||||
data = {
|
||||
"task_name": task_name,
|
||||
"hierarchy": hierarchy,
|
||||
"project_name": project_name,
|
||||
"task_type": task_type
|
||||
}
|
||||
|
||||
self.timer_started(data)
|
||||
|
||||
self.bool_timer_run = bool_timer_run
|
||||
self.set_menu_visibility()
|
||||
time.sleep(5)
|
||||
|
||||
def stop_timer(self):
|
||||
self.clockapi.finish_time_entry()
|
||||
self.bool_timer_run = False
|
||||
if self.bool_timer_run:
|
||||
self.timer_stopped()
|
||||
|
||||
def signed_in(self):
|
||||
if hasattr(self, 'timer_manager'):
|
||||
if not self.timer_manager:
|
||||
return
|
||||
|
||||
if not self.timer_manager.last_task:
|
||||
return
|
||||
|
||||
if self.timer_manager.is_running:
|
||||
self.start_timer_manager(self.timer_manager.last_task)
|
||||
|
||||
def start_timer(self, input_data):
    """Start a Clockify time entry described by ``input_data``.

    Args:
        input_data (dict): expects keys ``task_name``, ``project_name``,
            ``task_type`` and optionally ``hierarchy`` (list of parent
            entity names).
    """
    # Skip when no API key has been entered yet
    if not self.clockapi.get_api_key():
        return

    actual_timer = self.clockapi.get_in_progress()
    actual_timer_hierarchy = None
    actual_project_id = None
    if actual_timer is not None:
        actual_timer_hierarchy = actual_timer.get("description")
        actual_project_id = actual_timer.get("projectId")

    # Concatenate hierarchy and task to get description
    desc_items = list(input_data.get("hierarchy", []))
    desc_items.append(input_data["task_name"])
    description = "/".join(desc_items)

    # Check project existence
    project_name = input_data["project_name"]
    project_id = self.clockapi.get_project_id(project_name)
    if not project_id:
        self.log.warning((
            "Project \"{}\" was not found in Clockify. Timer won't start."
        ).format(project_name))

        msg = (
            "Project <b>\"{}\"</b> is not in Clockify Workspace <b>\"{}\"</b>."
            "<br><br>Please inform your Project Manager."
        ).format(project_name, str(self.clockapi.workspace))

        self.message_widget = MessageWidget(
            self.main_parent, msg, "Clockify - Info Message"
        )
        self.message_widget.closed.connect(self.on_message_widget_close)
        self.message_widget.show()

        return

    # The same timer is already running — nothing to do
    if (
        actual_timer is not None and
        description == actual_timer_hierarchy and
        project_id == actual_project_id
    ):
        return

    tag_ids = []
    task_tag_id = self.clockapi.get_tag_id(input_data["task_type"])
    if task_tag_id is not None:
        tag_ids.append(task_tag_id)

    self.clockapi.start_time_entry(
        description, project_id, tag_ids=tag_ids
    )
|
||||
|
||||
def on_message_widget_close(self):
|
||||
self.message_widget = None
|
||||
|
||||
# Definition of Tray menu
|
||||
def tray_menu(self, parent):
|
||||
def tray_menu(self, parent_menu):
|
||||
# Menu for Tray App
|
||||
self.menu = QtWidgets.QMenu('Clockify', parent)
|
||||
self.menu = QtWidgets.QMenu('Clockify', parent_menu)
|
||||
self.menu.setProperty('submenu', 'on')
|
||||
self.menu.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
|
|
@ -109,7 +241,7 @@ class ClockifyModule:
|
|||
|
||||
self.set_menu_visibility()
|
||||
|
||||
return self.menu
|
||||
parent_menu.addMenu(self.menu)
|
||||
|
||||
def show_settings(self):
|
||||
self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import re
|
||||
import requests
|
||||
import json
|
||||
import datetime
|
||||
|
|
@ -22,7 +23,9 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
|
||||
file_name = 'clockify.json'
|
||||
fpath = os.path.join(app_dir, file_name)
|
||||
admin_permission_names = ['WORKSPACE_OWN', 'WORKSPACE_ADMIN']
|
||||
master_parent = None
|
||||
workspace = None
|
||||
workspace_id = None
|
||||
|
||||
def set_master(self, master_parent):
|
||||
|
|
@ -41,6 +44,8 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
if api_key is not None and self.validate_api_key(api_key) is True:
|
||||
self.headers["X-Api-Key"] = api_key
|
||||
self.set_workspace()
|
||||
if self.master_parent:
|
||||
self.master_parent.signed_in()
|
||||
return True
|
||||
return False
|
||||
|
||||
|
|
@ -55,31 +60,41 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
return False
|
||||
return True
|
||||
|
||||
def validate_workspace_perm(self):
|
||||
test_project = '__test__'
|
||||
action_url = 'workspaces/{}/projects/'.format(self.workspace_id)
|
||||
body = {
|
||||
"name": test_project, "clientId": "", "isPublic": "false",
|
||||
"estimate": {"type": "AUTO"},
|
||||
"color": "#f44336", "billable": "true"
|
||||
}
|
||||
response = requests.post(
|
||||
self.endpoint + action_url,
|
||||
headers=self.headers, json=body
|
||||
def validate_workspace_perm(self, workspace_id=None):
|
||||
user_id = self.get_user_id()
|
||||
if user_id is None:
|
||||
return False
|
||||
if workspace_id is None:
|
||||
workspace_id = self.workspace_id
|
||||
action_url = "/workspaces/{}/users/{}/permissions".format(
|
||||
workspace_id, user_id
|
||||
)
|
||||
if response.status_code == 201:
|
||||
self.delete_project(self.get_project_id(test_project))
|
||||
return True
|
||||
else:
|
||||
projects = self.get_projects()
|
||||
if test_project in projects:
|
||||
try:
|
||||
self.delete_project(self.get_project_id(test_project))
|
||||
return True
|
||||
except json.decoder.JSONDecodeError:
|
||||
return False
|
||||
response = requests.get(
|
||||
self.endpoint + action_url,
|
||||
headers=self.headers
|
||||
)
|
||||
user_permissions = response.json()
|
||||
for perm in user_permissions:
|
||||
if perm['name'] in self.admin_permission_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_user_id(self):
    """Return the current Clockify user id, or None when it cannot be found."""
    action_url = 'v1/user/'
    response = requests.get(
        self.endpoint + action_url,
        headers=self.headers
    )
    # this regex is necessary: UNICODE strings are crashing
    # during json serialization
    # NOTE: the original pattern was a non-raw string with invalid
    # escape sequences (`\:`, `\"`) and redundant `{1}` quantifiers;
    # this raw-string pattern matches the same `"id":"<word>"` text.
    id_regex = r'"id":"\w+"'
    result = re.findall(id_regex, str(response.content))
    if len(result) != 1:
        # replace with log and better message?
        print('User ID was not found (this is a BUG!!!)')
        return None
    return json.loads('{' + result[0] + '}')['id']
|
||||
|
||||
def set_workspace(self, name=None):
|
||||
if name is None:
|
||||
name = os.environ.get('CLOCKIFY_WORKSPACE', None)
|
||||
|
|
@ -147,6 +162,19 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
project["name"]: project["id"] for project in response.json()
|
||||
}
|
||||
|
||||
def get_project_by_id(self, project_id, workspace_id=None):
    """Return the Clockify project record for ``project_id``.

    Args:
        project_id (str): Clockify project identifier.
        workspace_id (str, optional): workspace to query; defaults to
            the instance workspace.

    Returns:
        decoded JSON response from the Clockify API.
    """
    if workspace_id is None:
        workspace_id = self.workspace_id
    url = "{}workspaces/{}/projects/{}/".format(
        self.endpoint, workspace_id, project_id
    )
    response = requests.get(url, headers=self.headers)
    return response.json()
|
||||
|
||||
def get_tags(self, workspace_id=None):
|
||||
if workspace_id is None:
|
||||
workspace_id = self.workspace_id
|
||||
|
|
@ -279,6 +307,9 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
if workspace_id is None:
|
||||
workspace_id = self.workspace_id
|
||||
current = self.get_in_progress(workspace_id)
|
||||
if current is None:
|
||||
return
|
||||
|
||||
current_id = current["id"]
|
||||
action_url = 'workspaces/{}/timeEntries/{}'.format(
|
||||
workspace_id, current_id
|
||||
|
|
|
|||
|
|
@ -1,108 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pype.clockify import ClockifyAPI
|
||||
|
||||
|
||||
class StartClockify(BaseAction):
    '''Starts timer on clockify.'''

    #: Action identifier.
    identifier = 'clockify.start.timer'
    #: Action label.
    label = 'Start timer'
    #: Action description.
    description = 'Starts timer on clockify'
    #: Icon served from the statics server.
    icon = '{}/app_icons/clockify.png'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )
    #: Clockify api
    clockapi = ClockifyAPI()

    def discover(self, session, entities, event):
        """Show the action only for a single task when a workspace is set."""
        return (
            len(entities) == 1
            and entities[0].entity_type.lower() == 'task'
            and self.clockapi.workspace_id is not None
        )

    def launch(self, session, entities, event):
        """Start a Clockify time entry for the selected task."""
        task = entities[0]
        task_name = task['type']['name']
        project_name = task['project']['full_name']

        # Collect parent names up to (but not including) the project,
        # ordered root-most first.
        parent_names = []
        entity = task['parent']
        while entity.entity_type.lower() != 'project':
            parent_names.append(entity['name'])
            entity = entity['parent']
        parent_names.reverse()

        parent_names.append(task['name'])
        description = '/'.join(parent_names)

        project_id = self.clockapi.get_project_id(project_name)
        tag_ids = [self.clockapi.get_tag_id(task_name)]
        self.clockapi.start_time_entry(
            description, project_id, tag_ids=tag_ids
        )

        return True
|
||||
|
||||
|
||||
def register(session, **kw):
    '''Register plugin. Called when used as an plugin.'''
    # Register only against a genuine ftrack API session object;
    # anything else (e.g. the legacy API) is silently ignored.
    if isinstance(session, ftrack_api.session.Session):
        StartClockify(session).register()
|
||||
|
||||
|
||||
def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {
        logging.getLevelName(level).lower(): level
        for level in (
            logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
            logging.ERROR, logging.CRITICAL
        )
    }

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()
    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
@ -17,10 +17,8 @@ class SyncClocify(BaseAction):
|
|||
label = 'Sync To Clockify'
|
||||
#: Action description.
|
||||
description = 'Synchronise data to Clockify workspace'
|
||||
#: priority
|
||||
priority = 100
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
role_list = ["Pypeclub", "Administrator", "project Manager"]
|
||||
#: icon
|
||||
icon = '{}/app_icons/clockify-white.png'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
|
|
@ -28,16 +26,22 @@ class SyncClocify(BaseAction):
|
|||
#: CLockifyApi
|
||||
clockapi = ClockifyAPI()
|
||||
|
||||
def register(self):
|
||||
def preregister(self):
|
||||
if self.clockapi.workspace_id is None:
|
||||
raise ValueError('Clockify Workspace or API key are not set!')
|
||||
return "Clockify Workspace or API key are not set!"
|
||||
|
||||
if self.clockapi.validate_workspace_perm() is False:
|
||||
raise MissingPermision('Clockify')
|
||||
super().register()
|
||||
|
||||
return True
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
if entities[0].entity_type.lower() != "project":
|
||||
return False
|
||||
return True
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
|
|
|
|||
91
pype/clockify/widget_message.py
Normal file
91
pype/clockify/widget_message.py
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
from Qt import QtCore, QtGui, QtWidgets
|
||||
from pypeapp import style
|
||||
|
||||
|
||||
class MessageWidget(QtWidgets.QWidget):
|
||||
|
||||
SIZE_W = 300
|
||||
SIZE_H = 130
|
||||
|
||||
closed = QtCore.Signal()
|
||||
|
||||
def __init__(self, parent=None, messages=[], title="Message"):
|
||||
|
||||
super(MessageWidget, self).__init__()
|
||||
|
||||
self._parent = parent
|
||||
|
||||
# Icon
|
||||
if parent and hasattr(parent, 'icon'):
|
||||
self.setWindowIcon(parent.icon)
|
||||
else:
|
||||
from pypeapp.resources import get_resource
|
||||
self.setWindowIcon(QtGui.QIcon(get_resource('icon.png')))
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowCloseButtonHint |
|
||||
QtCore.Qt.WindowMinimizeButtonHint
|
||||
)
|
||||
|
||||
# Font
|
||||
self.font = QtGui.QFont()
|
||||
self.font.setFamily("DejaVu Sans Condensed")
|
||||
self.font.setPointSize(9)
|
||||
self.font.setBold(True)
|
||||
self.font.setWeight(50)
|
||||
self.font.setKerning(True)
|
||||
|
||||
# Size setting
|
||||
self.resize(self.SIZE_W, self.SIZE_H)
|
||||
self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
|
||||
self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
|
||||
|
||||
# Style
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
self.setLayout(self._ui_layout(messages))
|
||||
self.setWindowTitle(title)
|
||||
|
||||
def _ui_layout(self, messages):
|
||||
if not messages:
|
||||
messages = ["*Misssing messages (This is a bug)*", ]
|
||||
|
||||
elif not isinstance(messages, (tuple, list)):
|
||||
messages = [messages, ]
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
|
||||
labels = []
|
||||
for message in messages:
|
||||
label = QtWidgets.QLabel(message)
|
||||
label.setFont(self.font)
|
||||
label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
|
||||
label.setTextFormat(QtCore.Qt.RichText)
|
||||
label.setWordWrap(True)
|
||||
|
||||
labels.append(label)
|
||||
main_layout.addWidget(label)
|
||||
|
||||
btn_close = QtWidgets.QPushButton("Close")
|
||||
btn_close.setToolTip('Close this window')
|
||||
btn_close.clicked.connect(self.on_close_clicked)
|
||||
|
||||
btn_group = QtWidgets.QHBoxLayout()
|
||||
btn_group.addStretch(1)
|
||||
btn_group.addWidget(btn_close)
|
||||
|
||||
main_layout.addLayout(btn_group)
|
||||
|
||||
self.labels = labels
|
||||
self.btn_group = btn_group
|
||||
self.btn_close = btn_close
|
||||
self.main_layout = main_layout
|
||||
|
||||
return main_layout
|
||||
|
||||
def on_close_clicked(self):
|
||||
self.close()
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
self.closed.emit()
|
||||
super(MessageWidget, self).close(*args, **kwargs)
|
||||
|
|
@ -4,21 +4,18 @@ import time
|
|||
from pype.ftrack import AppAction
|
||||
from avalon import lib
|
||||
from pypeapp import Logger
|
||||
from pype import lib as pypelib
|
||||
from pype.lib import get_all_avalon_projects
|
||||
|
||||
log = Logger().get_logger(__name__)
|
||||
|
||||
|
||||
def registerApp(app, session):
|
||||
def registerApp(app, session, plugins_presets):
|
||||
name = app['name']
|
||||
variant = ""
|
||||
try:
|
||||
variant = app['name'].split("_")[1]
|
||||
except Exception:
|
||||
log.warning((
|
||||
'"{0}" - App "name" and "variant" is not separated by "_"'
|
||||
' (variant is not set)'
|
||||
).format(app['name']))
|
||||
pass
|
||||
|
||||
abspath = lib.which_app(app['name'])
|
||||
if abspath is None:
|
||||
|
|
@ -44,29 +41,42 @@ def registerApp(app, session):
|
|||
# register action
|
||||
AppAction(
|
||||
session, label, name, executable, variant,
|
||||
icon, description, preactions
|
||||
icon, description, preactions, plugins_presets
|
||||
).register()
|
||||
|
||||
if not variant:
|
||||
log.info('- Variant is not set')
|
||||
|
||||
def register(session):
|
||||
projects = pypelib.get_all_avalon_projects()
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
# WARNING getting projects only helps to check connection to mongo
|
||||
# - without will `discover` of ftrack apps actions take ages
|
||||
result = get_all_avalon_projects()
|
||||
|
||||
apps = []
|
||||
appNames = []
|
||||
# Get all application from all projects
|
||||
for project in projects:
|
||||
for app in project['config']['apps']:
|
||||
if app['name'] not in appNames:
|
||||
appNames.append(app['name'])
|
||||
apps.append(app)
|
||||
|
||||
launchers_path = os.path.join(os.environ["PYPE_CONFIG"], "launchers")
|
||||
for file in os.listdir(launchers_path):
|
||||
filename, ext = os.path.splitext(file)
|
||||
if ext.lower() != ".toml":
|
||||
continue
|
||||
loaded_data = toml.load(os.path.join(launchers_path, file))
|
||||
app_data = {
|
||||
"name": filename,
|
||||
"label": loaded_data.get("label", filename)
|
||||
}
|
||||
apps.append(app_data)
|
||||
|
||||
apps = sorted(apps, key=lambda x: x['name'])
|
||||
app_counter = 0
|
||||
for app in apps:
|
||||
try:
|
||||
registerApp(app, session)
|
||||
registerApp(app, session, plugins_presets)
|
||||
if app_counter%5 == 0:
|
||||
time.sleep(0.1)
|
||||
app_counter += 1
|
||||
except Exception as e:
|
||||
log.exception("'{0}' - not proper App ({1})".format(app['name'], e))
|
||||
except Exception as exc:
|
||||
log.exception(
|
||||
"\"{}\" - not a proper App ({})".format(app['name'], str(exc)),
|
||||
exc_info=True
|
||||
)
|
||||
|
|
|
|||
|
|
@ -78,7 +78,7 @@ class AssetDelete(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -87,7 +87,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
AssetDelete(session).register()
|
||||
AssetDelete(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
286
pype/ftrack/actions/action_attributes_remapper.py
Normal file
286
pype/ftrack/actions/action_attributes_remapper.py
Normal file
|
|
@ -0,0 +1,286 @@
|
|||
import os
|
||||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class AttributesRemapper(BaseAction):
|
||||
'''Edit meta data action.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'attributes.remapper'
|
||||
#: Action label.
|
||||
label = "Pype Doctor"
|
||||
variant = '- Attributes Remapper'
|
||||
#: Action description.
|
||||
description = 'Remaps attributes in avalon DB'
|
||||
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator"]
|
||||
icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
db_con = DbConnector()
|
||||
keys_to_change = {
|
||||
"fstart": "frameStart",
|
||||
"startFrame": "frameStart",
|
||||
"edit_in": "frameStart",
|
||||
|
||||
"fend": "frameEnd",
|
||||
"endFrame": "frameEnd",
|
||||
"edit_out": "frameEnd",
|
||||
|
||||
"handle_start": "handleStart",
|
||||
"handle_end": "handleEnd",
|
||||
"handles": ["handleEnd", "handleStart"],
|
||||
|
||||
"frameRate": "fps",
|
||||
"framerate": "fps",
|
||||
"resolution_width": "resolutionWidth",
|
||||
"resolution_height": "resolutionHeight",
|
||||
"pixel_aspect": "pixelAspect"
|
||||
}
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
||||
return True
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
title = 'Select Projects where attributes should be remapped'
|
||||
|
||||
items = []
|
||||
|
||||
selection_enum = {
|
||||
'label': 'Process type',
|
||||
'type': 'enumerator',
|
||||
'name': 'process_type',
|
||||
'data': [
|
||||
{
|
||||
'label': 'Selection',
|
||||
'value': 'selection'
|
||||
}, {
|
||||
'label': 'Inverted selection',
|
||||
'value': 'except'
|
||||
}
|
||||
],
|
||||
'value': 'selection'
|
||||
}
|
||||
selection_label = {
|
||||
'type': 'label',
|
||||
'value': (
|
||||
'Selection based variants:<br/>'
|
||||
'- `Selection` - '
|
||||
'NOTHING is processed when nothing is selected<br/>'
|
||||
'- `Inverted selection` - '
|
||||
'ALL Projects are processed when nothing is selected'
|
||||
)
|
||||
}
|
||||
|
||||
items.append(selection_enum)
|
||||
items.append(selection_label)
|
||||
|
||||
item_splitter = {'type': 'label', 'value': '---'}
|
||||
|
||||
all_projects = session.query('Project').all()
|
||||
for project in all_projects:
|
||||
item_label = {
|
||||
'type': 'label',
|
||||
'value': '{} (<i>{}</i>)'.format(
|
||||
project['full_name'], project['name']
|
||||
)
|
||||
}
|
||||
item = {
|
||||
'name': project['id'],
|
||||
'type': 'boolean',
|
||||
'value': False
|
||||
}
|
||||
if len(items) > 0:
|
||||
items.append(item_splitter)
|
||||
items.append(item_label)
|
||||
items.append(item)
|
||||
|
||||
if len(items) == 0:
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'Didn\'t found any projects'
|
||||
}
|
||||
else:
|
||||
return {
|
||||
'items': items,
|
||||
'title': title
|
||||
}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if 'values' not in event['data']:
|
||||
return
|
||||
|
||||
values = event['data']['values']
|
||||
process_type = values.pop('process_type')
|
||||
|
||||
selection = True
|
||||
if process_type == 'except':
|
||||
selection = False
|
||||
|
||||
interface_messages = {}
|
||||
|
||||
projects_to_update = []
|
||||
for project_id, update_bool in values.items():
|
||||
if not update_bool and selection:
|
||||
continue
|
||||
|
||||
if update_bool and not selection:
|
||||
continue
|
||||
|
||||
project = session.query(
|
||||
'Project where id is "{}"'.format(project_id)
|
||||
).one()
|
||||
projects_to_update.append(project)
|
||||
|
||||
if not projects_to_update:
|
||||
self.log.debug('Nothing to update')
|
||||
return {
|
||||
'success': True,
|
||||
'message': 'Nothing to update'
|
||||
}
|
||||
|
||||
|
||||
self.db_con.install()
|
||||
|
||||
relevant_types = ["project", "asset", "version"]
|
||||
|
||||
for ft_project in projects_to_update:
|
||||
self.log.debug(
|
||||
"Processing project \"{}\"".format(ft_project["full_name"])
|
||||
)
|
||||
|
||||
self.db_con.Session["AVALON_PROJECT"] = ft_project["full_name"]
|
||||
project = self.db_con.find_one({'type': 'project'})
|
||||
if not project:
|
||||
key = "Projects not synchronized to db"
|
||||
if key not in interface_messages:
|
||||
interface_messages[key] = []
|
||||
interface_messages[key].append(ft_project["full_name"])
|
||||
continue
|
||||
|
||||
# Get all entities in project collection from MongoDB
|
||||
_entities = self.db_con.find({})
|
||||
for _entity in _entities:
|
||||
ent_t = _entity.get("type", "*unknown type")
|
||||
name = _entity.get("name", "*unknown name")
|
||||
|
||||
self.log.debug(
|
||||
"- {} ({})".format(name, ent_t)
|
||||
)
|
||||
|
||||
# Skip types that do not store keys to change
|
||||
if ent_t.lower() not in relevant_types:
|
||||
self.log.debug("-- skipping - type is not relevant")
|
||||
continue
|
||||
|
||||
# Get data which will change
|
||||
updating_data = {}
|
||||
source_data = _entity["data"]
|
||||
|
||||
for key_from, key_to in self.keys_to_change.items():
|
||||
# continue if final key already exists
|
||||
if type(key_to) == list:
|
||||
for key in key_to:
|
||||
# continue if final key was set in update_data
|
||||
if key in updating_data:
|
||||
continue
|
||||
|
||||
# continue if source key not exist or value is None
|
||||
value = source_data.get(key_from)
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
self.log.debug(
|
||||
"-- changing key {} to {}".format(
|
||||
key_from,
|
||||
key
|
||||
)
|
||||
)
|
||||
|
||||
updating_data[key] = value
|
||||
else:
|
||||
if key_to in source_data:
|
||||
continue
|
||||
|
||||
# continue if final key was set in update_data
|
||||
if key_to in updating_data:
|
||||
continue
|
||||
|
||||
# continue if source key not exist or value is None
|
||||
value = source_data.get(key_from)
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
self.log.debug(
|
||||
"-- changing key {} to {}".format(key_from, key_to)
|
||||
)
|
||||
updating_data[key_to] = value
|
||||
|
||||
# Pop out old keys from entity
|
||||
is_obsolete = False
|
||||
for key in self.keys_to_change:
|
||||
if key not in source_data:
|
||||
continue
|
||||
is_obsolete = True
|
||||
source_data.pop(key)
|
||||
|
||||
# continue if there is nothing to change
|
||||
if not is_obsolete and not updating_data:
|
||||
self.log.debug("-- nothing to change")
|
||||
continue
|
||||
|
||||
source_data.update(updating_data)
|
||||
|
||||
self.db_con.update_many(
|
||||
{"_id": _entity["_id"]},
|
||||
{"$set": {"data": source_data}}
|
||||
)
|
||||
|
||||
self.db_con.uninstall()
|
||||
|
||||
if interface_messages:
|
||||
self.show_interface_from_dict(
|
||||
messages=interface_messages,
|
||||
title="Errors during remapping attributes",
|
||||
event=event
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def show_interface_from_dict(self, event, messages, title=""):
|
||||
items = []
|
||||
|
||||
for key, value in messages.items():
|
||||
if not value:
|
||||
continue
|
||||
subtitle = {'type': 'label', 'value': '# {}'.format(key)}
|
||||
items.append(subtitle)
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
message = {
|
||||
'type': 'label', 'value': '<p>{}</p>'.format(item)
|
||||
}
|
||||
items.append(message)
|
||||
else:
|
||||
message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
|
||||
items.append(message)
|
||||
|
||||
self.show_interface(items=items, title=title, event=event)
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
AttributesRemapper(session, plugins_presets).register()
|
||||
|
|
@ -53,12 +53,12 @@ class ClientReviewSort(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
action_handler = ClientReviewSort(session)
|
||||
action_handler = ClientReviewSort(session, plugins_presets)
|
||||
action_handler.register()
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ class ComponentOpen(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -74,7 +74,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ComponentOpen(session).register()
|
||||
ComponentOpen(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import logging
|
|||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction, get_ca_mongoid
|
||||
from pypeapp import config
|
||||
from ftrack_api.exception import NoResultFoundError
|
||||
|
||||
"""
|
||||
This action creates/updates custom attributes.
|
||||
|
|
@ -109,27 +110,21 @@ class CustomAttributes(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'create.update.attributes'
|
||||
#: Action label.
|
||||
label = 'Create/Update Avalon Attributes'
|
||||
label = "Pype Admin"
|
||||
variant = '- Create/Update Avalon Attributes'
|
||||
#: Action description.
|
||||
description = 'Creates Avalon/Mongo ID for double check'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/CustomAttributes.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
def __init__(self, session):
|
||||
super().__init__(session)
|
||||
|
||||
self.types = {}
|
||||
self.object_type_ids = {}
|
||||
self.groups = {}
|
||||
self.security_roles = {}
|
||||
self.required_keys = ['key', 'label', 'type']
|
||||
self.type_posibilities = [
|
||||
'text', 'boolean', 'date', 'enumerator',
|
||||
'dynamic enumerator', 'number'
|
||||
]
|
||||
required_keys = ['key', 'label', 'type']
|
||||
type_posibilities = [
|
||||
'text', 'boolean', 'date', 'enumerator',
|
||||
'dynamic enumerator', 'number'
|
||||
]
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
'''
|
||||
|
|
@ -139,8 +134,12 @@ class CustomAttributes(BaseAction):
|
|||
return True
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
# JOB SETTINGS
|
||||
self.types = {}
|
||||
self.object_type_ids = {}
|
||||
self.groups = {}
|
||||
self.security_roles = {}
|
||||
|
||||
# JOB SETTINGS
|
||||
userId = event['source']['user']['id']
|
||||
user = session.query('User where id is ' + userId).one()
|
||||
|
||||
|
|
@ -159,11 +158,14 @@ class CustomAttributes(BaseAction):
|
|||
job['status'] = 'done'
|
||||
session.commit()
|
||||
|
||||
except Exception as e:
|
||||
except Exception as exc:
|
||||
session.rollback()
|
||||
job['status'] = 'failed'
|
||||
session.commit()
|
||||
self.log.error('Creating custom attributes failed ({})'.format(e))
|
||||
self.log.error(
|
||||
'Creating custom attributes failed ({})'.format(exc),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
|
@ -226,24 +228,30 @@ class CustomAttributes(BaseAction):
|
|||
def custom_attributes_from_file(self, session, event):
|
||||
presets = config.get_presets()['ftrack']['ftrack_custom_attributes']
|
||||
|
||||
for cust_attr_name in presets:
|
||||
for cust_attr_data in presets:
|
||||
cust_attr_name = cust_attr_data.get(
|
||||
'label',
|
||||
cust_attr_data.get('key')
|
||||
)
|
||||
try:
|
||||
data = {}
|
||||
cust_attr = presets[cust_attr_name]
|
||||
# Get key, label, type
|
||||
data.update(self.get_required(cust_attr))
|
||||
data.update(self.get_required(cust_attr_data))
|
||||
# Get hierachical/ entity_type/ object_id
|
||||
data.update(self.get_entity_type(cust_attr))
|
||||
data.update(self.get_entity_type(cust_attr_data))
|
||||
# Get group, default, security roles
|
||||
data.update(self.get_optional(cust_attr))
|
||||
data.update(self.get_optional(cust_attr_data))
|
||||
# Process data
|
||||
self.process_attribute(data)
|
||||
|
||||
except CustAttrException as cae:
|
||||
msg = 'Custom attribute error "{}" - {}'.format(
|
||||
cust_attr_name, str(cae)
|
||||
)
|
||||
self.log.warning(msg)
|
||||
if cust_attr_name:
|
||||
msg = 'Custom attribute error "{}" - {}'.format(
|
||||
cust_attr_name, str(cae)
|
||||
)
|
||||
else:
|
||||
msg = 'Custom attribute error - {}'.format(str(cae))
|
||||
self.log.warning(msg, exc_info=True)
|
||||
self.show_message(event, msg)
|
||||
|
||||
return True
|
||||
|
|
@ -422,9 +430,10 @@ class CustomAttributes(BaseAction):
|
|||
|
||||
def get_security_role(self, security_roles):
|
||||
roles = []
|
||||
if len(security_roles) == 0 or security_roles[0] == 'ALL':
|
||||
security_roles_lowered = [role.lower() for role in security_roles]
|
||||
if len(security_roles) == 0 or 'all' in security_roles_lowered:
|
||||
roles = self.get_role_ALL()
|
||||
elif security_roles[0] == 'except':
|
||||
elif security_roles_lowered[0] == 'except':
|
||||
excepts = security_roles[1:]
|
||||
all = self.get_role_ALL()
|
||||
for role in all:
|
||||
|
|
@ -443,10 +452,10 @@ class CustomAttributes(BaseAction):
|
|||
role = self.session.query(query).one()
|
||||
self.security_roles[role_name] = role
|
||||
roles.append(role)
|
||||
except Exception:
|
||||
raise CustAttrException(
|
||||
'Securit role "{}" does not exist'.format(role_name)
|
||||
)
|
||||
except NoResultFoundError:
|
||||
raise CustAttrException((
|
||||
'Securit role "{}" does not exist'
|
||||
).format(role_name))
|
||||
|
||||
return roles
|
||||
|
||||
|
|
@ -560,7 +569,7 @@ class CustomAttributes(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -569,7 +578,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CustomAttributes(session).register()
|
||||
CustomAttributes(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import re
|
|||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from avalon import lib as avalonlib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from pypeapp import config, Anatomy
|
||||
|
||||
|
||||
|
|
@ -30,11 +30,13 @@ class CreateFolders(BaseAction):
|
|||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
not_allowed = ['assetversion']
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
not_allowed = ['assetversion', 'project']
|
||||
if entities[0].entity_type.lower() in not_allowed:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
|
|
@ -322,13 +324,13 @@ class PartialDict(dict):
|
|||
return '{'+key+'}'
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CreateFolders(session).register()
|
||||
CreateFolders(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -13,9 +13,9 @@ class CreateProjectFolders(BaseAction):
|
|||
'''Edit meta data action.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'create.project.folders'
|
||||
identifier = 'create.project.structure'
|
||||
#: Action label.
|
||||
label = 'Create Project Folders'
|
||||
label = 'Create Project Structure'
|
||||
#: Action description.
|
||||
description = 'Creates folder structure'
|
||||
#: roles that are allowed to register this action
|
||||
|
|
@ -31,6 +31,11 @@ class CreateProjectFolders(BaseAction):
|
|||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
if entities[0].entity_type.lower() != "project":
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
|
@ -190,13 +195,13 @@ class CreateProjectFolders(BaseAction):
|
|||
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CreateProjectFolders(session).register()
|
||||
CreateProjectFolders(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
@ -12,14 +12,15 @@ class CustomAttributeDoctor(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'custom.attributes.doctor'
|
||||
#: Action label.
|
||||
label = 'Custom Attributes Doctor'
|
||||
label = "Pype Doctor"
|
||||
variant = '- Custom Attributes Doctor'
|
||||
#: Action description.
|
||||
description = (
|
||||
'Fix hierarchical custom attributes mainly handles, fstart'
|
||||
' and fend'
|
||||
)
|
||||
|
||||
icon = '{}/ftrack/action_icons/TestAction.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
hierarchical_ca = ['handle_start', 'handle_end', 'fstart', 'fend']
|
||||
|
|
@ -286,13 +287,13 @@ class CustomAttributeDoctor(BaseAction):
|
|||
return all_roles
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CustomAttributeDoctor(session).register()
|
||||
CustomAttributeDoctor(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from bson.objectid import ObjectId
|
|||
import argparse
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class DeleteAsset(BaseAction):
|
||||
|
|
@ -311,7 +311,7 @@ class DeleteAsset(BaseAction):
|
|||
return assets
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -320,7 +320,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
DeleteAsset(session).register()
|
||||
DeleteAsset(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import logging
|
|||
import argparse
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class AssetsRemover(BaseAction):
|
||||
|
|
@ -13,12 +13,13 @@ class AssetsRemover(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'remove.assets'
|
||||
#: Action label.
|
||||
label = 'Delete Assets by Name'
|
||||
label = "Pype Admin"
|
||||
variant = '- Delete Assets by Name'
|
||||
#: Action description.
|
||||
description = 'Removes assets from Ftrack and Avalon db with all childs'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/AssetsRemover.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
#: Db
|
||||
|
|
@ -131,7 +132,7 @@ class AssetsRemover(BaseAction):
|
|||
return assets
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -140,7 +141,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
AssetsRemover(session).register()
|
||||
AssetsRemover(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ class VersionsCleanup(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -51,7 +51,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
VersionsCleanup(session).register()
|
||||
VersionsCleanup(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -21,9 +21,9 @@ class DJVViewAction(BaseAction):
|
|||
)
|
||||
type = 'Application'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
self.djv_path = None
|
||||
|
||||
self.config_data = config.get_presets()['djv_view']['config']
|
||||
|
|
@ -218,12 +218,12 @@ class DJVViewAction(BaseAction):
|
|||
return True
|
||||
|
||||
|
||||
def register(session):
|
||||
def register(session, plugins_presets={}):
|
||||
"""Register hooks."""
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
DJVViewAction(session).register()
|
||||
DJVViewAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -14,12 +14,13 @@ class JobKiller(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'job.killer'
|
||||
#: Action label.
|
||||
label = 'Job Killer'
|
||||
label = "Pype Admin"
|
||||
variant = '- Job Killer'
|
||||
#: Action description.
|
||||
description = 'Killing selected running jobs'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/JobKiller.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -117,7 +118,7 @@ class JobKiller(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -126,7 +127,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
JobKiller(session).register()
|
||||
JobKiller(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -112,13 +112,13 @@ class MultipleNotes(BaseAction):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
MultipleNotes(session).register()
|
||||
MultipleNotes(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
378
pype/ftrack/actions/action_prepare_project.py
Normal file
378
pype/ftrack/actions/action_prepare_project.py
Normal file
|
|
@ -0,0 +1,378 @@
|
|||
import os
|
||||
import json
|
||||
|
||||
from ruamel import yaml
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pypeapp import config
|
||||
from pype.ftrack.lib import get_avalon_attr
|
||||
|
||||
from pype.vendor.ftrack_api import session as fa_session
|
||||
|
||||
|
||||
class PrepareProject(BaseAction):
|
||||
'''Edit meta data action.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'prepare.project'
|
||||
#: Action label.
|
||||
label = 'Prepare Project'
|
||||
#: Action description.
|
||||
description = 'Set basic attributes on the project'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator", "Project manager"]
|
||||
icon = '{}/ftrack/action_icons/PrepareProject.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
# Key to store info about trigerring create folder structure
|
||||
create_project_structure_key = "create_folder_structure"
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
if entities[0].entity_type.lower() != "project":
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
# Inform user that this may take a while
|
||||
self.show_message(event, "Preparing data... Please wait", True)
|
||||
|
||||
self.log.debug("Loading custom attributes")
|
||||
cust_attrs, hier_cust_attrs = get_avalon_attr(session, True)
|
||||
project_defaults = config.get_presets(
|
||||
entities[0]["full_name"]
|
||||
).get("ftrack", {}).get("project_defaults", {})
|
||||
|
||||
self.log.debug("Preparing data which will be shown")
|
||||
attributes_to_set = {}
|
||||
for attr in hier_cust_attrs:
|
||||
key = attr["key"]
|
||||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": project_defaults.get(key)
|
||||
}
|
||||
|
||||
for attr in cust_attrs:
|
||||
if attr["entity_type"].lower() != "show":
|
||||
continue
|
||||
key = attr["key"]
|
||||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": project_defaults.get(key)
|
||||
}
|
||||
|
||||
# Sort by label
|
||||
attributes_to_set = dict(sorted(
|
||||
attributes_to_set.items(),
|
||||
key=lambda x: x[1]["label"]
|
||||
))
|
||||
self.log.debug("Preparing interface for keys: \"{}\"".format(
|
||||
str([key for key in attributes_to_set])
|
||||
))
|
||||
|
||||
item_splitter = {'type': 'label', 'value': '---'}
|
||||
title = "Prepare Project"
|
||||
items = []
|
||||
|
||||
# Ask if want to trigger Action Create Folder Structure
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Want to create basic Folder Structure?</h3>"
|
||||
})
|
||||
|
||||
items.append({
|
||||
"name": self.create_project_structure_key,
|
||||
"type": "boolean",
|
||||
"value": False,
|
||||
"label": "Check if Yes"
|
||||
})
|
||||
|
||||
items.append(item_splitter)
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Set basic Attributes:</h3>"
|
||||
})
|
||||
|
||||
multiselect_enumerators = []
|
||||
|
||||
# This item will be last (before enumerators)
|
||||
# - sets value of auto synchronization
|
||||
auto_sync_name = "avalon_auto_sync"
|
||||
auto_sync_item = {
|
||||
"name": auto_sync_name,
|
||||
"type": "boolean",
|
||||
"value": project_defaults.get(auto_sync_name, False),
|
||||
"label": "AutoSync to Avalon"
|
||||
}
|
||||
|
||||
for key, in_data in attributes_to_set.items():
|
||||
attr = in_data["object"]
|
||||
|
||||
# initial item definition
|
||||
item = {
|
||||
"name": key,
|
||||
"label": in_data["label"]
|
||||
}
|
||||
|
||||
# cust attr type - may have different visualization
|
||||
type_name = attr["type"]["name"].lower()
|
||||
easy_types = ["text", "boolean", "date", "number"]
|
||||
|
||||
easy_type = False
|
||||
if type_name in easy_types:
|
||||
easy_type = True
|
||||
|
||||
elif type_name == "enumerator":
|
||||
|
||||
attr_config = json.loads(attr["config"])
|
||||
attr_config_data = json.loads(attr_config["data"])
|
||||
|
||||
if attr_config["multiSelect"] is True:
|
||||
multiselect_enumerators.append(item_splitter)
|
||||
|
||||
multiselect_enumerators.append({
|
||||
"type": "label",
|
||||
"value": in_data["label"]
|
||||
})
|
||||
|
||||
default = in_data["default"]
|
||||
names = []
|
||||
for option in sorted(
|
||||
attr_config_data, key=lambda x: x["menu"]
|
||||
):
|
||||
name = option["value"]
|
||||
new_name = "__{}__{}".format(key, name)
|
||||
names.append(new_name)
|
||||
item = {
|
||||
"name": new_name,
|
||||
"type": "boolean",
|
||||
"label": "- {}".format(option["menu"])
|
||||
}
|
||||
if default:
|
||||
if (
|
||||
isinstance(default, list) or
|
||||
isinstance(default, tuple)
|
||||
):
|
||||
if name in default:
|
||||
item["value"] = True
|
||||
else:
|
||||
if name == default:
|
||||
item["value"] = True
|
||||
|
||||
multiselect_enumerators.append(item)
|
||||
|
||||
multiselect_enumerators.append({
|
||||
"type": "hidden",
|
||||
"name": "__hidden__{}".format(key),
|
||||
"value": json.dumps(names)
|
||||
})
|
||||
else:
|
||||
easy_type = True
|
||||
item["data"] = attr_config_data
|
||||
|
||||
else:
|
||||
self.log.warning((
|
||||
"Custom attribute \"{}\" has type \"{}\"."
|
||||
" I don't know how to handle"
|
||||
).format(key, type_name))
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"!!! Can't handle Custom attritubte type \"{}\""
|
||||
" (key: \"{}\")"
|
||||
).format(type_name, key)
|
||||
})
|
||||
|
||||
if easy_type:
|
||||
item["type"] = type_name
|
||||
|
||||
# default value in interface
|
||||
default = in_data["default"]
|
||||
if default is not None:
|
||||
item["value"] = default
|
||||
|
||||
items.append(item)
|
||||
|
||||
# Add autosync attribute
|
||||
items.append(auto_sync_item)
|
||||
|
||||
# Add enumerator items at the end
|
||||
for item in multiselect_enumerators:
|
||||
items.append(item)
|
||||
|
||||
return {
|
||||
'items': items,
|
||||
'title': title
|
||||
}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if not event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
in_data = event['data']['values']
|
||||
|
||||
# pop out info about creating project structure
|
||||
create_proj_struct = in_data.pop(self.create_project_structure_key)
|
||||
|
||||
# Find hidden items for multiselect enumerators
|
||||
keys_to_process = []
|
||||
for key in in_data:
|
||||
if key.startswith("__hidden__"):
|
||||
keys_to_process.append(key)
|
||||
|
||||
self.log.debug("Preparing data for Multiselect Enumerators")
|
||||
enumerators = {}
|
||||
for key in keys_to_process:
|
||||
new_key = key.replace("__hidden__", "")
|
||||
enumerator_items = in_data.pop(key)
|
||||
enumerators[new_key] = json.loads(enumerator_items)
|
||||
|
||||
# find values set for multiselect enumerator
|
||||
for key, enumerator_items in enumerators.items():
|
||||
in_data[key] = []
|
||||
|
||||
name = "__{}__".format(key)
|
||||
|
||||
for item in enumerator_items:
|
||||
value = in_data.pop(item)
|
||||
if value is True:
|
||||
new_key = item.replace(name, "")
|
||||
in_data[key].append(new_key)
|
||||
|
||||
self.log.debug("Setting Custom Attribute values:")
|
||||
entity = entities[0]
|
||||
for key, value in in_data.items():
|
||||
entity["custom_attributes"][key] = value
|
||||
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
|
||||
|
||||
session.commit()
|
||||
|
||||
# Create project structure
|
||||
self.create_project_specific_config(entities[0]["full_name"], in_data)
|
||||
|
||||
# Trigger Create Project Structure action
|
||||
if create_proj_struct is True:
|
||||
self.trigger_action("create.project.structure", event)
|
||||
|
||||
return True
|
||||
|
||||
def create_project_specific_config(self, project_name, json_data):
|
||||
self.log.debug("*** Creating project specifig configs ***")
|
||||
|
||||
path_proj_configs = os.environ.get('PYPE_PROJECT_CONFIGS', "")
|
||||
|
||||
# Skip if PYPE_PROJECT_CONFIGS is not set
|
||||
# TODO show user OS message
|
||||
if not path_proj_configs:
|
||||
self.log.warning((
|
||||
"Environment variable \"PYPE_PROJECT_CONFIGS\" is not set."
|
||||
" Project specific config can't be set."
|
||||
))
|
||||
return
|
||||
|
||||
path_proj_configs = os.path.normpath(path_proj_configs)
|
||||
# Skip if path does not exist
|
||||
# TODO create if not exist?!!!
|
||||
if not os.path.exists(path_proj_configs):
|
||||
self.log.warning((
|
||||
"Path set in Environment variable \"PYPE_PROJECT_CONFIGS\""
|
||||
" Does not exist."
|
||||
))
|
||||
return
|
||||
|
||||
project_specific_path = os.path.normpath(
|
||||
os.path.join(path_proj_configs, project_name)
|
||||
)
|
||||
if not os.path.exists(project_specific_path):
|
||||
os.makedirs(project_specific_path)
|
||||
self.log.debug((
|
||||
"Project specific config folder for project \"{}\" created."
|
||||
).format(project_name))
|
||||
|
||||
# Anatomy ####################################
|
||||
self.log.debug("--- Processing Anatomy Begins: ---")
|
||||
|
||||
anatomy_dir = os.path.normpath(os.path.join(
|
||||
project_specific_path, "anatomy"
|
||||
))
|
||||
anatomy_path = os.path.normpath(os.path.join(
|
||||
anatomy_dir, "default.yaml"
|
||||
))
|
||||
|
||||
anatomy = None
|
||||
if os.path.exists(anatomy_path):
|
||||
self.log.debug(
|
||||
"Anatomy file already exist. Trying to read: \"{}\"".format(
|
||||
anatomy_path
|
||||
)
|
||||
)
|
||||
# Try to load data
|
||||
with open(anatomy_path, 'r') as file_stream:
|
||||
try:
|
||||
anatomy = yaml.load(file_stream, Loader=yaml.loader.Loader)
|
||||
self.log.debug("Reading Anatomy file was successful")
|
||||
except yaml.YAMLError as exc:
|
||||
self.log.warning(
|
||||
"Reading Yaml file failed: \"{}\"".format(anatomy_path),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
if not anatomy:
|
||||
self.log.debug("Anatomy is not set. Duplicating default.")
|
||||
# Create Anatomy folder
|
||||
if not os.path.exists(anatomy_dir):
|
||||
self.log.debug(
|
||||
"Creating Anatomy folder: \"{}\"".format(anatomy_dir)
|
||||
)
|
||||
os.makedirs(anatomy_dir)
|
||||
|
||||
source_items = [
|
||||
os.environ["PYPE_CONFIG"], "anatomy", "default.yaml"
|
||||
]
|
||||
|
||||
source_path = os.path.normpath(os.path.join(*source_items))
|
||||
with open(source_path, 'r') as file_stream:
|
||||
source_data = file_stream.read()
|
||||
|
||||
with open(anatomy_path, 'w') as file_stream:
|
||||
file_stream.write(source_data)
|
||||
|
||||
# Presets ####################################
|
||||
self.log.debug("--- Processing Presets Begins: ---")
|
||||
|
||||
project_defaults_dir = os.path.normpath(os.path.join(*[
|
||||
project_specific_path, "presets", "ftrack"
|
||||
]))
|
||||
project_defaults_path = os.path.normpath(os.path.join(*[
|
||||
project_defaults_dir, "project_defaults.json"
|
||||
]))
|
||||
# Create folder if not exist
|
||||
if not os.path.exists(project_defaults_dir):
|
||||
self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
|
||||
project_defaults_dir
|
||||
))
|
||||
os.makedirs(project_defaults_dir)
|
||||
|
||||
with open(project_defaults_path, 'w') as file_stream:
|
||||
json.dump(json_data, file_stream, indent=4)
|
||||
|
||||
self.log.debug("*** Creating project specifig configs Finished ***")
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
PrepareProject(session, plugins_presets).register()
|
||||
|
|
@ -23,13 +23,13 @@ class RVAction(BaseAction):
|
|||
)
|
||||
type = 'Application'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets):
|
||||
""" Constructor
|
||||
|
||||
:param session: ftrack Session
|
||||
:type session: :class:`ftrack_api.Session`
|
||||
"""
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
self.rv_path = None
|
||||
self.config_data = None
|
||||
|
||||
|
|
@ -326,12 +326,12 @@ class RVAction(BaseAction):
|
|||
return paths
|
||||
|
||||
|
||||
def register(session):
|
||||
def register(session, plugins_presets={}):
|
||||
"""Register hooks."""
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
RVAction(session).register()
|
||||
RVAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -71,7 +71,7 @@ class SetVersion(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -80,7 +80,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SetVersion(session).register()
|
||||
SetVersion(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -19,61 +19,25 @@ class StartTimer(BaseAction):
|
|||
entity = entities[0]
|
||||
if entity.entity_type.lower() != 'task':
|
||||
return
|
||||
self.start_ftrack_timer(entity)
|
||||
try:
|
||||
self.start_clockify_timer(entity)
|
||||
except Exception:
|
||||
self.log.warning(
|
||||
'Failed starting Clockify timer for task: ' + entity['name']
|
||||
)
|
||||
|
||||
user = self.session.query(
|
||||
"User where username is \"{}\"".format(self.session.api_user)
|
||||
).one()
|
||||
|
||||
user.start_timer(entity, force=True)
|
||||
self.session.commit()
|
||||
|
||||
self.log.info(
|
||||
"Starting Ftrack timer for task: {}".format(entity['name'])
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
def start_ftrack_timer(self, task):
|
||||
user_query = 'User where username is "{}"'.format(self.session.api_user)
|
||||
user = self.session.query(user_query).one()
|
||||
self.log.info('Starting Ftrack timer for task: ' + task['name'])
|
||||
user.start_timer(task, force=True)
|
||||
self.session.commit()
|
||||
|
||||
def start_clockify_timer(self, task):
|
||||
# Validate Clockify settings if Clockify is required
|
||||
clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None)
|
||||
if clockify_timer is None:
|
||||
return
|
||||
|
||||
from pype.clockify import ClockifyAPI
|
||||
clockapi = ClockifyAPI()
|
||||
if clockapi.verify_api() is False:
|
||||
return
|
||||
task_type = task['type']['name']
|
||||
project_name = task['project']['full_name']
|
||||
|
||||
def get_parents(entity):
|
||||
output = []
|
||||
if entity.entity_type.lower() == 'project':
|
||||
return output
|
||||
output.extend(get_parents(entity['parent']))
|
||||
output.append(entity['name'])
|
||||
|
||||
return output
|
||||
|
||||
desc_items = get_parents(task['parent'])
|
||||
desc_items.append(task['name'])
|
||||
description = '/'.join(desc_items)
|
||||
|
||||
project_id = clockapi.get_project_id(project_name)
|
||||
tag_ids = []
|
||||
tag_ids.append(clockapi.get_tag_id(task_type))
|
||||
clockapi.start_time_entry(
|
||||
description, project_id, tag_ids=tag_ids
|
||||
)
|
||||
self.log.info('Starting Clockify timer for task: ' + task['name'])
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
StartTimer(session).register()
|
||||
StartTimer(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import collections
|
|||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction, lib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
|
||||
|
|
@ -19,16 +19,17 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.hierarchical.attrs.local'
|
||||
#: Action label.
|
||||
label = 'Sync HierAttrs - Local'
|
||||
label = "Pype Admin"
|
||||
variant = '- Sync Hier Attrs (Local)'
|
||||
#: Action description.
|
||||
description = 'Synchronize hierarchical attributes'
|
||||
#: Icon
|
||||
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Administrator']
|
||||
role_list = ['Pypeclub', 'Administrator', 'Project Manager']
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
|
@ -41,6 +42,7 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
return False
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
self.interface_messages = {}
|
||||
user = session.query(
|
||||
'User where id is "{}"'.format(event['source']['user']['id'])
|
||||
).one()
|
||||
|
|
@ -53,13 +55,27 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
})
|
||||
})
|
||||
session.commit()
|
||||
self.log.debug('Job with id "{}" created'.format(job['id']))
|
||||
|
||||
process_session = ftrack_api.Session(
|
||||
server_url=session.server_url,
|
||||
api_key=session.api_key,
|
||||
api_user=session.api_user,
|
||||
auto_connect_event_hub=True
|
||||
)
|
||||
|
||||
try:
|
||||
# Collect hierarchical attrs
|
||||
self.log.debug('Collecting Hierarchical custom attributes started')
|
||||
custom_attributes = {}
|
||||
all_avalon_attr = session.query(
|
||||
all_avalon_attr = process_session.query(
|
||||
'CustomAttributeGroup where name is "avalon"'
|
||||
).one()
|
||||
|
||||
error_key = (
|
||||
'Hierarchical attributes with set "default" value (not allowed)'
|
||||
)
|
||||
|
||||
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
|
||||
if 'avalon_' in cust_attr['key']:
|
||||
continue
|
||||
|
|
@ -68,6 +84,12 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
continue
|
||||
|
||||
if cust_attr['default']:
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
self.interface_messages[error_key].append(
|
||||
cust_attr['label']
|
||||
)
|
||||
|
||||
self.log.warning((
|
||||
'Custom attribute "{}" has set default value.'
|
||||
' This attribute can\'t be synchronized'
|
||||
|
|
@ -76,6 +98,10 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
|
||||
custom_attributes[cust_attr['key']] = cust_attr
|
||||
|
||||
self.log.debug(
|
||||
'Collecting Hierarchical custom attributes has finished'
|
||||
)
|
||||
|
||||
if not custom_attributes:
|
||||
msg = 'No hierarchical attributes to sync.'
|
||||
self.log.debug(msg)
|
||||
|
|
@ -93,28 +119,61 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
self.db_con.install()
|
||||
self.db_con.Session['AVALON_PROJECT'] = project_name
|
||||
|
||||
for entity in entities:
|
||||
_entities = self._get_entities(event, process_session)
|
||||
|
||||
for entity in _entities:
|
||||
self.log.debug(30*'-')
|
||||
self.log.debug(
|
||||
'Processing entity "{}"'.format(entity.get('name', entity))
|
||||
)
|
||||
|
||||
ent_name = entity.get('name', entity)
|
||||
if entity.entity_type.lower() == 'project':
|
||||
ent_name = entity['full_name']
|
||||
|
||||
for key in custom_attributes:
|
||||
self.log.debug(30*'*')
|
||||
self.log.debug(
|
||||
'Processing Custom attribute key "{}"'.format(key)
|
||||
)
|
||||
# check if entity has that attribute
|
||||
if key not in entity['custom_attributes']:
|
||||
self.log.debug(
|
||||
'Hierachical attribute "{}" not found on "{}"'.format(
|
||||
key, entity.get('name', entity)
|
||||
)
|
||||
error_key = 'Missing key on entities'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
self.interface_messages[error_key].append(
|
||||
'- key: "{}" - entity: "{}"'.format(key, ent_name)
|
||||
)
|
||||
|
||||
self.log.error((
|
||||
'- key "{}" not found on "{}"'
|
||||
).format(key, ent_name))
|
||||
continue
|
||||
|
||||
value = self.get_hierarchical_value(key, entity)
|
||||
if value is None:
|
||||
self.log.warning(
|
||||
'Hierarchical attribute "{}" not set on "{}"'.format(
|
||||
key, entity.get('name', entity)
|
||||
)
|
||||
error_key = (
|
||||
'Missing value for key on entity'
|
||||
' and its parents (synchronization was skipped)'
|
||||
)
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
self.interface_messages[error_key].append(
|
||||
'- key: "{}" - entity: "{}"'.format(key, ent_name)
|
||||
)
|
||||
|
||||
self.log.warning((
|
||||
'- key "{}" not set on "{}" or its parents'
|
||||
).format(key, ent_name))
|
||||
continue
|
||||
|
||||
self.update_hierarchical_attribute(entity, key, value)
|
||||
|
||||
job['status'] = 'done'
|
||||
session.commit()
|
||||
|
||||
except Exception:
|
||||
self.log.error(
|
||||
'Action "{}" failed'.format(self.label),
|
||||
|
|
@ -127,6 +186,11 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
if job['status'] in ('queued', 'running'):
|
||||
job['status'] = 'failed'
|
||||
session.commit()
|
||||
if self.interface_messages:
|
||||
title = "Errors during SyncHierarchicalAttrs"
|
||||
self.show_interface_from_dict(
|
||||
messages=self.interface_messages, title=title, event=event
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
|
@ -146,6 +210,27 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
entity.entity_type.lower() == 'task'
|
||||
):
|
||||
return
|
||||
|
||||
ent_name = entity.get('name', entity)
|
||||
if entity.entity_type.lower() == 'project':
|
||||
ent_name = entity['full_name']
|
||||
|
||||
hierarchy = '/'.join(
|
||||
[a['name'] for a in entity.get('ancestors', [])]
|
||||
)
|
||||
if hierarchy:
|
||||
hierarchy = '/'.join(
|
||||
[entity['project']['full_name'], hierarchy, entity['name']]
|
||||
)
|
||||
elif entity.entity_type.lower() == 'project':
|
||||
hierarchy = entity['full_name']
|
||||
else:
|
||||
hierarchy = '/'.join(
|
||||
[entity['project']['full_name'], entity['name']]
|
||||
)
|
||||
|
||||
self.log.debug('- updating entity "{}"'.format(hierarchy))
|
||||
|
||||
# collect entity's custom attributes
|
||||
custom_attributes = entity.get('custom_attributes')
|
||||
if not custom_attributes:
|
||||
|
|
@ -153,24 +238,49 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
|
||||
mongoid = custom_attributes.get(self.ca_mongoid)
|
||||
if not mongoid:
|
||||
self.log.debug('Entity "{}" is not synchronized to avalon.'.format(
|
||||
entity.get('name', entity)
|
||||
))
|
||||
error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'-- entity "{}" is not synchronized to avalon. Skipping'.format(
|
||||
ent_name
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
mongoid = ObjectId(mongoid)
|
||||
except Exception:
|
||||
self.log.warning('Entity "{}" has stored invalid MongoID.'.format(
|
||||
entity.get('name', entity)
|
||||
))
|
||||
error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'-- entity "{}" has stored invalid MongoID. Skipping'.format(
|
||||
ent_name
|
||||
)
|
||||
)
|
||||
return
|
||||
# Find entity in Mongo DB
|
||||
mongo_entity = self.db_con.find_one({'_id': mongoid})
|
||||
if not mongo_entity:
|
||||
error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'Entity "{}" is not synchronized to avalon.'.format(
|
||||
entity.get('name', entity)
|
||||
'-- entity "{}" was not found in DB by id "{}". Skipping'.format(
|
||||
ent_name, str(mongoid)
|
||||
)
|
||||
)
|
||||
return
|
||||
|
|
@ -188,17 +298,21 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
{'$set': {'data': data}}
|
||||
)
|
||||
|
||||
self.log.debug(
|
||||
'-- stored value "{}"'.format(value)
|
||||
)
|
||||
|
||||
for child in entity.get('children', []):
|
||||
self.update_hierarchical_attribute(child, key, value)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncHierarchicalAttrs(session).register()
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -47,11 +47,12 @@ class SyncToAvalon(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.to.avalon.local'
|
||||
#: Action label.
|
||||
label = 'SyncToAvalon - Local'
|
||||
label = "Pype Admin"
|
||||
variant = '- Sync To Avalon (Local)'
|
||||
#: Action description.
|
||||
description = 'Send data from Ftrack to Avalon'
|
||||
#: Action icon.
|
||||
icon = '{}/ftrack/action_icons/SyncToAvalon-local.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
#: roles that are allowed to register this action
|
||||
|
|
@ -59,7 +60,7 @@ class SyncToAvalon(BaseAction):
|
|||
#: Action priority
|
||||
priority = 200
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets):
|
||||
super(SyncToAvalon, self).__init__(session)
|
||||
# reload utils on initialize (in case of server restart)
|
||||
|
||||
|
|
@ -177,17 +178,7 @@ class SyncToAvalon(BaseAction):
|
|||
job['status'] = 'failed'
|
||||
session.commit()
|
||||
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier='sync.hierarchical.attrs.local',
|
||||
selection=event['data']['selection']
|
||||
),
|
||||
source=dict(
|
||||
user=event['source']['user']
|
||||
)
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
self.trigger_action("sync.hierarchical.attrs.local", event)
|
||||
|
||||
if len(message) > 0:
|
||||
message = "Unable to sync: {}".format(message)
|
||||
|
|
@ -212,7 +203,7 @@ class SyncToAvalon(BaseAction):
|
|||
self.add_childs_to_importable(child)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -221,7 +212,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncToAvalon(session).register()
|
||||
SyncToAvalon(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -11,12 +11,10 @@ from pype.ftrack import BaseAction
|
|||
from avalon import io, inventory, schema
|
||||
|
||||
|
||||
ignore_me = True
|
||||
|
||||
|
||||
class TestAction(BaseAction):
|
||||
'''Edit meta data action.'''
|
||||
|
||||
ignore_me = True
|
||||
#: Action identifier.
|
||||
identifier = 'test.action'
|
||||
#: Action label.
|
||||
|
|
@ -42,13 +40,13 @@ class TestAction(BaseAction):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
TestAction(session).register()
|
||||
TestAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -14,9 +14,11 @@ class ThumbToChildren(BaseAction):
|
|||
# Action identifier
|
||||
identifier = 'thumb.to.children'
|
||||
# Action label
|
||||
label = 'Thumbnail to Children'
|
||||
label = 'Thumbnail'
|
||||
# Action variant
|
||||
variant = " to Children"
|
||||
# Action icon
|
||||
icon = '{}/ftrack/action_icons/thumbToChildren.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -64,12 +66,12 @@ class ThumbToChildren(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ThumbToChildren(session).register()
|
||||
ThumbToChildren(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
@ -13,9 +13,11 @@ class ThumbToParent(BaseAction):
|
|||
# Action identifier
|
||||
identifier = 'thumb.to.parent'
|
||||
# Action label
|
||||
label = 'Thumbnail to Parent'
|
||||
label = 'Thumbnail'
|
||||
# Action variant
|
||||
variant = " to Parent"
|
||||
# Action icon
|
||||
icon = '{}/ftrack/action_icons/thumbToParent.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -86,12 +88,12 @@ class ThumbToParent(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ThumbToParent(session).register()
|
||||
ThumbToParent(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
46
pype/ftrack/actions/action_where_run_ask.py
Normal file
46
pype/ftrack/actions/action_where_run_ask.py
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import os
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pype.vendor.ftrack_api import session as fa_session
|
||||
|
||||
|
||||
class ActionAskWhereIRun(BaseAction):
|
||||
""" Sometimes user forget where pipeline with his credentials is running.
|
||||
- this action triggers `ActionShowWhereIRun`
|
||||
"""
|
||||
# Action is ignored by default
|
||||
ignore_me = True
|
||||
#: Action identifier.
|
||||
identifier = 'ask.where.i.run'
|
||||
#: Action label.
|
||||
label = 'Ask where I run'
|
||||
#: Action description.
|
||||
description = 'Triggers PC info where user have running Pype'
|
||||
#: Action icon
|
||||
icon = '{}/ftrack/action_icons/ActionAskWhereIRun.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
""" Hide by default - Should be enabled only if you want to run.
|
||||
- best practise is to create another action that triggers this one
|
||||
"""
|
||||
|
||||
return True
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
more_data = {"event_hub_id": session.event_hub.id}
|
||||
self.trigger_action(
|
||||
"show.where.i.run", event, additional_event_data=more_data
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ActionAskWhereIRun(session, plugins_presets).register()
|
||||
86
pype/ftrack/actions/action_where_run_show.py
Normal file
86
pype/ftrack/actions/action_where_run_show.py
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
import platform
|
||||
import socket
|
||||
import getpass
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
|
||||
|
||||
class ActionShowWhereIRun(BaseAction):
|
||||
""" Sometimes user forget where pipeline with his credentials is running.
|
||||
- this action shows on which PC, Username and IP is running
|
||||
- requirement action MUST be registered where we want to locate the PC:
|
||||
- - can't be used retrospectively...
|
||||
"""
|
||||
#: Action identifier.
|
||||
identifier = 'show.where.i.run'
|
||||
#: Action label.
|
||||
label = 'Show where I run'
|
||||
#: Action description.
|
||||
description = 'Shows PC info where user have running Pype'
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
""" Hide by default - Should be enabled only if you want to run.
|
||||
- best practise is to create another action that triggers this one
|
||||
"""
|
||||
|
||||
return False
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
# Don't show info when was launch from this session
|
||||
if session.event_hub.id == event.get("data", {}).get("event_hub_id"):
|
||||
return True
|
||||
|
||||
title = "Where Do I Run?"
|
||||
msgs = {}
|
||||
all_keys = ["Hostname", "IP", "Username", "System name", "PC name"]
|
||||
try:
|
||||
host_name = socket.gethostname()
|
||||
msgs["Hostname"] = host_name
|
||||
host_ip = socket.gethostbyname(host_name)
|
||||
msgs["IP"] = host_ip
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
system_name, pc_name, *_ = platform.uname()
|
||||
msgs["System name"] = system_name
|
||||
msgs["PC name"] = pc_name
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
msgs["Username"] = getpass.getuser()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
for key in all_keys:
|
||||
if not msgs.get(key):
|
||||
msgs[key] = "-Undefined-"
|
||||
|
||||
items = []
|
||||
first = True
|
||||
splitter = {'type': 'label', 'value': '---'}
|
||||
for key, value in msgs.items():
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
items.append(splitter)
|
||||
self.log.debug("{}: {}".format(key, value))
|
||||
|
||||
subtitle = {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
|
||||
items.append(subtitle)
|
||||
message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
|
||||
items.append(message)
|
||||
|
||||
self.show_interface(items, title, event=event)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ActionShowWhereIRun(session, plugins_presets).register()
|
||||
|
|
@ -8,7 +8,7 @@ import collections
|
|||
from pypeapp import config
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction, lib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
|
||||
|
|
@ -20,11 +20,12 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.hierarchical.attrs'
|
||||
#: Action label.
|
||||
label = 'Sync HierAttrs'
|
||||
label = "Pype Admin"
|
||||
variant = '- Sync Hier Attrs (server)'
|
||||
#: Action description.
|
||||
description = 'Synchronize hierarchical attributes'
|
||||
#: Icon
|
||||
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get(
|
||||
'PYPE_STATICS_SERVER',
|
||||
'http://localhost:{}'.format(
|
||||
|
|
@ -61,7 +62,7 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
if role['security_role']['name'] in role_list:
|
||||
role_check = True
|
||||
break
|
||||
print(self.icon)
|
||||
|
||||
if role_check is True:
|
||||
for entity in entities:
|
||||
context_type = entity.get('context_type', '').lower()
|
||||
|
|
@ -75,6 +76,8 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
return discover
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
self.interface_messages = {}
|
||||
|
||||
user = session.query(
|
||||
'User where id is "{}"'.format(event['source']['user']['id'])
|
||||
).one()
|
||||
|
|
@ -87,13 +90,26 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
})
|
||||
})
|
||||
session.commit()
|
||||
self.log.debug('Job with id "{}" created'.format(job['id']))
|
||||
|
||||
process_session = ftrack_api.Session(
|
||||
server_url=session.server_url,
|
||||
api_key=session.api_key,
|
||||
api_user=session.api_user,
|
||||
auto_connect_event_hub=True
|
||||
)
|
||||
try:
|
||||
# Collect hierarchical attrs
|
||||
self.log.debug('Collecting Hierarchical custom attributes started')
|
||||
custom_attributes = {}
|
||||
all_avalon_attr = session.query(
|
||||
all_avalon_attr = process_session.query(
|
||||
'CustomAttributeGroup where name is "avalon"'
|
||||
).one()
|
||||
|
||||
error_key = (
|
||||
'Hierarchical attributes with set "default" value (not allowed)'
|
||||
)
|
||||
|
||||
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
|
||||
if 'avalon_' in cust_attr['key']:
|
||||
continue
|
||||
|
|
@ -102,6 +118,12 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
continue
|
||||
|
||||
if cust_attr['default']:
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
self.interface_messages[error_key].append(
|
||||
cust_attr['label']
|
||||
)
|
||||
|
||||
self.log.warning((
|
||||
'Custom attribute "{}" has set default value.'
|
||||
' This attribute can\'t be synchronized'
|
||||
|
|
@ -110,6 +132,10 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
|
||||
custom_attributes[cust_attr['key']] = cust_attr
|
||||
|
||||
self.log.debug(
|
||||
'Collecting Hierarchical custom attributes has finished'
|
||||
)
|
||||
|
||||
if not custom_attributes:
|
||||
msg = 'No hierarchical attributes to sync.'
|
||||
self.log.debug(msg)
|
||||
|
|
@ -127,28 +153,61 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
self.db_con.install()
|
||||
self.db_con.Session['AVALON_PROJECT'] = project_name
|
||||
|
||||
for entity in entities:
|
||||
_entities = self._get_entities(event, process_session)
|
||||
|
||||
for entity in _entities:
|
||||
self.log.debug(30*'-')
|
||||
self.log.debug(
|
||||
'Processing entity "{}"'.format(entity.get('name', entity))
|
||||
)
|
||||
|
||||
ent_name = entity.get('name', entity)
|
||||
if entity.entity_type.lower() == 'project':
|
||||
ent_name = entity['full_name']
|
||||
|
||||
for key in custom_attributes:
|
||||
self.log.debug(30*'*')
|
||||
self.log.debug(
|
||||
'Processing Custom attribute key "{}"'.format(key)
|
||||
)
|
||||
# check if entity has that attribute
|
||||
if key not in entity['custom_attributes']:
|
||||
self.log.debug(
|
||||
'Hierachical attribute "{}" not found on "{}"'.format(
|
||||
key, entity.get('name', entity)
|
||||
)
|
||||
error_key = 'Missing key on entities'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
self.interface_messages[error_key].append(
|
||||
'- key: "{}" - entity: "{}"'.format(key, ent_name)
|
||||
)
|
||||
|
||||
self.log.error((
|
||||
'- key "{}" not found on "{}"'
|
||||
).format(key, entity.get('name', entity)))
|
||||
continue
|
||||
|
||||
value = self.get_hierarchical_value(key, entity)
|
||||
if value is None:
|
||||
self.log.warning(
|
||||
'Hierarchical attribute "{}" not set on "{}"'.format(
|
||||
key, entity.get('name', entity)
|
||||
)
|
||||
error_key = (
|
||||
'Missing value for key on entity'
|
||||
' and its parents (synchronization was skipped)'
|
||||
)
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
self.interface_messages[error_key].append(
|
||||
'- key: "{}" - entity: "{}"'.format(key, ent_name)
|
||||
)
|
||||
|
||||
self.log.warning((
|
||||
'- key "{}" not set on "{}" or its parents'
|
||||
).format(key, ent_name))
|
||||
continue
|
||||
|
||||
self.update_hierarchical_attribute(entity, key, value)
|
||||
|
||||
job['status'] = 'done'
|
||||
session.commit()
|
||||
|
||||
except Exception:
|
||||
self.log.error(
|
||||
'Action "{}" failed'.format(self.label),
|
||||
|
|
@ -161,6 +220,13 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
if job['status'] in ('queued', 'running'):
|
||||
job['status'] = 'failed'
|
||||
session.commit()
|
||||
|
||||
if self.interface_messages:
|
||||
self.show_interface_from_dict(
|
||||
messages=self.interface_messages,
|
||||
title="something went wrong",
|
||||
event=event
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
|
@ -180,6 +246,27 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
entity.entity_type.lower() == 'task'
|
||||
):
|
||||
return
|
||||
|
||||
ent_name = entity.get('name', entity)
|
||||
if entity.entity_type.lower() == 'project':
|
||||
ent_name = entity['full_name']
|
||||
|
||||
hierarchy = '/'.join(
|
||||
[a['name'] for a in entity.get('ancestors', [])]
|
||||
)
|
||||
if hierarchy:
|
||||
hierarchy = '/'.join(
|
||||
[entity['project']['full_name'], hierarchy, entity['name']]
|
||||
)
|
||||
elif entity.entity_type.lower() == 'project':
|
||||
hierarchy = entity['full_name']
|
||||
else:
|
||||
hierarchy = '/'.join(
|
||||
[entity['project']['full_name'], entity['name']]
|
||||
)
|
||||
|
||||
self.log.debug('- updating entity "{}"'.format(hierarchy))
|
||||
|
||||
# collect entity's custom attributes
|
||||
custom_attributes = entity.get('custom_attributes')
|
||||
if not custom_attributes:
|
||||
|
|
@ -187,24 +274,49 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
|
||||
mongoid = custom_attributes.get(self.ca_mongoid)
|
||||
if not mongoid:
|
||||
self.log.debug('Entity "{}" is not synchronized to avalon.'.format(
|
||||
entity.get('name', entity)
|
||||
))
|
||||
error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'-- entity "{}" is not synchronized to avalon. Skipping'.format(
|
||||
ent_name
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
mongoid = ObjectId(mongoid)
|
||||
except Exception:
|
||||
self.log.warning('Entity "{}" has stored invalid MongoID.'.format(
|
||||
entity.get('name', entity)
|
||||
))
|
||||
error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'-- entity "{}" has stored invalid MongoID. Skipping'.format(
|
||||
ent_name
|
||||
)
|
||||
)
|
||||
return
|
||||
# Find entity in Mongo DB
|
||||
mongo_entity = self.db_con.find_one({'_id': mongoid})
|
||||
if not mongo_entity:
|
||||
error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'Entity "{}" is not synchronized to avalon.'.format(
|
||||
entity.get('name', entity)
|
||||
'-- entity "{}" was not found in DB by id "{}". Skipping'.format(
|
||||
ent_name, str(mongoid)
|
||||
)
|
||||
)
|
||||
return
|
||||
|
|
@ -226,13 +338,13 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
self.update_hierarchical_attribute(child, key, value)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncHierarchicalAttrs(session).register()
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ from pype.ftrack import BaseAction, lib
|
|||
from pype.vendor.ftrack_api import session as fa_session
|
||||
|
||||
|
||||
class Sync_To_Avalon(BaseAction):
|
||||
class SyncToAvalon(BaseAction):
|
||||
'''
|
||||
Synchronizing data action - from Ftrack to Avalon DB
|
||||
|
||||
|
|
@ -48,11 +48,12 @@ class Sync_To_Avalon(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.to.avalon'
|
||||
#: Action label.
|
||||
label = 'SyncToAvalon'
|
||||
label = "Pype Admin"
|
||||
variant = "- Sync To Avalon (Server)"
|
||||
#: Action description.
|
||||
description = 'Send data from Ftrack to Avalon'
|
||||
#: Action icon.
|
||||
icon = '{}/ftrack/action_icons/SyncToAvalon.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get(
|
||||
'PYPE_STATICS_SERVER',
|
||||
'http://localhost:{}'.format(
|
||||
|
|
@ -206,18 +207,8 @@ class Sync_To_Avalon(BaseAction):
|
|||
job['status'] = 'failed'
|
||||
|
||||
session.commit()
|
||||
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier='sync.hierarchical.attrs',
|
||||
selection=event['data']['selection']
|
||||
),
|
||||
source=dict(
|
||||
user=event['source']['user']
|
||||
)
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
|
||||
self.trigger_action("sync.hierarchical.attrs", event)
|
||||
|
||||
if len(message) > 0:
|
||||
message = "Unable to sync: {}".format(message)
|
||||
|
|
@ -242,7 +233,7 @@ class Sync_To_Avalon(BaseAction):
|
|||
self.add_childs_to_importable(child)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -251,7 +242,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Sync_To_Avalon(session).register()
|
||||
SyncToAvalon(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -51,9 +51,9 @@ class DelAvalonIdFromNew(BaseEvent):
|
|||
continue
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
DelAvalonIdFromNew(session).register()
|
||||
DelAvalonIdFromNew(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -86,9 +86,9 @@ class NextTaskUpdate(BaseEvent):
|
|||
session.rollback()
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
NextTaskUpdate(session).register()
|
||||
NextTaskUpdate(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -2,11 +2,10 @@ from pype.vendor import ftrack_api
|
|||
from pype.ftrack import BaseEvent
|
||||
|
||||
|
||||
ignore_me = True
|
||||
|
||||
|
||||
class Radio_buttons(BaseEvent):
|
||||
|
||||
ignore_me = True
|
||||
|
||||
def launch(self, session, event):
|
||||
'''Provides a readio button behaviour to any bolean attribute in
|
||||
radio_button group.'''
|
||||
|
|
@ -34,9 +33,10 @@ class Radio_buttons(BaseEvent):
|
|||
|
||||
session.commit()
|
||||
|
||||
def register(session):
|
||||
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Radio_buttons(session).register()
|
||||
Radio_buttons(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseEvent, lib
|
||||
|
|
@ -23,7 +23,10 @@ class SyncHierarchicalAttrs(BaseEvent):
|
|||
if not keys:
|
||||
continue
|
||||
|
||||
entity = session.get(ent['entity_type'], ent['entityId'])
|
||||
if not ent['entityType'] in ['task', 'show']:
|
||||
continue
|
||||
|
||||
entity = session.get(self._get_entity_type(ent), ent['entityId'])
|
||||
processable.append(ent)
|
||||
processable_ent[ent['entityId']] = entity
|
||||
|
||||
|
|
@ -115,9 +118,9 @@ class SyncHierarchicalAttrs(BaseEvent):
|
|||
self.update_hierarchical_attribute(child, key, value)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncHierarchicalAttrs(session).register()
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -112,16 +112,16 @@ class Sync_to_Avalon(BaseEvent):
|
|||
{'type': 'label', 'value': '# Fatal Error'},
|
||||
{'type': 'label', 'value': '<p>{}</p>'.format(ftrack_message)}
|
||||
]
|
||||
self.show_interface(event, items, title)
|
||||
self.show_interface(items, title, event=event)
|
||||
self.log.error('Fatal error during sync: {}'.format(message))
|
||||
|
||||
return
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Sync_to_Avalon(session).register()
|
||||
Sync_to_Avalon(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -5,11 +5,10 @@ from pype.vendor import ftrack_api
|
|||
from pype.ftrack import BaseEvent
|
||||
|
||||
|
||||
ignore_me = True
|
||||
|
||||
|
||||
class Test_Event(BaseEvent):
|
||||
|
||||
ignore_me = True
|
||||
|
||||
priority = 10000
|
||||
|
||||
def launch(self, session, event):
|
||||
|
|
@ -21,9 +20,9 @@ class Test_Event(BaseEvent):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Test_Event(session).register()
|
||||
Test_Event(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -45,9 +45,9 @@ class ThumbnailEvents(BaseEvent):
|
|||
pass
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ThumbnailEvents(session).register()
|
||||
ThumbnailEvents(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseEvent, lib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from bson.objectid import ObjectId
|
||||
from pypeapp import config
|
||||
from pypeapp import Anatomy
|
||||
|
|
@ -229,11 +229,11 @@ class UserAssigmentEvent(BaseEvent):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
"""
|
||||
Register plugin. Called when used as an plugin.
|
||||
"""
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
UserAssigmentEvent(session).register()
|
||||
UserAssigmentEvent(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -69,9 +69,9 @@ class VersionToTaskStatus(BaseEvent):
|
|||
path, task_status['name']))
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
VersionToTaskStatus(session).register()
|
||||
VersionToTaskStatus(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -5,7 +5,9 @@ import importlib
|
|||
from pype.vendor import ftrack_api
|
||||
import time
|
||||
import logging
|
||||
from pypeapp import Logger
|
||||
import inspect
|
||||
from pypeapp import Logger, config
|
||||
|
||||
|
||||
log = Logger().get_logger(__name__)
|
||||
|
||||
|
|
@ -27,8 +29,8 @@ PYTHONPATH # Path to ftrack_api and paths to all modules used in actions
|
|||
"""
|
||||
|
||||
|
||||
class FtrackServer():
|
||||
def __init__(self, type='action'):
|
||||
class FtrackServer:
|
||||
def __init__(self, server_type='action'):
|
||||
"""
|
||||
- 'type' is by default set to 'action' - Runs Action server
|
||||
- enter 'event' for Event server
|
||||
|
|
@ -43,21 +45,12 @@ class FtrackServer():
|
|||
ftrack_log = logging.getLogger("ftrack_api")
|
||||
ftrack_log.setLevel(logging.WARNING)
|
||||
|
||||
self.type = type
|
||||
self.actionsAvailable = True
|
||||
self.eventsAvailable = True
|
||||
# Separate all paths
|
||||
if "FTRACK_ACTIONS_PATH" in os.environ:
|
||||
all_action_paths = os.environ["FTRACK_ACTIONS_PATH"]
|
||||
self.actionsPaths = all_action_paths.split(os.pathsep)
|
||||
else:
|
||||
self.actionsAvailable = False
|
||||
env_key = "FTRACK_ACTIONS_PATH"
|
||||
if server_type.lower() == 'event':
|
||||
env_key = "FTRACK_EVENTS_PATH"
|
||||
|
||||
if "FTRACK_EVENTS_PATH" in os.environ:
|
||||
all_event_paths = os.environ["FTRACK_EVENTS_PATH"]
|
||||
self.eventsPaths = all_event_paths.split(os.pathsep)
|
||||
else:
|
||||
self.eventsAvailable = False
|
||||
self.server_type = server_type
|
||||
self.env_key = env_key
|
||||
|
||||
def stop_session(self):
|
||||
if self.session.event_hub.connected is True:
|
||||
|
|
@ -67,7 +60,7 @@ class FtrackServer():
|
|||
|
||||
def set_files(self, paths):
|
||||
# Iterate all paths
|
||||
functions = []
|
||||
register_functions_dict = []
|
||||
for path in paths:
|
||||
# add path to PYTHON PATH
|
||||
if path not in sys.path:
|
||||
|
|
@ -80,32 +73,23 @@ class FtrackServer():
|
|||
if '.pyc' in file or '.py' not in file:
|
||||
continue
|
||||
|
||||
ignore = 'ignore_me'
|
||||
mod = importlib.import_module(os.path.splitext(file)[0])
|
||||
importlib.reload(mod)
|
||||
mod_functions = dict(
|
||||
[
|
||||
(name, function)
|
||||
for name, function in mod.__dict__.items()
|
||||
if isinstance(function, types.FunctionType) or
|
||||
name == ignore
|
||||
if isinstance(function, types.FunctionType)
|
||||
]
|
||||
)
|
||||
# Don't care about ignore_me files
|
||||
if (
|
||||
ignore in mod_functions and
|
||||
mod_functions[ignore] is True
|
||||
):
|
||||
continue
|
||||
|
||||
# separate files by register function
|
||||
if 'register' not in mod_functions:
|
||||
msg = (
|
||||
'"{0}" - Missing register method'
|
||||
).format(file, self.type)
|
||||
msg = ('"{}" - Missing register method').format(file)
|
||||
log.warning(msg)
|
||||
continue
|
||||
|
||||
functions.append({
|
||||
register_functions_dict.append({
|
||||
'name': file,
|
||||
'register': mod_functions['register']
|
||||
})
|
||||
|
|
@ -113,45 +97,49 @@ class FtrackServer():
|
|||
msg = 'Loading of file "{}" failed ({})'.format(
|
||||
file, str(e)
|
||||
)
|
||||
log.warning(msg)
|
||||
log.warning(msg, exc_info=e)
|
||||
|
||||
if len(functions) < 1:
|
||||
if len(register_functions_dict) < 1:
|
||||
raise Exception
|
||||
|
||||
# Load presets for setting plugins
|
||||
key = "user"
|
||||
if self.server_type.lower() == "event":
|
||||
key = "server"
|
||||
plugins_presets = config.get_presets().get(
|
||||
"ftrack", {}
|
||||
).get("plugins", {}).get(key, {})
|
||||
|
||||
function_counter = 0
|
||||
for function in functions:
|
||||
for function_dict in register_functions_dict:
|
||||
register = function_dict["register"]
|
||||
try:
|
||||
function['register'](self.session)
|
||||
if len(inspect.signature(register).parameters) == 1:
|
||||
register(self.session)
|
||||
else:
|
||||
register(self.session, plugins_presets=plugins_presets)
|
||||
|
||||
if function_counter%7 == 0:
|
||||
time.sleep(0.1)
|
||||
function_counter += 1
|
||||
except Exception as e:
|
||||
except Exception as exc:
|
||||
msg = '"{}" - register was not successful ({})'.format(
|
||||
function['name'], str(e)
|
||||
function_dict['name'], str(exc)
|
||||
)
|
||||
log.warning(msg)
|
||||
|
||||
def run_server(self):
|
||||
self.session = ftrack_api.Session(auto_connect_event_hub=True,)
|
||||
|
||||
if self.type.lower() == 'event':
|
||||
if self.eventsAvailable is False:
|
||||
msg = (
|
||||
'FTRACK_EVENTS_PATH is not set'
|
||||
', event server won\'t launch'
|
||||
)
|
||||
log.error(msg)
|
||||
return
|
||||
self.set_files(self.eventsPaths)
|
||||
else:
|
||||
if self.actionsAvailable is False:
|
||||
msg = (
|
||||
'FTRACK_ACTIONS_PATH is not set'
|
||||
', action server won\'t launch'
|
||||
)
|
||||
log.error(msg)
|
||||
return
|
||||
self.set_files(self.actionsPaths)
|
||||
paths_str = os.environ.get(self.env_key)
|
||||
if paths_str is None:
|
||||
log.error((
|
||||
"Env var \"{}\" is not set, \"{}\" server won\'t launch"
|
||||
).format(self.env_key, self.server_type))
|
||||
return
|
||||
|
||||
paths = paths_str.split(os.pathsep)
|
||||
self.set_files(paths)
|
||||
|
||||
log.info(60*"*")
|
||||
log.info('Registration of actions/events has finished!')
|
||||
|
|
|
|||
|
|
@ -1,14 +1,13 @@
|
|||
import os
|
||||
import re
|
||||
import json
|
||||
from pype import lib as pypelib
|
||||
from pype.lib import get_avalon_database
|
||||
from bson.objectid import ObjectId
|
||||
import avalon
|
||||
import avalon.api
|
||||
from avalon import schema
|
||||
from avalon.vendor import toml, jsonschema
|
||||
from pypeapp import Logger
|
||||
from pypeapp import Logger, Anatomy, config
|
||||
|
||||
ValidationError = jsonschema.ValidationError
|
||||
|
||||
|
|
@ -53,8 +52,8 @@ def import_to_avalon(
|
|||
if entity_type in ['Project']:
|
||||
type = 'project'
|
||||
|
||||
config = get_project_config(entity)
|
||||
schema.validate(config)
|
||||
proj_config = get_project_config(entity)
|
||||
schema.validate(proj_config)
|
||||
|
||||
av_project_code = None
|
||||
if av_project is not None and 'code' in av_project['data']:
|
||||
|
|
@ -62,13 +61,12 @@ def import_to_avalon(
|
|||
ft_project_code = ft_project['name']
|
||||
|
||||
if av_project is None:
|
||||
project_schema = pypelib.get_avalon_project_template_schema()
|
||||
item = {
|
||||
'schema': project_schema,
|
||||
'schema': "avalon-core:project-2.0",
|
||||
'type': type,
|
||||
'name': project_name,
|
||||
'data': dict(),
|
||||
'config': config,
|
||||
'config': proj_config,
|
||||
'parent': None,
|
||||
}
|
||||
schema.validate(item)
|
||||
|
|
@ -118,13 +116,13 @@ def import_to_avalon(
|
|||
# not override existing templates!
|
||||
templates = av_project['config'].get('template', None)
|
||||
if templates is not None:
|
||||
for key, value in config['template'].items():
|
||||
for key, value in proj_config['template'].items():
|
||||
if (
|
||||
key in templates and
|
||||
templates[key] is not None and
|
||||
templates[key] != value
|
||||
):
|
||||
config['template'][key] = templates[key]
|
||||
proj_config['template'][key] = templates[key]
|
||||
|
||||
projectId = av_project['_id']
|
||||
|
||||
|
|
@ -144,7 +142,7 @@ def import_to_avalon(
|
|||
{'_id': ObjectId(projectId)},
|
||||
{'$set': {
|
||||
'name': project_name,
|
||||
'config': config,
|
||||
'config': proj_config,
|
||||
'data': data
|
||||
}}
|
||||
)
|
||||
|
|
@ -214,9 +212,8 @@ def import_to_avalon(
|
|||
{'type': 'asset', 'name': name}
|
||||
)
|
||||
if avalon_asset is None:
|
||||
asset_schema = pypelib.get_avalon_asset_template_schema()
|
||||
item = {
|
||||
'schema': asset_schema,
|
||||
'schema': "avalon-core:asset-2.0",
|
||||
'name': name,
|
||||
'silo': silo,
|
||||
'parent': ObjectId(projectId),
|
||||
|
|
@ -329,13 +326,26 @@ def import_to_avalon(
|
|||
return output
|
||||
|
||||
|
||||
def get_avalon_attr(session):
|
||||
def get_avalon_attr(session, split_hierarchical=False):
|
||||
custom_attributes = []
|
||||
hier_custom_attributes = []
|
||||
query = 'CustomAttributeGroup where name is "avalon"'
|
||||
all_avalon_attr = session.query(query).one()
|
||||
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
|
||||
if 'avalon_' not in cust_attr['key']:
|
||||
custom_attributes.append(cust_attr)
|
||||
if 'avalon_' in cust_attr['key']:
|
||||
continue
|
||||
|
||||
if split_hierarchical:
|
||||
if cust_attr["is_hierarchical"]:
|
||||
hier_custom_attributes.append(cust_attr)
|
||||
continue
|
||||
|
||||
custom_attributes.append(cust_attr)
|
||||
|
||||
if split_hierarchical:
|
||||
# return tuple
|
||||
return custom_attributes, hier_custom_attributes
|
||||
|
||||
return custom_attributes
|
||||
|
||||
|
||||
|
|
@ -345,13 +355,12 @@ def changeability_check_childs(entity):
|
|||
childs = entity['children']
|
||||
for child in childs:
|
||||
if child.entity_type.lower() == 'task':
|
||||
config = get_config_data()
|
||||
if 'sync_to_avalon' in config:
|
||||
config = config['sync_to_avalon']
|
||||
if 'statuses_name_change' in config:
|
||||
available_statuses = config['statuses_name_change']
|
||||
else:
|
||||
available_statuses = []
|
||||
available_statuses = config.get_presets().get(
|
||||
"ftrack", {}).get(
|
||||
"ftrack_config", {}).get(
|
||||
"sync_to_avalon", {}).get(
|
||||
"statuses_name_change", []
|
||||
)
|
||||
ent_status = child['status']['name'].lower()
|
||||
if ent_status not in available_statuses:
|
||||
return False
|
||||
|
|
@ -480,14 +489,28 @@ def get_avalon_project(ft_project):
|
|||
return avalon_project
|
||||
|
||||
|
||||
def get_project_config(entity):
|
||||
config = {}
|
||||
config['schema'] = pypelib.get_avalon_project_config_schema()
|
||||
config['tasks'] = get_tasks(entity)
|
||||
config['apps'] = get_project_apps(entity)
|
||||
config['template'] = pypelib.get_avalon_project_template()
|
||||
def get_avalon_project_template():
|
||||
"""Get avalon template
|
||||
|
||||
return config
|
||||
Returns:
|
||||
dictionary with templates
|
||||
"""
|
||||
templates = Anatomy().templates
|
||||
return {
|
||||
'workfile': templates["avalon"]["workfile"],
|
||||
'work': templates["avalon"]["work"],
|
||||
'publish': templates["avalon"]["publish"]
|
||||
}
|
||||
|
||||
|
||||
def get_project_config(entity):
|
||||
proj_config = {}
|
||||
proj_config['schema'] = 'avalon-core:config-1.0'
|
||||
proj_config['tasks'] = get_tasks(entity)
|
||||
proj_config['apps'] = get_project_apps(entity)
|
||||
proj_config['template'] = get_avalon_project_template()
|
||||
|
||||
return proj_config
|
||||
|
||||
|
||||
def get_tasks(project):
|
||||
|
|
@ -539,7 +562,7 @@ def avalon_check_name(entity, inSchema=None):
|
|||
if entity.entity_type in ['Project']:
|
||||
# data['type'] = 'project'
|
||||
name = entity['full_name']
|
||||
# schema = get_avalon_project_template_schema()
|
||||
# schema = "avalon-core:project-2.0"
|
||||
|
||||
data['silo'] = 'Film'
|
||||
|
||||
|
|
@ -557,24 +580,6 @@ def avalon_check_name(entity, inSchema=None):
|
|||
raise ValueError(msg.format(name))
|
||||
|
||||
|
||||
def get_config_data():
|
||||
path_items = [pypelib.get_presets_path(), 'ftrack', 'ftrack_config.json']
|
||||
filepath = os.path.sep.join(path_items)
|
||||
data = dict()
|
||||
try:
|
||||
with open(filepath) as data_file:
|
||||
data = json.load(data_file)
|
||||
|
||||
except Exception as e:
|
||||
msg = (
|
||||
'Loading "Ftrack Config file" Failed.'
|
||||
' Please check log for more information.'
|
||||
)
|
||||
log.warning("{} - {}".format(msg, str(e)))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def show_errors(obj, event, errors):
|
||||
title = 'Hey You! You raised few Errors! (*look below*)'
|
||||
items = []
|
||||
|
|
@ -596,4 +601,4 @@ def show_errors(obj, event, errors):
|
|||
obj.log.error(
|
||||
'{}: {}'.format(key, message)
|
||||
)
|
||||
obj.show_interface(event, items, title)
|
||||
obj.show_interface(items, title, event=event)
|
||||
|
|
|
|||
|
|
@ -21,9 +21,9 @@ class BaseAction(BaseHandler):
|
|||
icon = None
|
||||
type = 'Action'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets={}):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
|
||||
if self.label is None:
|
||||
raise ValueError(
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from avalon import lib as avalonlib
|
|||
import acre
|
||||
from pype import api as pype
|
||||
from pype import lib as pypelib
|
||||
from .avalon_sync import get_config_data
|
||||
from pypeapp import config
|
||||
from .ftrack_base_handler import BaseHandler
|
||||
|
||||
from pypeapp import Anatomy
|
||||
|
|
@ -26,10 +26,10 @@ class AppAction(BaseHandler):
|
|||
preactions = ['start.timer']
|
||||
|
||||
def __init__(
|
||||
self, session, label, name, executable,
|
||||
variant=None, icon=None, description=None, preactions=[]
|
||||
self, session, label, name, executable, variant=None,
|
||||
icon=None, description=None, preactions=[], plugins_presets={}
|
||||
):
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
|
||||
if label is None:
|
||||
|
|
@ -221,11 +221,22 @@ class AppAction(BaseHandler):
|
|||
anatomy = anatomy.format(data)
|
||||
work_template = anatomy["work"]["folder"]
|
||||
|
||||
except Exception as e:
|
||||
self.log.exception(
|
||||
"{0} Error in anatomy.format: {1}".format(__name__, e)
|
||||
except Exception as exc:
|
||||
msg = "{} Error in anatomy.format: {}".format(
|
||||
__name__, str(exc)
|
||||
)
|
||||
os.environ["AVALON_WORKDIR"] = os.path.normpath(work_template)
|
||||
self.log.error(msg, exc_info=True)
|
||||
return {
|
||||
'success': False,
|
||||
'message': msg
|
||||
}
|
||||
|
||||
workdir = os.path.normpath(work_template)
|
||||
os.environ["AVALON_WORKDIR"] = workdir
|
||||
try:
|
||||
os.makedirs(workdir)
|
||||
except FileExistsError:
|
||||
pass
|
||||
|
||||
# collect all parents from the task
|
||||
parents = []
|
||||
|
|
@ -328,10 +339,10 @@ class AppAction(BaseHandler):
|
|||
pass
|
||||
|
||||
# Change status of task to In progress
|
||||
config = get_config_data()
|
||||
presets = config.get_presets()["ftrack"]["ftrack_config"]
|
||||
|
||||
if 'status_update' in config:
|
||||
statuses = config['status_update']
|
||||
if 'status_update' in presets:
|
||||
statuses = presets['status_update']
|
||||
|
||||
actual_status = entity['status']['name'].lower()
|
||||
next_status_name = None
|
||||
|
|
@ -351,7 +362,7 @@ class AppAction(BaseHandler):
|
|||
session.commit()
|
||||
except Exception:
|
||||
msg = (
|
||||
'Status "{}" in config wasn\'t found on Ftrack'
|
||||
'Status "{}" in presets wasn\'t found on Ftrack'
|
||||
).format(next_status_name)
|
||||
self.log.warning(msg)
|
||||
|
||||
|
|
|
|||
|
|
@ -26,9 +26,10 @@ class BaseHandler(object):
|
|||
priority = 100
|
||||
# Type is just for logging purpose (e.g.: Action, Event, Application,...)
|
||||
type = 'No-type'
|
||||
ignore_me = False
|
||||
preactions = []
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets={}):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
self._session = session
|
||||
self.log = Logger().get_logger(self.__class__.__name__)
|
||||
|
|
@ -36,11 +37,23 @@ class BaseHandler(object):
|
|||
# Using decorator
|
||||
self.register = self.register_decorator(self.register)
|
||||
self.launch = self.launch_log(self.launch)
|
||||
self.plugins_presets = plugins_presets
|
||||
|
||||
# Decorator
|
||||
def register_decorator(self, func):
|
||||
@functools.wraps(func)
|
||||
def wrapper_register(*args, **kwargs):
|
||||
|
||||
presets_data = self.plugins_presets.get(self.__class__.__name__)
|
||||
if presets_data:
|
||||
for key, value in presets_data.items():
|
||||
if not hasattr(self, key):
|
||||
continue
|
||||
setattr(self, key, value)
|
||||
|
||||
if self.ignore_me:
|
||||
return
|
||||
|
||||
label = self.__class__.__name__
|
||||
if hasattr(self, 'label'):
|
||||
if self.variant is None:
|
||||
|
|
@ -72,7 +85,7 @@ class BaseHandler(object):
|
|||
self.type, label)
|
||||
)
|
||||
except Exception as e:
|
||||
self.log.exception('{} "{}" - Registration failed ({})'.format(
|
||||
self.log.error('{} "{}" - Registration failed ({})'.format(
|
||||
self.type, label, str(e))
|
||||
)
|
||||
return wrapper_register
|
||||
|
|
@ -83,23 +96,23 @@ class BaseHandler(object):
|
|||
def wrapper_launch(*args, **kwargs):
|
||||
label = self.__class__.__name__
|
||||
if hasattr(self, 'label'):
|
||||
if self.variant is None:
|
||||
label = self.label
|
||||
else:
|
||||
label = '{} {}'.format(self.label, self.variant)
|
||||
label = self.label
|
||||
if hasattr(self, 'variant'):
|
||||
if self.variant is not None:
|
||||
label = '{} {}'.format(self.label, self.variant)
|
||||
|
||||
self.log.info(('{} "{}": Launched').format(self.type, label))
|
||||
try:
|
||||
self.log.info(('{} "{}": Launched').format(self.type, label))
|
||||
result = func(*args, **kwargs)
|
||||
self.log.info(('{} "{}": Finished').format(self.type, label))
|
||||
return result
|
||||
except Exception as e:
|
||||
msg = '{} "{}": Failed ({})'.format(self.type, label, str(e))
|
||||
self.log.exception(msg)
|
||||
return func(*args, **kwargs)
|
||||
except Exception as exc:
|
||||
msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
|
||||
self.log.error(msg, exc_info=True)
|
||||
return {
|
||||
'success': False,
|
||||
'message': msg
|
||||
}
|
||||
finally:
|
||||
self.log.info(('{} "{}": Finished').format(self.type, label))
|
||||
return wrapper_launch
|
||||
|
||||
@property
|
||||
|
|
@ -127,6 +140,13 @@ class BaseHandler(object):
|
|||
|
||||
# Custom validations
|
||||
result = self.preregister()
|
||||
if result is None:
|
||||
self.log.debug((
|
||||
"\"{}\" 'preregister' method returned 'None'. Expected it"
|
||||
" didn't fail and continue as preregister returned True."
|
||||
).format(self.__class__.__name__))
|
||||
return
|
||||
|
||||
if result is True:
|
||||
return
|
||||
msg = "Pre-register conditions were not met"
|
||||
|
|
@ -194,7 +214,6 @@ class BaseHandler(object):
|
|||
def _translate_event(self, session, event):
|
||||
'''Return *event* translated structure to be used with the API.'''
|
||||
|
||||
'''Return *event* translated structure to be used with the API.'''
|
||||
_entities = event['data'].get('entities_object', None)
|
||||
if (
|
||||
_entities is None or
|
||||
|
|
@ -209,26 +228,29 @@ class BaseHandler(object):
|
|||
event
|
||||
]
|
||||
|
||||
def _get_entities(self, event):
|
||||
self.session._local_cache.clear()
|
||||
selection = event['data'].get('selection', [])
|
||||
def _get_entities(self, event, session=None):
|
||||
if session is None:
|
||||
session = self.session
|
||||
session._local_cache.clear()
|
||||
selection = event['data'].get('selection') or []
|
||||
_entities = []
|
||||
for entity in selection:
|
||||
_entities.append(
|
||||
self.session.get(
|
||||
self._get_entity_type(entity),
|
||||
entity.get('entityId')
|
||||
)
|
||||
)
|
||||
_entities.append(session.get(
|
||||
self._get_entity_type(entity, session),
|
||||
entity.get('entityId')
|
||||
))
|
||||
event['data']['entities_object'] = _entities
|
||||
return _entities
|
||||
|
||||
def _get_entity_type(self, entity):
|
||||
def _get_entity_type(self, entity, session=None):
|
||||
'''Return translated entity type tht can be used with API.'''
|
||||
# Get entity type and make sure it is lower cased. Most places except
|
||||
# the component tab in the Sidebar will use lower case notation.
|
||||
entity_type = entity.get('entityType').replace('_', '').lower()
|
||||
|
||||
if session is None:
|
||||
session = self.session
|
||||
|
||||
for schema in self.session.schemas:
|
||||
alias_for = schema.get('alias_for')
|
||||
|
||||
|
|
@ -305,30 +327,13 @@ class BaseHandler(object):
|
|||
|
||||
# Launch preactions
|
||||
for preaction in self.preactions:
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier=preaction,
|
||||
selection=selection
|
||||
),
|
||||
source=dict(
|
||||
user=dict(username=session.api_user)
|
||||
)
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
self.trigger_action(preaction, event)
|
||||
|
||||
# Relaunch this action
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier=self.identifier,
|
||||
selection=selection,
|
||||
preactions_launched=True
|
||||
),
|
||||
source=dict(
|
||||
user=dict(username=session.api_user)
|
||||
)
|
||||
additional_data = {"preactions_launched": True}
|
||||
self.trigger_action(
|
||||
self.identifier, event, additional_event_data=additional_data
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
|
||||
return False
|
||||
|
||||
|
|
@ -430,12 +435,47 @@ class BaseHandler(object):
|
|||
on_error='ignore'
|
||||
)
|
||||
|
||||
def show_interface(self, event, items, title=''):
|
||||
def show_interface(
|
||||
self, items, title='',
|
||||
event=None, user=None, username=None, user_id=None
|
||||
):
|
||||
"""
|
||||
Shows interface to user who triggered event
|
||||
Shows interface to user
|
||||
- to identify user must be entered one of args:
|
||||
event, user, username, user_id
|
||||
- 'items' must be list containing Ftrack interface items
|
||||
"""
|
||||
user_id = event['source']['user']['id']
|
||||
if not any([event, user, username, user_id]):
|
||||
raise TypeError((
|
||||
'Missing argument `show_interface` requires one of args:'
|
||||
' event (ftrack_api Event object),'
|
||||
' user (ftrack_api User object)'
|
||||
' username (string) or user_id (string)'
|
||||
))
|
||||
|
||||
if event:
|
||||
user_id = event['source']['user']['id']
|
||||
elif user:
|
||||
user_id = user['id']
|
||||
else:
|
||||
if user_id:
|
||||
key = 'id'
|
||||
value = user_id
|
||||
else:
|
||||
key = 'username'
|
||||
value = username
|
||||
|
||||
user = self.session.query(
|
||||
'User where {} is "{}"'.format(key, value)
|
||||
).first()
|
||||
|
||||
if not user:
|
||||
raise TypeError((
|
||||
'Ftrack user with {} "{}" was not found!'.format(key, value)
|
||||
))
|
||||
|
||||
user_id = user['id']
|
||||
|
||||
target = (
|
||||
'applicationId=ftrack.client.web and user.id="{0}"'
|
||||
).format(user_id)
|
||||
|
|
@ -452,3 +492,90 @@ class BaseHandler(object):
|
|||
),
|
||||
on_error='ignore'
|
||||
)
|
||||
|
||||
def show_interface_from_dict(
|
||||
self, messages, title="", event=None,
|
||||
user=None, username=None, user_id=None
|
||||
):
|
||||
if not messages:
|
||||
self.log.debug("No messages to show! (messages dict is empty)")
|
||||
return
|
||||
items = []
|
||||
splitter = {'type': 'label', 'value': '---'}
|
||||
first = True
|
||||
for key, value in messages.items():
|
||||
if not first:
|
||||
items.append(splitter)
|
||||
else:
|
||||
first = False
|
||||
|
||||
subtitle = {'type': 'label', 'value':'<h3>{}</h3>'.format(key)}
|
||||
items.append(subtitle)
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
message = {
|
||||
'type': 'label', 'value': '<p>{}</p>'.format(item)
|
||||
}
|
||||
items.append(message)
|
||||
else:
|
||||
message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
|
||||
items.append(message)
|
||||
|
||||
self.show_interface(items, title, event, user, username, user_id)
|
||||
|
||||
def trigger_action(
|
||||
self, action_name, event=None, session=None,
|
||||
selection=None, user_data=None,
|
||||
topic="ftrack.action.launch", additional_event_data={},
|
||||
on_error="ignore"
|
||||
):
|
||||
self.log.debug("Triggering action \"{}\" Begins".format(action_name))
|
||||
|
||||
if not session:
|
||||
session = self.session
|
||||
|
||||
# Getting selection and user data
|
||||
_selection = None
|
||||
_user_data = None
|
||||
|
||||
if event:
|
||||
_selection = event.get("data", {}).get("selection")
|
||||
_user_data = event.get("source", {}).get("user")
|
||||
|
||||
if selection is not None:
|
||||
_selection = selection
|
||||
|
||||
if user_data is not None:
|
||||
_user_data = user_data
|
||||
|
||||
# Without selection and user data skip triggering
|
||||
msg = "Can't trigger \"{}\" action without {}."
|
||||
if _selection is None:
|
||||
self.log.error(msg.format(action_name, "selection"))
|
||||
return
|
||||
|
||||
if _user_data is None:
|
||||
self.log.error(msg.format(action_name, "user data"))
|
||||
return
|
||||
|
||||
_event_data = {
|
||||
"actionIdentifier": action_name,
|
||||
"selection": _selection
|
||||
}
|
||||
|
||||
# Add additional data
|
||||
if additional_event_data:
|
||||
_event_data.update(additional_event_data)
|
||||
|
||||
# Create and trigger event
|
||||
session.event_hub.publish(
|
||||
fa_session.ftrack_api.event.base.Event(
|
||||
topic=topic,
|
||||
data=_event_data,
|
||||
source=dict(user=_user_data)
|
||||
),
|
||||
on_error=on_error
|
||||
)
|
||||
self.log.debug(
|
||||
"Action \"{}\" Triggered successfully".format(action_name)
|
||||
)
|
||||
|
|
|
|||
|
|
@ -15,9 +15,9 @@ class BaseEvent(BaseHandler):
|
|||
|
||||
type = 'Event'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets={}):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
|
||||
# Decorator
|
||||
def launch_log(self, func):
|
||||
|
|
@ -25,9 +25,12 @@ class BaseEvent(BaseHandler):
|
|||
def wrapper_launch(*args, **kwargs):
|
||||
try:
|
||||
func(*args, **kwargs)
|
||||
except Exception as e:
|
||||
self.log.info('{} Failed ({})'.format(
|
||||
self.__class__.__name__, str(e))
|
||||
except Exception as exc:
|
||||
self.log.error(
|
||||
'Event "{}" Failed: {}'.format(
|
||||
self.__class__.__name__, str(exc)
|
||||
),
|
||||
exc_info=True
|
||||
)
|
||||
return wrapper_launch
|
||||
|
||||
|
|
@ -43,22 +46,7 @@ class BaseEvent(BaseHandler):
|
|||
self.session.rollback()
|
||||
self.session._local_cache.clear()
|
||||
|
||||
try:
|
||||
self.launch(
|
||||
self.session, event
|
||||
)
|
||||
except Exception as e:
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
|
||||
log_message = "{}/{}/Line: {}".format(
|
||||
exc_type, fname, exc_tb.tb_lineno
|
||||
)
|
||||
self.log.error(
|
||||
'Error during syncToAvalon: {}'.format(log_message),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
return
|
||||
self.launch(self.session, event)
|
||||
|
||||
def _translate_event(self, session, event):
|
||||
'''Return *event* translated structure to be used with the API.'''
|
||||
|
|
|
|||
433
pype/ftrack/lib/io_nonsingleton.py
Normal file
433
pype/ftrack/lib/io_nonsingleton.py
Normal file
|
|
@ -0,0 +1,433 @@
|
|||
"""
|
||||
Wrapper around interactions with the database
|
||||
|
||||
Copy of io module in avalon-core.
|
||||
- In this case not working as singleton with api.Session!
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import errno
|
||||
import shutil
|
||||
import logging
|
||||
import tempfile
|
||||
import functools
|
||||
import contextlib
|
||||
|
||||
from avalon import schema
|
||||
from avalon.vendor import requests
|
||||
|
||||
# Third-party dependencies
|
||||
import pymongo
|
||||
|
||||
|
||||
def auto_reconnect(func):
|
||||
"""Handling auto reconnect in 3 retry times"""
|
||||
@functools.wraps(func)
|
||||
def decorated(*args, **kwargs):
|
||||
object = args[0]
|
||||
for retry in range(3):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except pymongo.errors.AutoReconnect:
|
||||
object.log.error("Reconnecting..")
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
raise
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
class DbConnector(object):
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self):
|
||||
self.Session = {}
|
||||
self._mongo_client = None
|
||||
self._sentry_client = None
|
||||
self._sentry_logging_handler = None
|
||||
self._database = None
|
||||
self._is_installed = False
|
||||
|
||||
def install(self):
|
||||
"""Establish a persistent connection to the database"""
|
||||
if self._is_installed:
|
||||
return
|
||||
|
||||
logging.basicConfig()
|
||||
self.Session.update(self._from_environment())
|
||||
|
||||
timeout = int(self.Session["AVALON_TIMEOUT"])
|
||||
self._mongo_client = pymongo.MongoClient(
|
||||
self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)
|
||||
|
||||
for retry in range(3):
|
||||
try:
|
||||
t1 = time.time()
|
||||
self._mongo_client.server_info()
|
||||
|
||||
except Exception:
|
||||
self.log.error("Retrying..")
|
||||
time.sleep(1)
|
||||
timeout *= 1.5
|
||||
|
||||
else:
|
||||
break
|
||||
|
||||
else:
|
||||
raise IOError(
|
||||
"ERROR: Couldn't connect to %s in "
|
||||
"less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))
|
||||
|
||||
self.log.info("Connected to %s, delay %.3f s" % (
|
||||
self.Session["AVALON_MONGO"], time.time() - t1))
|
||||
|
||||
self._install_sentry()
|
||||
|
||||
self._database = self._mongo_client[self.Session["AVALON_DB"]]
|
||||
self._is_installed = True
|
||||
|
||||
def _install_sentry(self):
|
||||
if "AVALON_SENTRY" not in self.Session:
|
||||
return
|
||||
|
||||
try:
|
||||
from raven import Client
|
||||
from raven.handlers.logging import SentryHandler
|
||||
from raven.conf import setup_logging
|
||||
except ImportError:
|
||||
# Note: There was a Sentry address in this Session
|
||||
return self.log.warning("Sentry disabled, raven not installed")
|
||||
|
||||
client = Client(self.Session["AVALON_SENTRY"])
|
||||
|
||||
# Transmit log messages to Sentry
|
||||
handler = SentryHandler(client)
|
||||
handler.setLevel(logging.WARNING)
|
||||
|
||||
setup_logging(handler)
|
||||
|
||||
self._sentry_client = client
|
||||
self._sentry_logging_handler = handler
|
||||
self.log.info(
|
||||
"Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
|
||||
)
|
||||
|
||||
def _from_environment(self):
|
||||
Session = {
|
||||
item[0]: os.getenv(item[0], item[1])
|
||||
for item in (
|
||||
# Root directory of projects on disk
|
||||
("AVALON_PROJECTS", None),
|
||||
|
||||
# Name of current Project
|
||||
("AVALON_PROJECT", ""),
|
||||
|
||||
# Name of current Asset
|
||||
("AVALON_ASSET", ""),
|
||||
|
||||
# Name of current silo
|
||||
("AVALON_SILO", ""),
|
||||
|
||||
# Name of current task
|
||||
("AVALON_TASK", None),
|
||||
|
||||
# Name of current app
|
||||
("AVALON_APP", None),
|
||||
|
||||
# Path to working directory
|
||||
("AVALON_WORKDIR", None),
|
||||
|
||||
# Name of current Config
|
||||
# TODO(marcus): Establish a suitable default config
|
||||
("AVALON_CONFIG", "no_config"),
|
||||
|
||||
# Name of Avalon in graphical user interfaces
|
||||
# Use this to customise the visual appearance of Avalon
|
||||
# to better integrate with your surrounding pipeline
|
||||
("AVALON_LABEL", "Avalon"),
|
||||
|
||||
# Used during any connections to the outside world
|
||||
("AVALON_TIMEOUT", "1000"),
|
||||
|
||||
# Address to Asset Database
|
||||
("AVALON_MONGO", "mongodb://localhost:27017"),
|
||||
|
||||
# Name of database used in MongoDB
|
||||
("AVALON_DB", "avalon"),
|
||||
|
||||
# Address to Sentry
|
||||
("AVALON_SENTRY", None),
|
||||
|
||||
# Address to Deadline Web Service
|
||||
# E.g. http://192.167.0.1:8082
|
||||
("AVALON_DEADLINE", None),
|
||||
|
||||
# Enable features not necessarily stable. The user's own risk
|
||||
("AVALON_EARLY_ADOPTER", None),
|
||||
|
||||
# Address of central asset repository, contains
|
||||
# the following interface:
|
||||
# /upload
|
||||
# /download
|
||||
# /manager (optional)
|
||||
("AVALON_LOCATION", "http://127.0.0.1"),
|
||||
|
||||
# Boolean of whether to upload published material
|
||||
# to central asset repository
|
||||
("AVALON_UPLOAD", None),
|
||||
|
||||
# Generic username and password
|
||||
("AVALON_USERNAME", "avalon"),
|
||||
("AVALON_PASSWORD", "secret"),
|
||||
|
||||
# Unique identifier for instances in working files
|
||||
("AVALON_INSTANCE_ID", "avalon.instance"),
|
||||
("AVALON_CONTAINER_ID", "avalon.container"),
|
||||
|
||||
# Enable debugging
|
||||
("AVALON_DEBUG", None),
|
||||
|
||||
) if os.getenv(item[0], item[1]) is not None
|
||||
}
|
||||
|
||||
Session["schema"] = "avalon-core:session-1.0"
|
||||
try:
|
||||
schema.validate(Session)
|
||||
except schema.ValidationError as e:
|
||||
# TODO(marcus): Make this mandatory
|
||||
self.log.warning(e)
|
||||
|
||||
return Session
|
||||
|
||||
def uninstall(self):
|
||||
"""Close any connection to the database"""
|
||||
try:
|
||||
self._mongo_client.close()
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
self._mongo_client = None
|
||||
self._database = None
|
||||
self._is_installed = False
|
||||
|
||||
def active_project(self):
|
||||
"""Return the name of the active project"""
|
||||
return self.Session["AVALON_PROJECT"]
|
||||
|
||||
def activate_project(self, project_name):
|
||||
self.Session["AVALON_PROJECT"] = project_name
|
||||
|
||||
def projects(self):
|
||||
"""List available projects
|
||||
|
||||
Returns:
|
||||
list of project documents
|
||||
|
||||
"""
|
||||
|
||||
collection_names = self.collections()
|
||||
for project in collection_names:
|
||||
if project in ("system.indexes",):
|
||||
continue
|
||||
|
||||
# Each collection will have exactly one project document
|
||||
document = self.find_project(project)
|
||||
|
||||
if document is not None:
|
||||
yield document
|
||||
|
||||
def locate(self, path):
|
||||
"""Traverse a hierarchy from top-to-bottom
|
||||
|
||||
Example:
|
||||
representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])
|
||||
|
||||
Returns:
|
||||
representation (ObjectId)
|
||||
|
||||
"""
|
||||
|
||||
components = zip(
|
||||
("project", "asset", "subset", "version", "representation"),
|
||||
path
|
||||
)
|
||||
|
||||
parent = None
|
||||
for type_, name in components:
|
||||
latest = (type_ == "version") and name in (None, -1)
|
||||
|
||||
try:
|
||||
if latest:
|
||||
parent = self.find_one(
|
||||
filter={
|
||||
"type": type_,
|
||||
"parent": parent
|
||||
},
|
||||
projection={"_id": 1},
|
||||
sort=[("name", -1)]
|
||||
)["_id"]
|
||||
else:
|
||||
parent = self.find_one(
|
||||
filter={
|
||||
"type": type_,
|
||||
"name": name,
|
||||
"parent": parent
|
||||
},
|
||||
projection={"_id": 1},
|
||||
)["_id"]
|
||||
|
||||
except TypeError:
|
||||
return None
|
||||
|
||||
return parent
|
||||
|
||||
@auto_reconnect
|
||||
def collections(self):
|
||||
return self._database.collection_names()
|
||||
|
||||
@auto_reconnect
|
||||
def find_project(self, project):
|
||||
return self._database[project].find_one({"type": "project"})
|
||||
|
||||
@auto_reconnect
|
||||
def insert_one(self, item):
|
||||
assert isinstance(item, dict), "item must be of type <dict>"
|
||||
schema.validate(item)
|
||||
return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)
|
||||
|
||||
@auto_reconnect
|
||||
def insert_many(self, items, ordered=True):
|
||||
# check if all items are valid
|
||||
assert isinstance(items, list), "`items` must be of type <list>"
|
||||
for item in items:
|
||||
assert isinstance(item, dict), "`item` must be of type <dict>"
|
||||
schema.validate(item)
|
||||
|
||||
return self._database[self.Session["AVALON_PROJECT"]].insert_many(
|
||||
items,
|
||||
ordered=ordered)
|
||||
|
||||
@auto_reconnect
|
||||
def find(self, filter, projection=None, sort=None):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].find(
|
||||
filter=filter,
|
||||
projection=projection,
|
||||
sort=sort
|
||||
)
|
||||
|
||||
@auto_reconnect
|
||||
def find_one(self, filter, projection=None, sort=None):
|
||||
assert isinstance(filter, dict), "filter must be <dict>"
|
||||
|
||||
return self._database[self.Session["AVALON_PROJECT"]].find_one(
|
||||
filter=filter,
|
||||
projection=projection,
|
||||
sort=sort
|
||||
)
|
||||
|
||||
@auto_reconnect
|
||||
def save(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].save(
|
||||
*args, **kwargs)
|
||||
|
||||
@auto_reconnect
|
||||
def replace_one(self, filter, replacement):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].replace_one(
|
||||
filter, replacement)
|
||||
|
||||
@auto_reconnect
|
||||
def update_many(self, filter, update):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].update_many(
|
||||
filter, update)
|
||||
|
||||
@auto_reconnect
|
||||
def distinct(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].distinct(
|
||||
*args, **kwargs)
|
||||
|
||||
@auto_reconnect
|
||||
def drop(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].drop(
|
||||
*args, **kwargs)
|
||||
|
||||
@auto_reconnect
|
||||
def delete_many(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].delete_many(
|
||||
*args, **kwargs)
|
||||
|
||||
def parenthood(self, document):
|
||||
assert document is not None, "This is a bug"
|
||||
|
||||
parents = list()
|
||||
|
||||
while document.get("parent") is not None:
|
||||
document = self.find_one({"_id": document["parent"]})
|
||||
|
||||
if document is None:
|
||||
break
|
||||
|
||||
parents.append(document)
|
||||
|
||||
return parents
|
||||
|
||||
@contextlib.contextmanager
|
||||
def tempdir(self):
|
||||
tempdir = tempfile.mkdtemp()
|
||||
try:
|
||||
yield tempdir
|
||||
finally:
|
||||
shutil.rmtree(tempdir)
|
||||
|
||||
def download(self, src, dst):
|
||||
"""Download `src` to `dst`
|
||||
|
||||
Arguments:
|
||||
src (str): URL to source file
|
||||
dst (str): Absolute path to destination file
|
||||
|
||||
Yields tuple (progress, error):
|
||||
progress (int): Between 0-100
|
||||
error (Exception): Any exception raised when first making connection
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
response = requests.get(
|
||||
src,
|
||||
stream=True,
|
||||
auth=requests.auth.HTTPBasicAuth(
|
||||
self.Session["AVALON_USERNAME"],
|
||||
self.Session["AVALON_PASSWORD"]
|
||||
)
|
||||
)
|
||||
except requests.ConnectionError as e:
|
||||
yield None, e
|
||||
return
|
||||
|
||||
with self.tempdir() as dirname:
|
||||
tmp = os.path.join(dirname, os.path.basename(src))
|
||||
|
||||
with open(tmp, "wb") as f:
|
||||
total_length = response.headers.get("content-length")
|
||||
|
||||
if total_length is None: # no content length header
|
||||
f.write(response.content)
|
||||
else:
|
||||
downloaded = 0
|
||||
total_length = int(total_length)
|
||||
for data in response.iter_content(chunk_size=4096):
|
||||
downloaded += len(data)
|
||||
f.write(data)
|
||||
|
||||
yield int(100.0 * downloaded / total_length), None
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.dirname(dst))
|
||||
except OSError as e:
|
||||
# An already existing destination directory is fine.
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
|
||||
shutil.copy(tmp, dst)
|
||||
|
|
@ -88,9 +88,11 @@ class FtrackModule:
|
|||
def set_action_server(self):
|
||||
try:
|
||||
self.action_server.run_server()
|
||||
except Exception:
|
||||
msg = 'Ftrack Action server crashed! Please try to start again.'
|
||||
log.error(msg)
|
||||
except Exception as exc:
|
||||
log.error(
|
||||
"Ftrack Action server crashed! Please try to start again.",
|
||||
exc_info=True
|
||||
)
|
||||
# TODO show message to user
|
||||
self.bool_action_server = False
|
||||
self.set_menu_visibility()
|
||||
|
|
|
|||
|
|
@ -138,8 +138,8 @@ def update_frame_range(comp, representations):
|
|||
versions = io.find({"type": "version", "_id": {"$in": version_ids}})
|
||||
versions = list(versions)
|
||||
|
||||
start = min(v["data"]["startFrame"] for v in versions)
|
||||
end = max(v["data"]["endFrame"] for v in versions)
|
||||
start = min(v["data"]["frameStart"] for v in versions)
|
||||
end = max(v["data"]["frameEnd"] for v in versions)
|
||||
|
||||
fusion_lib.update_frame_range(start, end, comp=comp)
|
||||
|
||||
|
|
|
|||
|
|
@ -10,10 +10,7 @@ from avalon.houdini import pipeline as houdini
|
|||
|
||||
from pype.houdini import lib
|
||||
|
||||
from pype.lib import (
|
||||
any_outdated,
|
||||
update_task_from_path
|
||||
)
|
||||
from pype.lib import any_outdated
|
||||
|
||||
|
||||
PARENT_DIR = os.path.dirname(__file__)
|
||||
|
|
@ -57,8 +54,6 @@ def on_save(*args):
|
|||
|
||||
avalon.logger.info("Running callback on save..")
|
||||
|
||||
update_task_from_path(hou.hipFile.path())
|
||||
|
||||
nodes = lib.get_id_required_nodes()
|
||||
for node, new_id in lib.generate_ids(nodes):
|
||||
lib.set_id(node, new_id, overwrite=False)
|
||||
|
|
@ -68,8 +63,6 @@ def on_open(*args):
|
|||
|
||||
avalon.logger.info("Running callback on open..")
|
||||
|
||||
update_task_from_path(hou.hipFile.path())
|
||||
|
||||
if any_outdated():
|
||||
from ..widgets import popup
|
||||
|
||||
|
|
|
|||
|
|
@ -205,7 +205,7 @@ def validate_fps():
|
|||
|
||||
"""
|
||||
|
||||
fps = lib.get_asset_fps()
|
||||
fps = lib.get_asset()["data"]["fps"]
|
||||
current_fps = hou.fps() # returns float
|
||||
|
||||
if current_fps != fps:
|
||||
|
|
|
|||
444
pype/lib.py
444
pype/lib.py
|
|
@ -4,6 +4,8 @@ import logging
|
|||
import importlib
|
||||
import itertools
|
||||
import contextlib
|
||||
import subprocess
|
||||
import inspect
|
||||
|
||||
from .vendor import pather
|
||||
from .vendor.pather.error import ParseError
|
||||
|
|
@ -15,11 +17,68 @@ import avalon
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_handle_irregular(asset):
|
||||
data = asset["data"]
|
||||
handle_start = data.get("handle_start", 0)
|
||||
handle_end = data.get("handle_end", 0)
|
||||
return (handle_start, handle_end)
|
||||
# Special naming case for subprocess since its a built-in method.
|
||||
def _subprocess(args):
|
||||
"""Convenience method for getting output errors for subprocess."""
|
||||
|
||||
proc = subprocess.Popen(
|
||||
args,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
stdin=subprocess.PIPE,
|
||||
env=os.environ
|
||||
)
|
||||
|
||||
output = proc.communicate()[0]
|
||||
|
||||
if proc.returncode != 0:
|
||||
log.error(output)
|
||||
raise ValueError("\"{}\" was not successful: {}".format(args, output))
|
||||
return output
|
||||
|
||||
|
||||
def get_hierarchy(asset_name=None):
|
||||
"""
|
||||
Obtain asset hierarchy path string from mongo db
|
||||
|
||||
Returns:
|
||||
string: asset hierarchy path
|
||||
|
||||
"""
|
||||
if not asset_name:
|
||||
asset_name = io.Session.get("AVALON_ASSET", os.environ["AVALON_ASSET"])
|
||||
|
||||
asset_entity = io.find_one({
|
||||
"type": 'asset',
|
||||
"name": asset_name
|
||||
})
|
||||
|
||||
not_set = "PARENTS_NOT_SET"
|
||||
entity_parents = asset_entity.get("data", {}).get("parents", not_set)
|
||||
|
||||
# If entity already have parents then just return joined
|
||||
if entity_parents != not_set:
|
||||
return "/".join(entity_parents)
|
||||
|
||||
# Else query parents through visualParents and store result to entity
|
||||
hierarchy_items = []
|
||||
entity = asset_entity
|
||||
while True:
|
||||
parent_id = entity.get("data", {}).get("visualParent")
|
||||
if not parent_id:
|
||||
break
|
||||
entity = io.find_one({"_id": parent_id})
|
||||
hierarchy_items.append(entity["name"])
|
||||
|
||||
# Add parents to entity data for next query
|
||||
entity_data = asset_entity.get("data", {})
|
||||
entity_data["parents"] = hierarchy_items
|
||||
io.update_many(
|
||||
{"_id": asset_entity["_id"]},
|
||||
{"$set": {"data": entity_data}}
|
||||
)
|
||||
|
||||
return "/".join(hierarchy_items)
|
||||
|
||||
|
||||
def add_tool_to_environment(tools):
|
||||
|
|
@ -138,45 +197,6 @@ def any_outdated():
|
|||
return False
|
||||
|
||||
|
||||
def update_task_from_path(path):
|
||||
"""Update the context using the current scene state.
|
||||
|
||||
When no changes to the context it will not trigger an update.
|
||||
When the context for a file could not be parsed an error is logged but not
|
||||
raised.
|
||||
|
||||
"""
|
||||
if not path:
|
||||
log.warning("Can't update the current task. Scene is not saved.")
|
||||
return
|
||||
|
||||
# Find the current context from the filename
|
||||
project = io.find_one({"type": "project"},
|
||||
projection={"config.template.work": True})
|
||||
template = project['config']['template']['work']
|
||||
# Force to use the registered to root to avoid using wrong paths
|
||||
template = pather.format(template, {"root": avalon.api.registered_root()})
|
||||
try:
|
||||
context = pather.parse(template, path)
|
||||
except ParseError:
|
||||
log.error("Can't update the current task. Unable to parse the "
|
||||
"task for: %s (pattern: %s)", path, template)
|
||||
return
|
||||
|
||||
# Find the changes between current Session and the path's context.
|
||||
current = {
|
||||
"asset": avalon.api.Session["AVALON_ASSET"],
|
||||
"task": avalon.api.Session["AVALON_TASK"]
|
||||
# "app": avalon.api.Session["AVALON_APP"]
|
||||
}
|
||||
changes = {key: context[key] for key, current_value in current.items()
|
||||
if context[key] != current_value}
|
||||
|
||||
if changes:
|
||||
log.info("Updating work task to: %s", context)
|
||||
avalon.api.update_current_task(**changes)
|
||||
|
||||
|
||||
def _rreplace(s, a, b, n=1):
|
||||
"""Replace a with b in string s from right side n times"""
|
||||
return b.join(s.rsplit(a, n))
|
||||
|
|
@ -196,7 +216,7 @@ def version_up(filepath):
|
|||
dirname = os.path.dirname(filepath)
|
||||
basename, ext = os.path.splitext(os.path.basename(filepath))
|
||||
|
||||
regex = "[._]v\d+"
|
||||
regex = r"[._]v\d+"
|
||||
matches = re.findall(regex, str(basename), re.IGNORECASE)
|
||||
if not matches:
|
||||
log.info("Creating version...")
|
||||
|
|
@ -204,7 +224,7 @@ def version_up(filepath):
|
|||
new_basename = "{}{}".format(basename, new_label)
|
||||
else:
|
||||
label = matches[-1]
|
||||
version = re.search("\d+", label).group()
|
||||
version = re.search(r"\d+", label).group()
|
||||
padding = len(version)
|
||||
|
||||
new_version = int(version) + 1
|
||||
|
|
@ -312,140 +332,107 @@ def _get_host_name():
|
|||
return _host.__name__.rsplit(".", 1)[-1]
|
||||
|
||||
|
||||
def collect_container_metadata(container):
|
||||
"""Add additional data based on the current host
|
||||
def get_asset(asset_name=None):
|
||||
entity_data_keys_from_project_when_miss = [
|
||||
"frameStart", "frameEnd", "handleStart", "handleEnd", "fps",
|
||||
"resolutionWidth", "resolutionHeight"
|
||||
]
|
||||
|
||||
If the host application's lib module does not have a function to inject
|
||||
additional data it will return the input container
|
||||
entity_keys_from_project_when_miss = []
|
||||
|
||||
alternatives = {
|
||||
"handleStart": "handles",
|
||||
"handleEnd": "handles"
|
||||
}
|
||||
|
||||
defaults = {
|
||||
"handleStart": 0,
|
||||
"handleEnd": 0
|
||||
}
|
||||
|
||||
if not asset_name:
|
||||
asset_name = avalon.api.Session["AVALON_ASSET"]
|
||||
|
||||
asset_document = io.find_one({"name": asset_name, "type": "asset"})
|
||||
if not asset_document:
|
||||
raise TypeError("Entity \"{}\" was not found in DB".format(asset_name))
|
||||
|
||||
project_document = io.find_one({"type": "project"})
|
||||
|
||||
for key in entity_data_keys_from_project_when_miss:
|
||||
if asset_document["data"].get(key):
|
||||
continue
|
||||
|
||||
value = project_document["data"].get(key)
|
||||
if value is not None or key not in alternatives:
|
||||
asset_document["data"][key] = value
|
||||
continue
|
||||
|
||||
alt_key = alternatives[key]
|
||||
value = asset_document["data"].get(alt_key)
|
||||
if value is not None:
|
||||
asset_document["data"][key] = value
|
||||
continue
|
||||
|
||||
value = project_document["data"].get(alt_key)
|
||||
if value:
|
||||
asset_document["data"][key] = value
|
||||
continue
|
||||
|
||||
if key in defaults:
|
||||
asset_document["data"][key] = defaults[key]
|
||||
|
||||
for key in entity_keys_from_project_when_miss:
|
||||
if asset_document.get(key):
|
||||
continue
|
||||
|
||||
value = project_document.get(key)
|
||||
if value is not None or key not in alternatives:
|
||||
asset_document[key] = value
|
||||
continue
|
||||
|
||||
alt_key = alternatives[key]
|
||||
value = asset_document.get(alt_key)
|
||||
if value:
|
||||
asset_document[key] = value
|
||||
continue
|
||||
|
||||
value = project_document.get(alt_key)
|
||||
if value:
|
||||
asset_document[key] = value
|
||||
continue
|
||||
|
||||
if key in defaults:
|
||||
asset_document[key] = defaults[key]
|
||||
|
||||
return asset_document
|
||||
|
||||
|
||||
def get_project():
|
||||
io.install()
|
||||
return io.find_one({"type": "project"})
|
||||
|
||||
|
||||
def get_version_from_path(file):
|
||||
"""
|
||||
Finds version number in file path string
|
||||
|
||||
Args:
|
||||
container (dict): collection if representation data in host
|
||||
file (string): file path
|
||||
|
||||
Returns:
|
||||
generator
|
||||
"""
|
||||
# TODO: Improve method of getting the host lib module
|
||||
host_name = _get_host_name()
|
||||
package_name = "pype.{}.lib".format(host_name)
|
||||
hostlib = importlib.import_module(package_name)
|
||||
|
||||
if not hasattr(hostlib, "get_additional_data"):
|
||||
return {}
|
||||
|
||||
return hostlib.get_additional_data(container)
|
||||
|
||||
|
||||
def get_asset_fps():
|
||||
"""Returns project's FPS, if not found will return 25 by default
|
||||
|
||||
Returns:
|
||||
int, float
|
||||
v: version number in string ('001')
|
||||
|
||||
"""
|
||||
|
||||
key = "fps"
|
||||
|
||||
# FPS from asset data (if set)
|
||||
asset_data = get_asset_data()
|
||||
if key in asset_data:
|
||||
return asset_data[key]
|
||||
|
||||
# FPS from project data (if set)
|
||||
project_data = get_project_data()
|
||||
if key in project_data:
|
||||
return project_data[key]
|
||||
|
||||
# Fallback to 25 FPS
|
||||
return 25.0
|
||||
|
||||
|
||||
def get_project_data():
|
||||
"""Get the data of the current project
|
||||
|
||||
The data of the project can contain things like:
|
||||
resolution
|
||||
fps
|
||||
renderer
|
||||
|
||||
Returns:
|
||||
dict:
|
||||
|
||||
"""
|
||||
|
||||
project_name = io.active_project()
|
||||
project = io.find_one({"name": project_name,
|
||||
"type": "project"},
|
||||
projection={"data": True})
|
||||
|
||||
data = project.get("data", {})
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def get_asset_data(asset=None):
|
||||
"""Get the data from the current asset
|
||||
|
||||
Args:
|
||||
asset(str, Optional): name of the asset, eg:
|
||||
|
||||
Returns:
|
||||
dict
|
||||
"""
|
||||
asset_name = asset or avalon.api.Session["AVALON_ASSET"]
|
||||
document = io.find_one({"name": asset_name,
|
||||
"type": "asset"})
|
||||
data = document.get("data", {})
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def get_data_hierarchical_attr(entity, attr_name):
|
||||
vp_attr = 'visualParent'
|
||||
data = entity['data']
|
||||
value = data.get(attr_name, None)
|
||||
if value is not None:
|
||||
return value
|
||||
elif vp_attr in data:
|
||||
if data[vp_attr] is None:
|
||||
parent_id = entity['parent']
|
||||
else:
|
||||
parent_id = data[vp_attr]
|
||||
parent = io.find_one({"_id": parent_id})
|
||||
return get_data_hierarchical_attr(parent, attr_name)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def get_avalon_project_config_schema():
|
||||
schema = 'avalon-core:config-1.0'
|
||||
return schema
|
||||
|
||||
|
||||
def get_avalon_project_template_schema():
|
||||
schema = "avalon-core:project-2.0"
|
||||
return schema
|
||||
|
||||
|
||||
def get_avalon_project_template():
|
||||
from pypeapp import Anatomy
|
||||
|
||||
"""
|
||||
Get avalon template
|
||||
|
||||
Returns:
|
||||
dictionary with templates
|
||||
"""
|
||||
templates = Anatomy().templates
|
||||
proj_template = {}
|
||||
proj_template['workfile'] = templates["avalon"]["workfile"]
|
||||
proj_template['work'] = templates["avalon"]["work"]
|
||||
proj_template['publish'] = templates["avalon"]["publish"]
|
||||
return proj_template
|
||||
|
||||
|
||||
def get_avalon_asset_template_schema():
|
||||
schema = "avalon-core:asset-2.0"
|
||||
return schema
|
||||
pattern = re.compile(r"[\._]v([0-9]+)")
|
||||
try:
|
||||
return pattern.findall(file)[0]
|
||||
except IndexError:
|
||||
log.error(
|
||||
"templates:get_version_from_workfile:"
|
||||
"`{}` missing version string."
|
||||
"Example `v004`".format(file)
|
||||
)
|
||||
|
||||
|
||||
def get_avalon_database():
|
||||
|
|
@ -455,31 +442,20 @@ def get_avalon_database():
|
|||
|
||||
|
||||
def set_io_database():
|
||||
project = os.environ.get('AVALON_PROJECT', '')
|
||||
asset = os.environ.get('AVALON_ASSET', '')
|
||||
silo = os.environ.get('AVALON_SILO', '')
|
||||
os.environ['AVALON_PROJECT'] = project
|
||||
os.environ['AVALON_ASSET'] = asset
|
||||
os.environ['AVALON_SILO'] = silo
|
||||
required_keys = ["AVALON_PROJECT", "AVALON_ASSET", "AVALON_SILO"]
|
||||
for key in required_keys:
|
||||
os.environ[key] = os.environ.get(key, "")
|
||||
io.install()
|
||||
|
||||
|
||||
def get_all_avalon_projects():
|
||||
db = get_avalon_database()
|
||||
project_names = db.collection_names()
|
||||
projects = []
|
||||
for name in project_names:
|
||||
for name in db.collection_names():
|
||||
projects.append(db[name].find_one({'type': 'project'}))
|
||||
return projects
|
||||
|
||||
|
||||
def get_presets_path():
|
||||
templates = os.environ['PYPE_CONFIG']
|
||||
path_items = [templates, 'presets']
|
||||
filepath = os.path.sep.join(path_items)
|
||||
return filepath
|
||||
|
||||
|
||||
def filter_pyblish_plugins(plugins):
|
||||
"""
|
||||
This servers as plugin filter / modifier for pyblish. It will load plugin
|
||||
|
|
@ -494,13 +470,29 @@ def filter_pyblish_plugins(plugins):
|
|||
|
||||
host = api.current_host()
|
||||
|
||||
presets = config.get_presets().get('plugins', {})
|
||||
|
||||
# iterate over plugins
|
||||
for plugin in plugins[:]:
|
||||
try:
|
||||
config_data = config.get_presets()['plugins'][host]["publish"][plugin.__name__] # noqa: E501
|
||||
except KeyError:
|
||||
# skip if there are no presets to process
|
||||
if not presets:
|
||||
continue
|
||||
|
||||
file = os.path.normpath(inspect.getsourcefile(plugin))
|
||||
file = os.path.normpath(file)
|
||||
|
||||
# host determined from path
|
||||
host_from_file = file.split(os.path.sep)[-3:-2][0]
|
||||
plugin_kind = file.split(os.path.sep)[-2:-1][0]
|
||||
|
||||
try:
|
||||
config_data = presets[host]["publish"][plugin.__name__]
|
||||
except KeyError:
|
||||
try:
|
||||
config_data = presets[host_from_file][plugin_kind][plugin.__name__] # noqa: E501
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
for option, value in config_data.items():
|
||||
if option == "enabled" and value is False:
|
||||
log.info('removing plugin {}'.format(plugin.__name__))
|
||||
|
|
@ -510,3 +502,73 @@ def filter_pyblish_plugins(plugins):
|
|||
option, value, plugin.__name__))
|
||||
|
||||
setattr(plugin, option, value)
|
||||
|
||||
|
||||
def get_subsets(asset_name,
|
||||
regex_filter=None,
|
||||
version=None,
|
||||
representations=["exr", "dpx"]):
|
||||
"""
|
||||
Query subsets with filter on name.
|
||||
|
||||
The method will return all found subsets and its defined version and subsets. Version could be specified with number. Representation can be filtered.
|
||||
|
||||
Arguments:
|
||||
asset_name (str): asset (shot) name
|
||||
regex_filter (raw): raw string with filter pattern
|
||||
version (str or int): `last` or number of version
|
||||
representations (list): list for all representations
|
||||
|
||||
Returns:
|
||||
dict: subsets with version and representaions in keys
|
||||
"""
|
||||
from avalon import io
|
||||
|
||||
# query asset from db
|
||||
asset_io = io.find_one({"type": "asset",
|
||||
"name": asset_name})
|
||||
|
||||
# check if anything returned
|
||||
assert asset_io, "Asset not existing. \
|
||||
Check correct name: `{}`".format(asset_name)
|
||||
|
||||
# create subsets query filter
|
||||
filter_query = {"type": "subset", "parent": asset_io["_id"]}
|
||||
|
||||
# add reggex filter string into query filter
|
||||
if regex_filter:
|
||||
filter_query.update({"name": {"$regex": r"{}".format(regex_filter)}})
|
||||
else:
|
||||
filter_query.update({"name": {"$regex": r'.*'}})
|
||||
|
||||
# query all assets
|
||||
subsets = [s for s in io.find(filter_query)]
|
||||
|
||||
assert subsets, "No subsets found. Check correct filter. Try this for start `r'.*'`: asset: `{}`".format(asset_name)
|
||||
|
||||
output_dict = {}
|
||||
# Process subsets
|
||||
for subset in subsets:
|
||||
if not version:
|
||||
version_sel = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[("name", -1)])
|
||||
else:
|
||||
assert isinstance(version, int), "version needs to be `int` type"
|
||||
version_sel = io.find_one({"type": "version",
|
||||
"parent": subset["_id"],
|
||||
"name": int(version)})
|
||||
|
||||
find_dict = {"type": "representation",
|
||||
"parent": version_sel["_id"]}
|
||||
|
||||
filter_repr = {"$or": [{"name": repr} for repr in representations]}
|
||||
|
||||
find_dict.update(filter_repr)
|
||||
repres_out = [i for i in io.find(find_dict)]
|
||||
|
||||
if len(repres_out) > 0:
|
||||
output_dict[subset["name"]] = {"version": version_sel,
|
||||
"representaions": repres_out}
|
||||
|
||||
return output_dict
|
||||
|
|
|
|||
|
|
@ -280,8 +280,8 @@ def collect_animation_data():
|
|||
|
||||
# build attributes
|
||||
data = OrderedDict()
|
||||
data["startFrame"] = start
|
||||
data["endFrame"] = end
|
||||
data["frameStart"] = start
|
||||
data["frameEnd"] = end
|
||||
data["handles"] = 0
|
||||
data["step"] = 1.0
|
||||
data["fps"] = fps
|
||||
|
|
@ -1858,16 +1858,16 @@ def set_context_settings():
|
|||
|
||||
# Todo (Wijnand): apply renderer and resolution of project
|
||||
|
||||
project_data = lib.get_project_data()
|
||||
asset_data = lib.get_asset_data()
|
||||
project_data = lib.get_project()["data"]
|
||||
asset_data = lib.get_asset()["data"]
|
||||
|
||||
# Set project fps
|
||||
fps = asset_data.get("fps", project_data.get("fps", 25))
|
||||
set_scene_fps(fps)
|
||||
|
||||
# Set project resolution
|
||||
width_key = "resolution_width"
|
||||
height_key = "resolution_height"
|
||||
width_key = "resolutionWidth"
|
||||
height_key = "resolutionHeight"
|
||||
|
||||
width = asset_data.get(width_key, project_data.get(width_key, 1920))
|
||||
height = asset_data.get(height_key, project_data.get(height_key, 1080))
|
||||
|
|
@ -1887,7 +1887,7 @@ def validate_fps():
|
|||
|
||||
"""
|
||||
|
||||
fps = lib.get_asset_fps()
|
||||
fps = lib.get_asset()["data"]["fps"]
|
||||
current_fps = mel.eval('currentTimeUnitToFPS()') # returns float
|
||||
|
||||
if current_fps != fps:
|
||||
|
|
|
|||
|
|
@ -59,13 +59,14 @@ class NukeHandler(logging.Handler):
|
|||
|
||||
|
||||
'''Adding Nuke Logging Handler'''
|
||||
log.info([handler.get_name() for handler in logging.root.handlers[:]])
|
||||
nuke_handler = NukeHandler()
|
||||
if nuke_handler.get_name() \
|
||||
not in [handler.get_name()
|
||||
for handler in logging.root.handlers[:]]:
|
||||
logging.getLogger().addHandler(nuke_handler)
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
log.info([handler.get_name() for handler in logging.root.handlers[:]])
|
||||
|
||||
def reload_config():
|
||||
"""Attempt to reload pipeline at run-time.
|
||||
|
|
@ -77,10 +78,7 @@ def reload_config():
|
|||
import importlib
|
||||
|
||||
for module in (
|
||||
"app",
|
||||
"app.api",
|
||||
"{}.api".format(AVALON_CONFIG),
|
||||
"{}.templates".format(AVALON_CONFIG),
|
||||
"{}.nuke.actions".format(AVALON_CONFIG),
|
||||
"{}.nuke.templates".format(AVALON_CONFIG),
|
||||
"{}.nuke.menu".format(AVALON_CONFIG),
|
||||
|
|
@ -96,9 +94,8 @@ def reload_config():
|
|||
|
||||
|
||||
def install():
|
||||
|
||||
# api.set_avalon_workdir()
|
||||
# reload_config()
|
||||
''' Installing all requarements for Nuke host
|
||||
'''
|
||||
|
||||
log.info("Registering Nuke plug-ins..")
|
||||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
|
|
@ -107,7 +104,7 @@ def install():
|
|||
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
|
||||
pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
|
||||
|
||||
workfile_settings = lib.WorkfileSettings()
|
||||
# Disable all families except for the ones we explicitly want to see
|
||||
family_states = [
|
||||
"write",
|
||||
|
|
@ -117,8 +114,6 @@ def install():
|
|||
avalon.data["familiesStateDefault"] = False
|
||||
avalon.data["familiesStateToggled"] = family_states
|
||||
|
||||
menu.install()
|
||||
|
||||
# Workfiles.
|
||||
launch_workfiles = os.environ.get("WORKFILES_STARTUP")
|
||||
|
||||
|
|
@ -126,16 +121,23 @@ def install():
|
|||
nuke.addOnCreate(launch_workfiles_app, nodeClass="Root")
|
||||
|
||||
# Set context settings.
|
||||
nuke.addOnCreate(lib.set_context_settings, nodeClass="Root")
|
||||
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root")
|
||||
|
||||
menu.install()
|
||||
|
||||
|
||||
|
||||
def launch_workfiles_app():
|
||||
'''Function letting start workfiles after start of host
|
||||
'''
|
||||
if not self.workfiles_launched:
|
||||
self.workfiles_launched = True
|
||||
workfiles.show(os.environ["AVALON_WORKDIR"])
|
||||
|
||||
|
||||
def uninstall():
|
||||
'''Uninstalling host's integration
|
||||
'''
|
||||
log.info("Deregistering Nuke plug-ins..")
|
||||
pyblish.deregister_plugin_path(PUBLISH_PATH)
|
||||
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
|
|
@ -144,8 +146,13 @@ def uninstall():
|
|||
pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
|
||||
|
||||
|
||||
reload_config()
|
||||
menu.uninstall()
|
||||
|
||||
|
||||
def on_pyblish_instance_toggled(instance, old_value, new_value):
|
||||
"""Toggle node passthrough states on instance toggles."""
|
||||
|
||||
log.info("instance toggle: {}, old_value: {}, new_value:{} ".format(
|
||||
instance, old_value, new_value))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,3 @@
|
|||
# absolute_import is needed to counter the `module has no cmds error` in Maya
|
||||
from __future__ import absolute_import
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from avalon.nuke.lib import (
|
||||
|
|
@ -12,7 +9,7 @@ from ..action import get_errored_instances_from_context
|
|||
|
||||
|
||||
class SelectInvalidAction(pyblish.api.Action):
|
||||
"""Select invalid nodes in Maya when plug-in failed.
|
||||
"""Select invalid nodes in Nuke when plug-in failed.
|
||||
|
||||
To retrieve the invalid nodes this assumes a static `get_invalid()`
|
||||
method is available on the plugin.
|
||||
|
|
|
|||
1035
pype/nuke/lib.py
1035
pype/nuke/lib.py
File diff suppressed because it is too large
Load diff
|
|
@ -2,21 +2,25 @@ import nuke
|
|||
from avalon.api import Session
|
||||
|
||||
from pype.nuke import lib
|
||||
from pypeapp import Logger
|
||||
|
||||
log = Logger().get_logger(__name__, "nuke")
|
||||
|
||||
def install():
|
||||
|
||||
menubar = nuke.menu("Nuke")
|
||||
menu = menubar.findItem(Session["AVALON_LABEL"])
|
||||
|
||||
workfile_settings = lib.WorkfileSettings()
|
||||
# replace reset resolution from avalon core to pype's
|
||||
name = "Reset Resolution"
|
||||
new_name = "Set Resolution"
|
||||
rm_item = [
|
||||
(i, item) for i, item in enumerate(menu.items()) if name in item.name()
|
||||
][0]
|
||||
|
||||
log.debug("Changing Item: {}".format(rm_item))
|
||||
# rm_item[1].setEnabled(False)
|
||||
menu.removeItem(rm_item[1].name())
|
||||
menu.addCommand(new_name, lib.reset_resolution, index=rm_item[0])
|
||||
menu.addCommand(new_name, workfile_settings.reset_resolution, index=(rm_item[0]))
|
||||
|
||||
# replace reset frame range from avalon core to pype's
|
||||
name = "Reset Frame Range"
|
||||
|
|
@ -24,18 +28,41 @@ def install():
|
|||
rm_item = [
|
||||
(i, item) for i, item in enumerate(menu.items()) if name in item.name()
|
||||
][0]
|
||||
log.debug("Changing Item: {}".format(rm_item))
|
||||
# rm_item[1].setEnabled(False)
|
||||
menu.removeItem(rm_item[1].name())
|
||||
menu.addCommand(new_name, lib.reset_frame_range_handles, index=rm_item[0])
|
||||
menu.addCommand(new_name, workfile_settings.reset_frame_range_handles, index=(rm_item[0]))
|
||||
|
||||
# add colorspace menu item
|
||||
name = "Set colorspace"
|
||||
menu.addCommand(
|
||||
name, lib.set_colorspace,
|
||||
name, workfile_settings.set_colorspace,
|
||||
index=(rm_item[0]+2)
|
||||
)
|
||||
log.debug("Adding menu item: {}".format(name))
|
||||
|
||||
# add workfile builder menu item
|
||||
name = "Build First Workfile.."
|
||||
menu.addCommand(
|
||||
name, lib.BuildWorkfile().process,
|
||||
index=(rm_item[0]+7)
|
||||
)
|
||||
log.debug("Adding menu item: {}".format(name))
|
||||
|
||||
# add item that applies all setting above
|
||||
name = "Apply all settings"
|
||||
menu.addCommand(
|
||||
name, lib.set_context_settings, index=(rm_item[0]+3)
|
||||
name, workfile_settings.set_context_settings, index=(rm_item[0]+3)
|
||||
)
|
||||
log.debug("Adding menu item: {}".format(name))
|
||||
|
||||
|
||||
|
||||
def uninstall():
|
||||
|
||||
menubar = nuke.menu("Nuke")
|
||||
menu = menubar.findItem(Session["AVALON_LABEL"])
|
||||
|
||||
for item in menu.items():
|
||||
log.info("Removing menu item: {}".format(item.name()))
|
||||
menu.removeItem(item.name())
|
||||
|
|
|
|||
|
|
@ -20,6 +20,8 @@ def get_colorspace_preset():
|
|||
|
||||
|
||||
def get_node_dataflow_preset(**kwarg):
|
||||
''' Get preset data for dataflow (fileType, compression, bitDepth)
|
||||
'''
|
||||
log.info(kwarg)
|
||||
host = kwarg.get("host", "nuke")
|
||||
cls = kwarg.get("class", None)
|
||||
|
|
@ -39,6 +41,8 @@ def get_node_dataflow_preset(**kwarg):
|
|||
|
||||
|
||||
def get_node_colorspace_preset(**kwarg):
|
||||
''' Get preset data for colorspace
|
||||
'''
|
||||
log.info(kwarg)
|
||||
host = kwarg.get("host", "nuke")
|
||||
cls = kwarg.get("class", None)
|
||||
|
|
|
|||
|
|
@ -1,24 +1,42 @@
|
|||
import os
|
||||
|
||||
from avalon.tools import workfiles
|
||||
from pypeapp import Logger
|
||||
import hiero
|
||||
from avalon import api as avalon
|
||||
from pyblish import api as pyblish
|
||||
|
||||
from .. import api
|
||||
from .workio import (
|
||||
open,
|
||||
save,
|
||||
current_file,
|
||||
has_unsaved_changes,
|
||||
file_extensions,
|
||||
work_root
|
||||
)
|
||||
|
||||
from .menu import (
|
||||
install as menu_install,
|
||||
_update_menu_task_label
|
||||
)
|
||||
from .tags import add_tags_from_presets
|
||||
|
||||
from pypeapp import Logger
|
||||
|
||||
import hiero
|
||||
__all__ = [
|
||||
# Workfiles API
|
||||
"open",
|
||||
"save",
|
||||
"current_file",
|
||||
"has_unsaved_changes",
|
||||
"file_extensions",
|
||||
"work_root",
|
||||
]
|
||||
|
||||
# get logger
|
||||
log = Logger().get_logger(__name__, "nukestudio")
|
||||
|
||||
|
||||
''' Creating all important host related variables '''
|
||||
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
|
||||
|
||||
# plugin root path
|
||||
PARENT_DIR = os.path.dirname(__file__)
|
||||
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
|
||||
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
|
||||
|
|
@ -28,13 +46,21 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "load")
|
|||
CREATE_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "create")
|
||||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "inventory")
|
||||
|
||||
|
||||
# registering particular pyblish gui but `lite` is recomended!!
|
||||
if os.getenv("PYBLISH_GUI", None):
|
||||
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))
|
||||
|
||||
|
||||
def install(config):
|
||||
"""
|
||||
Installing Nukestudio integration for avalon
|
||||
|
||||
Args:
|
||||
config (obj): avalon config module `pype` in our case, it is not used but required by avalon.api.install()
|
||||
|
||||
"""
|
||||
|
||||
# adding all events
|
||||
_register_events()
|
||||
|
||||
log.info("Registering NukeStudio plug-ins..")
|
||||
|
|
@ -53,6 +79,7 @@ def install(config):
|
|||
avalon.data["familiesStateDefault"] = False
|
||||
avalon.data["familiesStateToggled"] = family_states
|
||||
|
||||
# install menu
|
||||
menu_install()
|
||||
|
||||
# Workfiles.
|
||||
|
|
@ -70,11 +97,26 @@ def install(config):
|
|||
|
||||
|
||||
def add_tags(event):
|
||||
"""
|
||||
Event for automatic tag creation after nukestudio start
|
||||
|
||||
Args:
|
||||
event (obj): required but unused
|
||||
"""
|
||||
|
||||
add_tags_from_presets()
|
||||
|
||||
|
||||
def launch_workfiles_app(event):
|
||||
workfiles.show(os.environ["AVALON_WORKDIR"])
|
||||
"""
|
||||
Event for launching workfiles after nukestudio start
|
||||
|
||||
Args:
|
||||
event (obj): required but unused
|
||||
"""
|
||||
from .lib import set_workfiles
|
||||
|
||||
set_workfiles()
|
||||
|
||||
# Closing the new project.
|
||||
event.sender.close()
|
||||
|
|
@ -86,6 +128,10 @@ def launch_workfiles_app(event):
|
|||
|
||||
|
||||
def uninstall():
|
||||
"""
|
||||
Uninstalling Nukestudio integration for avalon
|
||||
|
||||
"""
|
||||
log.info("Deregistering NukeStudio plug-ins..")
|
||||
pyblish.deregister_host("nukestudio")
|
||||
pyblish.deregister_plugin_path(PUBLISH_PATH)
|
||||
|
|
@ -94,6 +140,11 @@ def uninstall():
|
|||
|
||||
|
||||
def _register_events():
|
||||
"""
|
||||
Adding all callbacks.
|
||||
"""
|
||||
|
||||
# if task changed then change notext of nukestudio
|
||||
avalon.on("taskChanged", _update_menu_task_label)
|
||||
log.info("Installed event callback for 'taskChanged'..")
|
||||
|
||||
|
|
@ -108,4 +159,5 @@ def ls():
|
|||
See the `container.json` schema for details on how it should look,
|
||||
and the Maya equivalent, which is in `avalon.maya.pipeline`
|
||||
"""
|
||||
# TODO: listing all availabe containers form sequence
|
||||
return
|
||||
|
|
|
|||
|
|
@ -1,19 +1,13 @@
|
|||
# Standard library
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Pyblish libraries
|
||||
import pyblish.api
|
||||
|
||||
import avalon.api as avalon
|
||||
import pype.api as pype
|
||||
|
||||
from avalon.vendor.Qt import (QtWidgets, QtGui)
|
||||
|
||||
# Host libraries
|
||||
import hiero
|
||||
|
||||
import pyblish.api
|
||||
import avalon.api as avalon
|
||||
from avalon.vendor.Qt import (QtWidgets, QtGui)
|
||||
import pype.api as pype
|
||||
from pypeapp import Logger
|
||||
|
||||
|
||||
log = Logger().get_logger(__name__, "nukestudio")
|
||||
|
||||
cached_process = None
|
||||
|
|
@ -30,12 +24,18 @@ AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
|
|||
def set_workfiles():
|
||||
''' Wrapping function for workfiles launcher '''
|
||||
from avalon.tools import workfiles
|
||||
|
||||
# import session to get project dir
|
||||
S = avalon.Session
|
||||
active_project_root = os.path.normpath(
|
||||
os.path.join(S['AVALON_PROJECTS'], S['AVALON_PROJECT'])
|
||||
)
|
||||
workdir = os.environ["AVALON_WORKDIR"]
|
||||
|
||||
# show workfile gui
|
||||
workfiles.show(workdir)
|
||||
|
||||
# getting project
|
||||
project = hiero.core.projects()[-1]
|
||||
|
||||
# set project root with backward compatibility
|
||||
|
|
@ -46,14 +46,14 @@ def set_workfiles():
|
|||
project.setProjectRoot(active_project_root)
|
||||
|
||||
# get project data from avalon db
|
||||
project_data = pype.get_project_data()
|
||||
project_data = pype.get_project()["data"]
|
||||
|
||||
log.info("project_data: {}".format(project_data))
|
||||
|
||||
# get format and fps property from avalon db on project
|
||||
width = project_data['resolution_width']
|
||||
height = project_data['resolution_height']
|
||||
pixel_aspect = project_data['pixel_aspect']
|
||||
width = project_data["resolutionWidth"]
|
||||
height = project_data["resolutionHeight"]
|
||||
pixel_aspect = project_data["pixelAspect"]
|
||||
fps = project_data['fps']
|
||||
format_name = project_data['code']
|
||||
|
||||
|
|
@ -64,11 +64,10 @@ def set_workfiles():
|
|||
# set fps to hiero project
|
||||
project.setFramerate(fps)
|
||||
|
||||
# TODO: add auto colorspace set from project drop
|
||||
log.info("Project property has been synchronised with Avalon db")
|
||||
|
||||
|
||||
|
||||
|
||||
def reload_config():
|
||||
"""Attempt to reload pipeline at run-time.
|
||||
|
||||
|
|
@ -189,6 +188,10 @@ def add_submission():
|
|||
|
||||
|
||||
class PublishAction(QtWidgets.QAction):
|
||||
"""
|
||||
Action with is showing as menu item
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
QtWidgets.QAction.__init__(self, "Publish", None)
|
||||
self.triggered.connect(self.publish)
|
||||
|
|
@ -213,7 +216,8 @@ class PublishAction(QtWidgets.QAction):
|
|||
|
||||
|
||||
def _show_no_gui():
|
||||
"""Popup with information about how to register a new GUI
|
||||
"""
|
||||
Popup with information about how to register a new GUI
|
||||
In the event of no GUI being registered or available,
|
||||
this information dialog will appear to guide the user
|
||||
through how to get set up with one.
|
||||
|
|
@ -283,3 +287,59 @@ def _show_no_gui():
|
|||
|
||||
messagebox.setStandardButtons(messagebox.Ok)
|
||||
messagebox.exec_()
|
||||
|
||||
|
||||
def CreateNukeWorkfile(nodes=None,
|
||||
nodes_effects=None,
|
||||
to_timeline=False,
|
||||
**kwargs):
|
||||
''' Creating nuke workfile with particular version with given nodes
|
||||
Also it is creating timeline track items as precomps.
|
||||
|
||||
Arguments:
|
||||
nodes(list of dict): each key in dict is knob order is important
|
||||
to_timeline(type): will build trackItem with metadata
|
||||
|
||||
Returns:
|
||||
bool: True if done
|
||||
|
||||
Raises:
|
||||
Exception: with traceback
|
||||
|
||||
'''
|
||||
import hiero.core
|
||||
from avalon.nuke import imprint
|
||||
from pype.nuke import (
|
||||
lib as nklib
|
||||
)
|
||||
|
||||
# check if the file exists if does then Raise "File exists!"
|
||||
if os.path.exists(filepath):
|
||||
raise FileExistsError("File already exists: `{}`".format(filepath))
|
||||
|
||||
# if no representations matching then
|
||||
# Raise "no representations to be build"
|
||||
if len(representations) == 0:
|
||||
raise AttributeError("Missing list of `representations`")
|
||||
|
||||
# check nodes input
|
||||
if len(nodes) == 0:
|
||||
log.warning("Missing list of `nodes`")
|
||||
|
||||
# create temp nk file
|
||||
nuke_script = hiero.core.nuke.ScriptWriter()
|
||||
|
||||
# create root node and save all metadata
|
||||
root_node = hiero.core.nuke.RootNode()
|
||||
|
||||
root_path = os.environ["AVALON_PROJECTS"]
|
||||
|
||||
nuke_script.addNode(root_node)
|
||||
|
||||
# here to call pype.nuke.lib.BuildWorkfile
|
||||
script_builder = nklib.BuildWorkfile(
|
||||
root_node=root_node,
|
||||
root_path=root_path,
|
||||
nodes=nuke_script.getNodes(),
|
||||
**kwargs
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,24 +1,23 @@
|
|||
import os
|
||||
import sys
|
||||
import hiero.core
|
||||
from pypeapp import Logger
|
||||
from avalon.api import Session
|
||||
from hiero.ui import findMenuAction
|
||||
|
||||
# this way we secure compatibility between nuke 10 and 11
|
||||
try:
|
||||
from PySide.QtGui import *
|
||||
except Exception:
|
||||
from PySide2.QtGui import *
|
||||
from PySide2.QtWidgets import *
|
||||
|
||||
from hiero.ui import findMenuAction
|
||||
|
||||
from avalon.api import Session
|
||||
|
||||
from .tags import add_tags_from_presets
|
||||
|
||||
from .lib import (
|
||||
reload_config,
|
||||
set_workfiles
|
||||
)
|
||||
from pypeapp import Logger
|
||||
|
||||
log = Logger().get_logger(__name__, "nukestudio")
|
||||
|
||||
|
|
@ -45,6 +44,11 @@ def _update_menu_task_label(*args):
|
|||
|
||||
|
||||
def install():
|
||||
"""
|
||||
Installing menu into Nukestudio
|
||||
|
||||
"""
|
||||
|
||||
# here is the best place to add menu
|
||||
from avalon.tools import (
|
||||
creator,
|
||||
|
|
@ -127,8 +131,6 @@ def install():
|
|||
'icon': QIcon('icons:ColorAdd.png')
|
||||
}]
|
||||
|
||||
|
||||
|
||||
# Create menu items
|
||||
for a in actions:
|
||||
add_to_menu = menu
|
||||
|
|
|
|||
|
|
@ -14,9 +14,9 @@ def create_nk_script_clips(script_lst, seq=None):
|
|||
'handles': 10,
|
||||
'handleStart': 15, # added asymetrically to handles
|
||||
'handleEnd': 10, # added asymetrically to handles
|
||||
'timelineIn': 16,
|
||||
'startFrame': 991,
|
||||
'endFrame': 1023,
|
||||
"clipIn": 16,
|
||||
"frameStart": 991,
|
||||
"frameEnd": 1023,
|
||||
'task': 'Comp-tracking',
|
||||
'work_dir': 'VFX_PR',
|
||||
'shot': '00010'
|
||||
|
|
@ -55,12 +55,12 @@ def create_nk_script_clips(script_lst, seq=None):
|
|||
if media_in:
|
||||
source_in = media_in + handle_start
|
||||
else:
|
||||
source_in = nk['startFrame'] + handle_start
|
||||
source_in = nk["frameStart"] + handle_start
|
||||
|
||||
if media_duration:
|
||||
source_out = (media_in + media_duration - 1) - handle_end
|
||||
else:
|
||||
source_out = nk['endFrame'] - handle_end
|
||||
source_out = nk["frameEnd"] - handle_end
|
||||
|
||||
print("__ media: `{}`".format(media))
|
||||
print("__ media_in: `{}`".format(media_in))
|
||||
|
|
@ -98,8 +98,8 @@ def create_nk_script_clips(script_lst, seq=None):
|
|||
trackItem.setSourceIn(source_in)
|
||||
trackItem.setSourceOut(source_out)
|
||||
trackItem.setSourceIn(source_in)
|
||||
trackItem.setTimelineIn(nk['timelineIn'])
|
||||
trackItem.setTimelineOut(nk['timelineIn'] + (source_out - source_in))
|
||||
trackItem.setTimelineIn(nk["clipIn"])
|
||||
trackItem.setTimelineOut(nk["clipIn"] + (source_out - source_in))
|
||||
track.addTrackItem(trackItem)
|
||||
track.addTrackItem(trackItem)
|
||||
clips_lst.append(trackItem)
|
||||
|
|
@ -179,9 +179,9 @@ script_lst = [{
|
|||
'handles': 10,
|
||||
'handleStart': 10,
|
||||
'handleEnd': 10,
|
||||
'timelineIn': 16,
|
||||
'startFrame': 991,
|
||||
'endFrame': 1023,
|
||||
"clipIn": 16,
|
||||
"frameStart": 991,
|
||||
"frameEnd": 1023,
|
||||
'task': 'platesMain',
|
||||
'work_dir': 'shots',
|
||||
'shot': '120sh020'
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import re
|
||||
import os
|
||||
import hiero
|
||||
|
||||
from pypeapp import (
|
||||
config,
|
||||
|
|
@ -7,8 +8,6 @@ from pypeapp import (
|
|||
)
|
||||
from avalon import io
|
||||
|
||||
import hiero
|
||||
|
||||
log = Logger().get_logger(__name__, "nukestudio")
|
||||
|
||||
|
||||
|
|
|
|||
44
pype/nukestudio/workio.py
Normal file
44
pype/nukestudio/workio.py
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
import os
|
||||
|
||||
import hiero
|
||||
|
||||
from avalon import api
|
||||
|
||||
|
||||
def file_extensions():
|
||||
return [".hrox"]
|
||||
|
||||
|
||||
def has_unsaved_changes():
|
||||
# There are no methods for querying unsaved changes to a project, so
|
||||
# enforcing to always save.
|
||||
return True
|
||||
|
||||
|
||||
def save(filepath):
|
||||
project = hiero.core.projects()[-1]
|
||||
if project:
|
||||
project.saveAs(filepath)
|
||||
else:
|
||||
project = hiero.core.newProject()
|
||||
project.saveAs(filepath)
|
||||
|
||||
|
||||
def open(filepath):
|
||||
hiero.core.openProject(filepath)
|
||||
return True
|
||||
|
||||
|
||||
def current_file():
|
||||
current_file = hiero.core.projects()[-1].path()
|
||||
normalised = os.path.normpath(current_file)
|
||||
|
||||
# Unsaved current file
|
||||
if normalised == "":
|
||||
return None
|
||||
|
||||
return normalised
|
||||
|
||||
|
||||
def work_root():
|
||||
return os.path.normpath(api.Session["AVALON_WORKDIR"]).replace("\\", "/")
|
||||
|
|
@ -2,13 +2,54 @@ import tempfile
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
from pypeapp import config
|
||||
import inspect
|
||||
|
||||
ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05
|
||||
ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1
|
||||
ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2
|
||||
ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3
|
||||
|
||||
|
||||
class Extractor(pyblish.api.InstancePlugin):
|
||||
def imprint_attributes(plugin):
|
||||
"""
|
||||
Load presets by class and set them as attributes (if found)
|
||||
|
||||
:param plugin: plugin instance
|
||||
:type plugin: instance
|
||||
"""
|
||||
file = inspect.getfile(plugin.__class__)
|
||||
file = os.path.normpath(file)
|
||||
plugin_kind = file.split(os.path.sep)[-2:-1][0]
|
||||
plugin_host = file.split(os.path.sep)[-3:-2][0]
|
||||
plugin_name = type(plugin).__name__
|
||||
try:
|
||||
config_data = config.get_presets()['plugins'][plugin_host][plugin_kind][plugin_name] # noqa: E501
|
||||
except KeyError:
|
||||
print("preset not found")
|
||||
return
|
||||
|
||||
for option, value in config_data.items():
|
||||
if option == "enabled" and value is False:
|
||||
setattr(plugin, "active", False)
|
||||
else:
|
||||
setattr(plugin, option, value)
|
||||
print("setting {}: {} on {}".format(option, value, plugin_name))
|
||||
|
||||
|
||||
class ContextPlugin(pyblish.api.ContextPlugin):
|
||||
def process(cls, *args, **kwargs):
|
||||
imprint_attributes(cls)
|
||||
super(ContextPlugin, cls).process(cls, *args, **kwargs)
|
||||
|
||||
|
||||
class InstancePlugin(pyblish.api.InstancePlugin):
|
||||
def process(cls, *args, **kwargs):
|
||||
imprint_attributes(cls)
|
||||
super(ContextPlugin, cls).process(cls, *args, **kwargs)
|
||||
|
||||
|
||||
class Extractor(InstancePlugin):
|
||||
"""Extractor base class.
|
||||
|
||||
The extractor base class implements a "staging_dir" function used to
|
||||
|
|
|
|||
|
|
@ -87,13 +87,13 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
|
|||
context.data["currentFile"] = current_file
|
||||
|
||||
# get project data from avalon
|
||||
project_data = pype.get_project_data()
|
||||
project_data = pype.get_project()["data"]
|
||||
assert project_data, "No `project_data` data in avalon db"
|
||||
context.data["projectData"] = project_data
|
||||
self.log.debug("project_data: {}".format(project_data))
|
||||
|
||||
# get asset data from avalon and fix all paths
|
||||
asset_data = pype.get_asset_data()
|
||||
asset_data = pype.get_asset()["data"]
|
||||
assert asset_data, "No `asset_data` data in avalon db"
|
||||
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
|
||||
if isinstance(v, str)}
|
||||
|
|
|
|||
|
|
@ -39,19 +39,18 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
|
|||
assert instances_data, "No `asset_default` data in json file"
|
||||
|
||||
asset_name = a_session["AVALON_ASSET"]
|
||||
entity = io.find_one({"name": asset_name,
|
||||
"type": "asset"})
|
||||
entity = pype.get_asset(asset_name)
|
||||
|
||||
# get frame start > first try from asset data
|
||||
frame_start = context.data["assetData"].get("fstart", None)
|
||||
frame_start = context.data["assetData"].get("frameStart", None)
|
||||
if not frame_start:
|
||||
self.log.debug("frame_start not on assetData")
|
||||
# get frame start > second try from parent data
|
||||
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
|
||||
frame_start = entity["data"]["frameStart"]
|
||||
if not frame_start:
|
||||
self.log.debug("frame_start not on any parent entity")
|
||||
# get frame start > third try from parent data
|
||||
frame_start = asset_default["fstart"]
|
||||
frame_start = asset_default["frameStart"]
|
||||
|
||||
assert frame_start, "No `frame_start` data found, "
|
||||
"please set `fstart` on asset"
|
||||
|
|
@ -61,7 +60,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
|
|||
handles = context.data["assetData"].get("handles", None)
|
||||
if not handles:
|
||||
# get frame start > second try from parent data
|
||||
handles = pype.get_data_hierarchical_attr(entity, "handles")
|
||||
handles = entity["data"]["handles"]
|
||||
if not handles:
|
||||
# get frame start > third try from parent data
|
||||
handles = asset_default["handles"]
|
||||
|
|
@ -129,7 +128,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
|
|||
instance.data.update({
|
||||
"subset": subset_name,
|
||||
"task": task,
|
||||
"fstart": frame_start,
|
||||
"frameStart": frame_start,
|
||||
"handles": handles,
|
||||
"host": host,
|
||||
"asset": asset,
|
||||
|
|
|
|||
|
|
@ -76,11 +76,11 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
|
|||
)
|
||||
else:
|
||||
end_frame += (
|
||||
instance.data['endFrame'] - instance.data['startFrame']
|
||||
instance.data["frameEnd"] - instance.data["frameStart"]
|
||||
)
|
||||
|
||||
if not comp.get('frameRate'):
|
||||
comp['frameRate'] = instance.context.data['fps']
|
||||
if not comp.get('fps'):
|
||||
comp['fps'] = instance.context.data['fps']
|
||||
location = self.get_ftrack_location(
|
||||
'ftrack.server', ft_session
|
||||
)
|
||||
|
|
@ -90,7 +90,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
|
|||
"metadata": {'ftr_meta': json.dumps({
|
||||
'frameIn': int(start_frame),
|
||||
'frameOut': int(end_frame),
|
||||
'frameRate': float(comp['frameRate'])})}
|
||||
'frameRate': float(comp['fps'])})}
|
||||
}
|
||||
comp['thumbnail'] = False
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -106,11 +106,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
|
|||
tasks_to_create = []
|
||||
for child in entity['children']:
|
||||
if child.entity_type.lower() == 'task':
|
||||
existing_tasks.append(child['name'])
|
||||
existing_tasks.append(child['name'].lower())
|
||||
# existing_tasks.append(child['type']['name'])
|
||||
|
||||
for task in tasks:
|
||||
if task in existing_tasks:
|
||||
if task.lower() in existing_tasks:
|
||||
print("Task {} already exists".format(task))
|
||||
continue
|
||||
tasks_to_create.append(task)
|
||||
|
|
|
|||
|
|
@ -27,8 +27,8 @@ class FusionSetFrameRangeLoader(api.Loader):
|
|||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
|
||||
start = version_data.get("startFrame", None)
|
||||
end = version_data.get("endFrame", None)
|
||||
start = version_data.get("frameStart", None)
|
||||
end = version_data.get("frameEnd", None)
|
||||
|
||||
if start is None or end is None:
|
||||
print("Skipping setting frame range because start or "
|
||||
|
|
@ -60,8 +60,8 @@ class FusionSetFrameRangeWithHandlesLoader(api.Loader):
|
|||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
|
||||
start = version_data.get("startFrame", None)
|
||||
end = version_data.get("endFrame", None)
|
||||
start = version_data.get("frameStart", None)
|
||||
end = version_data.get("frameEnd", None)
|
||||
|
||||
if start is None or end is None:
|
||||
print("Skipping setting frame range because start or "
|
||||
|
|
|
|||
|
|
@ -145,7 +145,7 @@ class FusionLoadSequence(api.Loader):
|
|||
tool["Clip"] = path
|
||||
|
||||
# Set global in point to start frame (if in version.data)
|
||||
start = context["version"]["data"].get("startFrame", None)
|
||||
start = context["version"]["data"].get("frameStart", None)
|
||||
if start is not None:
|
||||
loader_shift(tool, start, relative=False)
|
||||
|
||||
|
|
@ -175,7 +175,7 @@ class FusionLoadSequence(api.Loader):
|
|||
been set.
|
||||
|
||||
- GlobalIn: Fusion reset to comp's global in if duration changes
|
||||
- We change it to the "startFrame"
|
||||
- We change it to the "frameStart"
|
||||
|
||||
- GlobalEnd: Fusion resets to globalIn + length if duration changes
|
||||
- We do the same like Fusion - allow fusion to take control.
|
||||
|
|
@ -212,7 +212,7 @@ class FusionLoadSequence(api.Loader):
|
|||
# Get start frame from version data
|
||||
version = io.find_one({"type": "version",
|
||||
"_id": representation["parent"]})
|
||||
start = version["data"].get("startFrame")
|
||||
start = version["data"].get("frameStart")
|
||||
if start is None:
|
||||
self.log.warning("Missing start frame for updated version"
|
||||
"assuming starts at frame 0 for: "
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
"""Collect Fusion saver instances
|
||||
|
||||
This additionally stores the Comp start and end render range in the
|
||||
current context's data as "startFrame" and "endFrame".
|
||||
current context's data as "frameStart" and "frameEnd".
|
||||
|
||||
"""
|
||||
|
||||
|
|
@ -43,8 +43,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
savers = [tool for tool in tools if tool.ID == "Saver"]
|
||||
|
||||
start, end = get_comp_render_range(comp)
|
||||
context.data["startFrame"] = start
|
||||
context.data["endFrame"] = end
|
||||
context.data["frameStart"] = start
|
||||
context.data["frameEnd"] = end
|
||||
|
||||
for tool in savers:
|
||||
path = tool["Clip"][comp.TIME_UNDEFINED]
|
||||
|
|
|
|||
|
|
@ -53,8 +53,8 @@ class PublishImageSequence(pyblish.api.InstancePlugin):
|
|||
# The instance has most of the information already stored
|
||||
metadata = {
|
||||
"regex": regex,
|
||||
"startFrame": instance.context.data["startFrame"],
|
||||
"endFrame": instance.context.data["endFrame"],
|
||||
"frameStart": instance.context.data["frameStart"],
|
||||
"frameEnd": instance.context.data["frameEnd"],
|
||||
"families": ["imagesequence"],
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -79,8 +79,8 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
"Plugin": "Fusion",
|
||||
"Frames": "{start}-{end}".format(
|
||||
start=int(context.data["startFrame"]),
|
||||
end=int(context.data["endFrame"])
|
||||
start=int(context.data["frameStart"]),
|
||||
end=int(context.data["frameEnd"])
|
||||
),
|
||||
|
||||
"Comment": comment,
|
||||
|
|
|
|||
|
|
@ -1,22 +1,15 @@
|
|||
import os
|
||||
import subprocess
|
||||
import json
|
||||
from pype import lib as pypelib
|
||||
from pypeapp import config
|
||||
from avalon import api
|
||||
|
||||
|
||||
def get_config_data():
|
||||
path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json']
|
||||
filepath = os.path.sep.join(path_items)
|
||||
data = dict()
|
||||
with open(filepath) as data_file:
|
||||
data = json.load(data_file)
|
||||
return data
|
||||
|
||||
|
||||
def get_families():
|
||||
families = []
|
||||
paths = get_config_data().get('djv_paths', [])
|
||||
paths = config.get_presets().get("djv_view", {}).get("config", {}).get(
|
||||
"djv_paths", []
|
||||
)
|
||||
for path in paths:
|
||||
if os.path.exists(path):
|
||||
families.append("*")
|
||||
|
|
@ -25,13 +18,15 @@ def get_families():
|
|||
|
||||
|
||||
def get_representation():
|
||||
return get_config_data().get('file_ext', [])
|
||||
return config.get_presets().get("djv_view", {}).get("config", {}).get(
|
||||
'file_ext', []
|
||||
)
|
||||
|
||||
|
||||
class OpenInDJV(api.Loader):
|
||||
"""Open Image Sequence with system default"""
|
||||
|
||||
config_data = get_config_data()
|
||||
config_data = config.get_presets().get("djv_view", {}).get("config", {})
|
||||
families = get_families()
|
||||
representations = get_representation()
|
||||
|
||||
|
|
@ -42,7 +37,9 @@ class OpenInDJV(api.Loader):
|
|||
|
||||
def load(self, context, name, namespace, data):
|
||||
self.djv_path = None
|
||||
paths = get_config_data().get('djv_paths', [])
|
||||
paths = config.get_presets().get("djv_view", {}).get("config", {}).get(
|
||||
"djv_paths", []
|
||||
)
|
||||
for path in paths:
|
||||
if os.path.exists(path):
|
||||
self.djv_path = path
|
||||
|
|
|
|||
|
|
@ -67,9 +67,9 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
|
|||
if isinstance(component['files'], list):
|
||||
collections, remainder = clique.assemble(component['files'])
|
||||
self.log.debug("collecting sequence: {}".format(collections))
|
||||
instance.data['startFrame'] = int(component['startFrame'])
|
||||
instance.data['endFrame'] = int(component['endFrame'])
|
||||
instance.data['frameRate'] = int(component['frameRate'])
|
||||
instance.data["frameStart"] = int(component["frameStart"])
|
||||
instance.data["frameEnd"] = int(component["frameEnd"])
|
||||
instance.data['fps'] = int(component['fps'])
|
||||
|
||||
instance.data["representations"].append(component)
|
||||
|
||||
|
|
|
|||
|
|
@ -6,14 +6,13 @@ from pprint import pformat
|
|||
|
||||
import pyblish.api
|
||||
from avalon import api
|
||||
import pype.api as pype
|
||||
|
||||
|
||||
def collect(root,
|
||||
regex=None,
|
||||
exclude_regex=None,
|
||||
startFrame=None,
|
||||
endFrame=None):
|
||||
frame_start=None,
|
||||
frame_end=None):
|
||||
"""Collect sequence collections in root"""
|
||||
|
||||
from avalon.vendor import clique
|
||||
|
|
@ -52,10 +51,10 @@ def collect(root,
|
|||
# Exclude any frames outside start and end frame.
|
||||
for collection in collections:
|
||||
for index in list(collection.indexes):
|
||||
if startFrame is not None and index < startFrame:
|
||||
if frame_start is not None and index < frame_start:
|
||||
collection.indexes.discard(index)
|
||||
continue
|
||||
if endFrame is not None and index > endFrame:
|
||||
if frame_end is not None and index > frame_end:
|
||||
collection.indexes.discard(index)
|
||||
continue
|
||||
|
||||
|
|
@ -77,8 +76,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
api.Session["AVALON_ASSET"]
|
||||
subset (str): The subset to publish to. If not provided the sequence's
|
||||
head (up to frame number) will be used.
|
||||
startFrame (int): The start frame for the sequence
|
||||
endFrame (int): The end frame for the sequence
|
||||
frame_start (int): The start frame for the sequence
|
||||
frame_end (int): The end frame for the sequence
|
||||
root (str): The path to collect from (can be relative to the .json)
|
||||
regex (str): A regex for the sequence filename
|
||||
exclude_regex (str): A regex for filename to exclude from collection
|
||||
|
|
@ -143,8 +142,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
collections = collect(root=root,
|
||||
regex=regex,
|
||||
exclude_regex=data.get("exclude_regex"),
|
||||
startFrame=data.get("startFrame"),
|
||||
endFrame=data.get("endFrame"))
|
||||
frame_start=data.get("frameStart"),
|
||||
frame_end=data.get("frameEnd"))
|
||||
|
||||
self.log.info("Found collections: {}".format(collections))
|
||||
|
||||
|
|
@ -179,8 +178,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
|
||||
# If no start or end frame provided, get it from collection
|
||||
indices = list(collection.indexes)
|
||||
start = data.get("startFrame", indices[0])
|
||||
end = data.get("endFrame", indices[-1])
|
||||
start = data.get("frameStart", indices[0])
|
||||
end = data.get("frameEnd", indices[-1])
|
||||
|
||||
# root = os.path.normpath(root)
|
||||
# self.log.info("Source: {}}".format(data.get("source", "")))
|
||||
|
|
@ -194,8 +193,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
"subset": subset,
|
||||
"asset": data.get("asset", api.Session["AVALON_ASSET"]),
|
||||
"stagingDir": root,
|
||||
"startFrame": start,
|
||||
"endFrame": end,
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"fps": fps,
|
||||
"source": data.get('source', '')
|
||||
})
|
||||
|
|
@ -211,7 +210,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
'files': list(collection),
|
||||
"stagingDir": root,
|
||||
"anatomy_template": "render",
|
||||
"frameRate": fps,
|
||||
"fps": fps,
|
||||
"tags": ['review']
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import json
|
||||
import pyblish.api
|
||||
from pype import lib as pypelib
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
|
||||
|
|
@ -12,13 +12,5 @@ class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
|
|||
hosts = ["shell"]
|
||||
|
||||
def process(self, context):
|
||||
config_items = [
|
||||
pypelib.get_presets_path(),
|
||||
"ftrack",
|
||||
"output_representation.json"
|
||||
]
|
||||
config_file = os.path.sep.join(config_items)
|
||||
with open(config_file) as data_file:
|
||||
config_data = json.load(data_file)
|
||||
|
||||
config_data = config.get_presets()["ftrack"]["output_representation"]
|
||||
context.data['output_repre_config'] = config_data
|
||||
|
|
|
|||
|
|
@ -12,6 +12,6 @@ class CollectProjectData(pyblish.api.ContextPlugin):
|
|||
|
||||
def process(self, context):
|
||||
# get project data from avalon db
|
||||
context.data["projectData"] = pype.get_project_data()
|
||||
context.data["projectData"] = pype.get_project()["data"]
|
||||
|
||||
return
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import subprocess
|
||||
import pype.api
|
||||
import json
|
||||
|
||||
import pype.api
|
||||
import pyblish
|
||||
|
||||
|
||||
|
|
@ -33,7 +33,7 @@ class ExtractBurnin(pype.api.Extractor):
|
|||
"username": instance.context.data['user'],
|
||||
"asset": os.environ['AVALON_ASSET'],
|
||||
"task": os.environ['AVALON_TASK'],
|
||||
"start_frame": int(instance.data['startFrame']),
|
||||
"start_frame": int(instance.data["frameStart"]),
|
||||
"version": version
|
||||
}
|
||||
self.log.debug("__ prep_data: {}".format(prep_data))
|
||||
|
|
@ -92,31 +92,21 @@ class ExtractBurnin(pype.api.Extractor):
|
|||
|
||||
self.log.debug("__ EXE: {}".format(executable))
|
||||
|
||||
try:
|
||||
args = [executable, scriptpath, json_data]
|
||||
self.log.debug("Executing: {}".format(args))
|
||||
args = [executable, scriptpath, json_data]
|
||||
self.log.debug("Executing: {}".format(args))
|
||||
output = pype.api.subprocess(args)
|
||||
self.log.debug("Output: {}".format(output))
|
||||
|
||||
# Explicitly passing the environment, because there are cases
|
||||
# where enviroment is not inherited.
|
||||
p = subprocess.Popen(args, env=os.environ)
|
||||
p.wait()
|
||||
repre_update = {
|
||||
"files": movieFileBurnin,
|
||||
"name": repre["name"],
|
||||
"tags": [x for x in repre["tags"] if x != "delete"]
|
||||
}
|
||||
instance.data["representations"][i].update(repre_update)
|
||||
|
||||
if not os.path.isfile(full_burnin_path):
|
||||
raise RuntimeError("File not existing: {}".format(full_burnin_path))
|
||||
except Exception as e:
|
||||
raise RuntimeError("Burnin script didn't work: `{}`".format(e))
|
||||
|
||||
if os.path.exists(full_burnin_path):
|
||||
repre_update = {
|
||||
"files": movieFileBurnin,
|
||||
"name": repre["name"],
|
||||
"tags": [x for x in repre["tags"] if x != "delete"]
|
||||
}
|
||||
instance.data["representations"][i].update(repre_update)
|
||||
|
||||
# removing the source mov file
|
||||
os.remove(full_movie_path)
|
||||
self.log.debug("Removed: `{}`".format(full_movie_path))
|
||||
# removing the source mov file
|
||||
os.remove(full_movie_path)
|
||||
self.log.debug("Removed: `{}`".format(full_movie_path))
|
||||
|
||||
# Remove any representations tagged for deletion.
|
||||
for repre in instance.data["representations"]:
|
||||
|
|
|
|||
|
|
@ -2,11 +2,11 @@ import pyblish.api
|
|||
from avalon import io
|
||||
|
||||
|
||||
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
|
||||
class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
|
||||
"""Create entities in Avalon based on collected data."""
|
||||
|
||||
order = pyblish.api.IntegratorOrder - 0.1
|
||||
label = "Integrate Hierarchy To Avalon"
|
||||
order = pyblish.api.ExtractorOrder - 0.01
|
||||
label = "Extract Hierarchy To Avalon"
|
||||
families = ["clip", "shot"]
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -1,7 +1,8 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import subprocess
|
||||
from pype.vendor import clique
|
||||
import pype.api
|
||||
|
||||
|
||||
class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
||||
|
|
@ -20,9 +21,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
order = pyblish.api.ExtractorOrder
|
||||
families = ["imagesequence", "render", "write", "source"]
|
||||
|
||||
|
||||
def process(self, instance):
|
||||
start = instance.data.get("startFrame")
|
||||
start = instance.data.get("frameStart")
|
||||
stagingdir = os.path.normpath(instance.data.get("stagingDir"))
|
||||
|
||||
collected_frames = os.listdir(stagingdir)
|
||||
|
|
@ -59,8 +59,10 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
jpeg_items.append(full_output_path)
|
||||
|
||||
subprocess_jpeg = " ".join(jpeg_items)
|
||||
sub_proc = subprocess.Popen(subprocess_jpeg)
|
||||
sub_proc.wait()
|
||||
|
||||
# run subprocess
|
||||
self.log.debug("{}".format(subprocess_jpeg))
|
||||
pype.api.subprocess(subprocess_jpeg)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import subprocess
|
||||
from pype.vendor import clique
|
||||
import pype.api
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
|
|
@ -29,7 +30,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
|
||||
inst_data = instance.data
|
||||
fps = inst_data.get("fps")
|
||||
start_frame = inst_data.get("startFrame")
|
||||
start_frame = inst_data.get("frameStart")
|
||||
|
||||
self.log.debug("Families In: `{}`".format(instance.data["families"]))
|
||||
|
||||
|
|
@ -86,7 +87,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
|
||||
repre_new = repre.copy()
|
||||
|
||||
new_tags = tags[:]
|
||||
new_tags = [x for x in tags if x != "delete"]
|
||||
p_tags = profile.get('tags', [])
|
||||
self.log.info("p_tags: `{}`".format(p_tags))
|
||||
# add families
|
||||
|
|
@ -169,22 +170,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
subprcs_cmd = " ".join(mov_args)
|
||||
|
||||
# run subprocess
|
||||
self.log.debug("{}".format(subprcs_cmd))
|
||||
sub_proc = subprocess.Popen(
|
||||
subprcs_cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
stdin=subprocess.PIPE,
|
||||
cwd=os.path.dirname(output_args[-1])
|
||||
)
|
||||
|
||||
output = sub_proc.communicate()[0]
|
||||
|
||||
if not os.path.isfile(full_output_path):
|
||||
raise ValueError(
|
||||
"Quicktime wasn't created succesfully: "
|
||||
"{}".format(output)
|
||||
)
|
||||
self.log.debug("Executing: {}".format(subprcs_cmd))
|
||||
output = pype.api.subprocess(subprcs_cmd)
|
||||
self.log.debug("Output: {}".format(output))
|
||||
|
||||
# create representation data
|
||||
repre_new.update({
|
||||
|
|
@ -200,16 +188,17 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
repre_new.pop("thumbnail")
|
||||
|
||||
# adding representation
|
||||
self.log.debug("Adding: {}".format(repre_new))
|
||||
representations_new.append(repre_new)
|
||||
# if "delete" in tags:
|
||||
# if "mov" in full_input_path:
|
||||
# os.remove(full_input_path)
|
||||
# self.log.debug("Removed: `{}`".format(full_input_path))
|
||||
else:
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
|
||||
for repre in representations_new:
|
||||
if "delete" in repre.get("tags", []):
|
||||
representations_new.remove(repre)
|
||||
|
||||
self.log.debug(
|
||||
"new representations: {}".format(representations_new))
|
||||
instance.data["representations"] = representations_new
|
||||
|
|
|
|||
|
|
@ -404,7 +404,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
|
||||
# Include optional data if present in
|
||||
optionals = [
|
||||
"startFrame", "endFrame", "step", "handles", "sourceHashes"
|
||||
"frameStart", "frameEnd", "step", "handles", "sourceHashes"
|
||||
]
|
||||
for key in optionals:
|
||||
if key in instance.data:
|
||||
|
|
|
|||
|
|
@ -36,9 +36,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
template from anatomy that should be used for
|
||||
integrating this file. Only the first level can
|
||||
be specified right now.
|
||||
'startFrame'
|
||||
'endFrame'
|
||||
'framerate'
|
||||
"frameStart"
|
||||
"frameEnd"
|
||||
'fps'
|
||||
"""
|
||||
|
||||
label = "Integrate Asset New"
|
||||
|
|
@ -63,6 +63,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"rig",
|
||||
"plate",
|
||||
"look",
|
||||
"lut",
|
||||
"audio"
|
||||
]
|
||||
exclude_families = ["clip"]
|
||||
|
|
@ -303,10 +304,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
dst_tail = dst_collection.format("{tail}")
|
||||
|
||||
index_frame_start = None
|
||||
if repre.get('startFrame'):
|
||||
if repre.get("frameStart"):
|
||||
frame_start_padding = len(str(
|
||||
repre.get('endFrame')))
|
||||
index_frame_start = repre.get('startFrame')
|
||||
repre.get("frameEnd")))
|
||||
index_frame_start = repre.get("frameStart")
|
||||
|
||||
dst_padding_exp = src_padding_exp
|
||||
for i in src_collection.indexes:
|
||||
|
|
@ -410,6 +411,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
Args:
|
||||
instance: the instance to integrate
|
||||
"""
|
||||
transfers = instance.data.get("transfers", list())
|
||||
|
||||
for src, dest in transfers:
|
||||
if os.path.normpath(src) != os.path.normpath(dest):
|
||||
self.copy_file(src, dest)
|
||||
|
||||
transfers = instance.data.get("transfers", list())
|
||||
for src, dest in transfers:
|
||||
self.copy_file(src, dest)
|
||||
|
||||
# Produce hardlinked copies
|
||||
# Note: hardlink can only be produced between two files on the same
|
||||
|
|
@ -544,8 +554,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
|
||||
# Include optional data if present in
|
||||
optionals = [
|
||||
"startFrame", "endFrame", "step", "handles",
|
||||
"handle_end", "handle_start", "sourceHashes"
|
||||
"frameStart", "frameEnd", "step", "handles",
|
||||
"handleEnd", "handleStart", "sourceHashes"
|
||||
]
|
||||
for key in optionals:
|
||||
if key in instance.data:
|
||||
|
|
|
|||
|
|
@ -408,7 +408,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
|
|||
"comment": context.data.get("comment")}
|
||||
|
||||
# Include optional data if present in
|
||||
optionals = ["startFrame", "endFrame", "step",
|
||||
optionals = ["frameStart", "frameEnd", "step",
|
||||
"handles", "colorspace", "fps", "outputDir"]
|
||||
|
||||
for key in optionals:
|
||||
|
|
|
|||
|
|
@ -121,7 +121,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
- publishJobState (str, Optional): "Active" or "Suspended"
|
||||
This defaults to "Suspended"
|
||||
|
||||
This requires a "startFrame" and "endFrame" to be present in instance.data
|
||||
This requires a "frameStart" and "frameEnd" to be present in instance.data
|
||||
or in context.data.
|
||||
|
||||
"""
|
||||
|
|
@ -138,6 +138,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"imagesequence"
|
||||
]
|
||||
|
||||
enviro_filter = [
|
||||
"PATH",
|
||||
"PYTHONPATH",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_API_KEY",
|
||||
"FTRACK_SERVER",
|
||||
"PYPE_ROOT"
|
||||
]
|
||||
|
||||
|
||||
def _submit_deadline_post_job(self, instance, job):
|
||||
"""
|
||||
Deadline specific code separated from :meth:`process` for sake of
|
||||
|
|
@ -181,13 +191,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
|
||||
# Transfer the environment from the original job to this dependent
|
||||
# job so they use the same environment
|
||||
|
||||
|
||||
environment = job["Props"].get("Env", {})
|
||||
payload["JobInfo"].update({
|
||||
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
|
||||
key=key,
|
||||
value=environment[key]
|
||||
) for index, key in enumerate(environment)
|
||||
})
|
||||
i = 0
|
||||
for index, key in enumerate(environment):
|
||||
self.log.info("KEY: {}".format(key))
|
||||
self.log.info("FILTER: {}".format(self.enviro_filter))
|
||||
|
||||
if key.upper() in self.enviro_filter:
|
||||
payload["JobInfo"].update({
|
||||
"EnvironmentKeyValue%d" % i: "{key}={value}".format(
|
||||
key=key,
|
||||
value=environment[key]
|
||||
)
|
||||
})
|
||||
i += 1
|
||||
|
||||
# Avoid copied pools and remove secondary pool
|
||||
payload["JobInfo"]["Pool"] = "none"
|
||||
|
|
@ -212,40 +231,36 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"""
|
||||
# Get a submission job
|
||||
data = instance.data.copy()
|
||||
job = instance.data.get("deadlineSubmissionJob")
|
||||
render_job = data.pop("deadlineSubmissionJob")
|
||||
submission_type = "deadline"
|
||||
|
||||
if not job:
|
||||
if not render_job:
|
||||
# No deadline job. Try Muster: musterSubmissionJob
|
||||
job = data.pop("musterSubmissionJob")
|
||||
render_job = data.pop("musterSubmissionJob")
|
||||
submission_type = "muster"
|
||||
if not job:
|
||||
if not render_job:
|
||||
raise RuntimeError("Can't continue without valid Deadline "
|
||||
"or Muster submission prior to this "
|
||||
"plug-in.")
|
||||
|
||||
if submission_type == "deadline":
|
||||
render_job = data.pop("deadlineSubmissionJob")
|
||||
self.DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
|
||||
"http://localhost:8082")
|
||||
assert self.DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
|
||||
|
||||
self._submit_deadline_post_job(instance, job)
|
||||
|
||||
if submission_type == "muster":
|
||||
render_job = data.pop("musterSubmissionJob")
|
||||
self._submit_deadline_post_job(instance, render_job)
|
||||
|
||||
asset = data.get("asset") or api.Session["AVALON_ASSET"]
|
||||
subset = data["subset"]
|
||||
|
||||
# Get start/end frame from instance, if not available get from context
|
||||
context = instance.context
|
||||
start = instance.data.get("startFrame")
|
||||
start = instance.data.get("frameStart")
|
||||
if start is None:
|
||||
start = context.data["startFrame"]
|
||||
end = instance.data.get("endFrame")
|
||||
start = context.data["frameStart"]
|
||||
end = instance.data.get("frameEnd")
|
||||
if end is None:
|
||||
end = context.data["endFrame"]
|
||||
end = context.data["frameEnd"]
|
||||
|
||||
# Add in regex for sequence filename
|
||||
# This assumes the output files start with subset name and ends with
|
||||
|
|
@ -270,8 +285,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
metadata = {
|
||||
"asset": asset,
|
||||
"regex": regex,
|
||||
"startFrame": start,
|
||||
"endFrame": end,
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"fps": context.data.get("fps", None),
|
||||
"families": ["render"],
|
||||
"source": source,
|
||||
|
|
@ -319,8 +334,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
|
||||
# Set prev start / end frames for comparison
|
||||
if not prev_start and not prev_end:
|
||||
prev_start = version["data"]["startFrame"]
|
||||
prev_end = version["data"]["endFrame"]
|
||||
prev_start = version["data"]["frameStart"]
|
||||
prev_end = version["data"]["frameEnd"]
|
||||
|
||||
subset_resources = get_resources(version, _ext)
|
||||
resource_files = get_resource_files(subset_resources,
|
||||
|
|
@ -356,12 +371,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
# Please do so when fixing this.
|
||||
|
||||
# Start frame
|
||||
metadata["startFrame"] = updated_start
|
||||
metadata["metadata"]["instance"]["startFrame"] = updated_start
|
||||
metadata["frameStart"] = updated_start
|
||||
metadata["metadata"]["instance"]["frameStart"] = updated_start
|
||||
|
||||
# End frame
|
||||
metadata["endFrame"] = updated_end
|
||||
metadata["metadata"]["instance"]["endFrame"] = updated_end
|
||||
metadata["frameEnd"] = updated_end
|
||||
metadata["metadata"]["instance"]["frameEnd"] = updated_end
|
||||
|
||||
metadata_filename = "{}_metadata.json".format(subset)
|
||||
|
||||
|
|
|
|||
12
pype/plugins/global/publish/validate_filesequences.py
Normal file
12
pype/plugins/global/publish/validate_filesequences.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class ValidateFileSequences(pyblish.api.ContextPlugin):
|
||||
"""Validates whether any file sequences were collected."""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
targets = ["filesequence"]
|
||||
label = "Validate File Sequences"
|
||||
|
||||
def process(self, context):
|
||||
assert context, "Nothing collected."
|
||||
|
|
@ -22,8 +22,8 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
|
|||
frames = list(collection.indexes)
|
||||
|
||||
current_range = (frames[0], frames[-1])
|
||||
required_range = (instance.data["startFrame"],
|
||||
instance.data["endFrame"])
|
||||
required_range = (instance.data["frameStart"],
|
||||
instance.data["frameEnd"])
|
||||
|
||||
if current_range != required_range:
|
||||
raise ValueError("Invalid frame range: {0} - "
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue