Merge branch 'develop' into feature/PYPE-438-muster-templates-preset

# Conflicts:
#	pype/plugins/maya/publish/submit_maya_muster.py
This commit is contained in:
Milan Kolar 2019-09-05 15:01:28 +02:00
commit ecbdf94e40
246 changed files with 7451 additions and 4891 deletions

54
changelog.md Normal file
View file

@ -0,0 +1,54 @@
# Pype changelog #
Welcome to pype changelog
## 2.1 ##
A large cleanup release. Most of the changes are under the hood.
**new**:
- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts
- _(pype)_ Added configurable option to add burnins to any generated quicktimes
- _(ftrack)_ Action that identifies what machines pype is running on.
- _(system)_ unify subprocess calls
- _(maya)_ add audio to review quicktimes
- _(nuke)_ add crop before write node to prevent overscan problems in ffmpeg
- **Nuke Studio** publishing and workfiles support
- **Muster** render manager support
- _(nuke)_ Framerange, FPS and Resolution are set automatically at startup
- _(maya)_ Ability to load published sequences as image planes
- _(system)_ Ftrack event that sets asset folder permissions based on task assignees in ftrack.
- _(maya)_ Pyblish plugin that allow validation of maya attributes
- _(system)_ added better startup logging to tray debug, including basic connection information
- _(avalon)_ option to group published subsets to groups in the loader
- _(avalon)_ loader family filters are working now
**changed**:
- change multiple key attributes to unify their behaviour across the pipeline
- `frameRate` to `fps`
- `startFrame` to `frameStart`
- `endFrame` to `frameEnd`
- `fstart` to `frameStart`
- `fend` to `frameEnd`
- `handle_start` to `handleStart`
- `handle_end` to `handleEnd`
- `resolution_width` to `resolutionWidth`
- `resolution_height` to `resolutionHeight`
- `pixel_aspect` to `pixelAspect`
- _(nuke)_ write nodes are now created inside group with only some attributes editable by the artist
- rendered frames are now deleted from temporary location after their publishing is finished.
- _(ftrack)_ RV action can now be launched from any entity
- after publishing only refresh button is now available in pyblish UI
- added context instance pyblish-lite so that artist knows if context plugin fails
- _(avalon)_ allow opening selected files using enter key
- _(avalon)_ core updated to v5.2.9 with our forked changes on top
**fix**:
- faster hierarchy retrieval from db
- _(nuke)_ A lot of stability enhancements
- _(nuke studio)_ A lot of stability enhancements
- _(nuke)_ now only renders a single write node on farm
- _(ftrack)_ pype would crash when launching a project level task
- work directory was sometimes not being created correctly
- major pype.lib cleanup. Removing of unused functions, merging those that were doing the same and general house cleaning.
- _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner

View file

@ -7,6 +7,8 @@ from .lib import filter_pyblish_plugins
import logging
log = logging.getLogger(__name__)
__version__ = "2.1.0"
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

View file

@ -18,31 +18,20 @@ from .action import (
from pypeapp import Logger
from .templates import (
get_project_name,
get_project_code,
get_hierarchy,
get_asset,
get_task,
set_avalon_workdir,
get_version_from_path,
get_workdir_template,
set_hierarchy,
set_project_code
)
from .lib import (
version_up,
get_handle_irregular,
get_project_data,
get_asset_data,
get_asset,
get_project,
get_hierarchy,
get_subsets,
get_version_from_path,
modified_environ,
add_tool_to_environment,
get_data_hierarchical_attr,
get_avalon_project_template
add_tool_to_environment
)
# Special naming case for subprocess since its a built-in method.
from .lib import _subprocess as subprocess
__all__ = [
# plugin classes
"Extractor",
@ -54,28 +43,21 @@ __all__ = [
# action
"get_errored_instances_from_context",
"RepairAction",
"RepairContextAction",
"Logger",
"ValidationException",
# get contextual data
"get_handle_irregular",
"get_project_data",
"get_asset_data",
"get_project_name",
"get_project_code",
"version_up",
"get_project",
"get_hierarchy",
"get_asset",
"get_task",
"set_avalon_workdir",
"get_subsets",
"get_version_from_path",
"get_workdir_template",
"modified_environ",
"add_tool_to_environment",
"set_hierarchy",
"set_project_code",
"get_data_hierarchical_attr",
"get_avalon_project_template",
"subprocess"
]

View file

@ -6,6 +6,7 @@ from pyblish import api as pyblish
from pypeapp import execute, Logger
from .. import api
from .lib import set_avalon_workdir
log = Logger().get_logger(__name__, "aport")
@ -33,7 +34,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")
def install():
api.set_avalon_workdir()
set_avalon_workdir()
log.info("Registering Aport plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)

View file

@ -80,17 +80,23 @@ def publish(json_data_path, gui):
@pico.expose()
def context(project, asset, task, app):
def context(project_name, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
os.environ["AVALON_PROJECT"] = project_name
io.Session["AVALON_PROJECT"] = project_name
avalon.update_current_task(task, asset, app)
project_code = pype.get_project_code()
pype.set_project_code(project_code)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
pype.set_hierarchy(hierarchy)
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)

135
pype/aport/lib.py Normal file
View file

@ -0,0 +1,135 @@
import os
import re
import sys
from avalon import io, api as avalon, lib as avalonlib
from pype import lib
from pype import api as pype
# from pypeapp.api import (Templates, Logger, format)
from pypeapp import Logger, Anatomy
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
def get_asset():
    """Obtain the asset name from the avalon session or environment.

    Returns:
        str: asset name

    Raises:
        AssertionError: when neither ``io.Session`` nor ``os.environ``
            provide ``AVALON_ASSET``.
    """
    lib.set_io_database()
    asset = io.Session.get("AVALON_ASSET", None) \
        or os.getenv("AVALON_ASSET", None)
    log.info("asset: {}".format(asset))
    # BUGFIX: the original used `assert asset, log.error(...)` - the
    # log call returns None, so the AssertionError carried no message
    # and the concatenated string was missing a space. Raise explicitly
    # so validation also survives `python -O`.
    if not asset:
        msg = "missing `AVALON_ASSET` in avalon session or os.environ!"
        log.error(msg)
        raise AssertionError(msg)
    return asset
def get_context_data(
    project_name=None, hierarchy=None, asset=None, task_name=None
):
    """Collect the main contextual data used for template formatting.

    Args:
        project_name (str, optional): project name; when given it is
            also written to ``os.environ`` / ``io.Session``
        hierarchy (str, optional): hierarchy path (queried when missing)
        asset (str, optional): asset name (queried when missing)
        task_name (str, optional): task name (taken from session or
            environment when missing)

    Returns:
        dict: contextual data (task, asset, project name/code,
            hierarchy and application directory)
    """
    if not task_name:
        lib.set_io_database()
        task_name = io.Session.get("AVALON_TASK", None) \
            or os.getenv("AVALON_TASK", None)
        assert task_name, log.error(
            "missing `AVALON_TASK` in avalon session or os.environ!"
        )

    application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])

    # BUGFIX: only override the session when a project was explicitly
    # requested - the original assigned unconditionally and crashed with
    # TypeError when `project_name` was None (os.environ values must be
    # strings).
    if project_name:
        os.environ['AVALON_PROJECT'] = project_name
        io.Session['AVALON_PROJECT'] = project_name

    if not hierarchy:
        hierarchy = pype.get_hierarchy()

    project_doc = io.find_one({"type": "project"})
    data = {
        "task": task_name,
        "asset": asset or get_asset(),
        "project": {
            "name": project_doc["name"],
            "code": project_doc["data"].get("code", '')
        },
        "hierarchy": hierarchy,
        "app": application["application_dir"]
    }
    return data
def set_avalon_workdir(
    project=None, hierarchy=None, asset=None, task=None
):
    """
    Updates os.environ and session with filled workdir

    Args:
        project (string, optional): project name
        hierarchy (string, optional): hierarchy path
        asset (string, optional): asset name
        task (string, optional): task name

    Returns:
        os.environ[AVALON_WORKDIR]: workdir path
        avalon.session[AVALON_WORKDIR]: workdir path
    """
    lib.set_io_database()
    awd = io.Session.get("AVALON_WORKDIR", None) or \
        os.getenv("AVALON_WORKDIR", None)

    data = get_context_data(project, hierarchy, asset, task)

    # When no workdir is stored yet, or the stored value contains no
    # "{" template placeholders, resolve it from the project Anatomy.
    if (not awd) or ("{" not in awd):
        anatomy_filled = Anatomy(io.Session["AVALON_PROJECT"]).format(data)
        awd = anatomy_filled["work"]["folder"]

    # NOTE(review): `format` was previously imported from pypeapp.api
    # (see the commented-out import at the top of this module); with that
    # import disabled this resolves to the *builtin* format(value, spec),
    # which has different semantics - confirm which one is intended.
    awd_filled = os.path.normpath(format(awd, data))

    io.Session["AVALON_WORKDIR"] = awd_filled
    os.environ["AVALON_WORKDIR"] = awd_filled
    log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
def get_workdir_template(data=None):
    """Obtain the workdir templated path from ``Anatomy()``.

    Args:
        data (dict, optional): basic contextual data; collected via
            :func:`get_context_data` when not provided

    Returns:
        str: template path, or ``None`` when the anatomy result has no
            "work" entry (the error is logged).
    """
    anatomy = Anatomy()
    anatomy_filled = anatomy.format(data or get_context_data())

    # BUGFIX: initialise `work` so that a failing lookup does not raise
    # UnboundLocalError on the `return` below (original bug).
    work = None
    try:
        work = anatomy_filled["work"]
    except Exception as e:
        log.error(
            "{0} Error in get_workdir_template(): {1}".format(__name__, str(e))
        )
    return work

View file

@ -82,13 +82,19 @@ def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project_code()
pype.set_project_code(project_code)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
pype.set_hierarchy(hierarchy)
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)

View file

@ -81,13 +81,19 @@ def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project_code()
pype.set_project_code(project_code)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
pype.set_hierarchy(hierarchy)
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)

View file

@ -1,9 +1,14 @@
from .clockify_api import ClockifyAPI
from .widget_settings import ClockifySettings
from .widget_message import MessageWidget
from .clockify import ClockifyModule
__all__ = [
'ClockifyAPI',
'ClockifySettings',
'ClockifyModule'
"ClockifyAPI",
"ClockifySettings",
"ClockifyModule",
"MessageWidget"
]
def tray_init(tray_widget, main_widget):
    """Entry point used by the Pype tray to instantiate this module."""
    module = ClockifyModule(main_widget, tray_widget)
    return module

View file

@ -1,15 +1,19 @@
import os
import threading
from pypeapp import style
from pypeapp import style, Logger
from Qt import QtWidgets
from pype.clockify import ClockifySettings, ClockifyAPI
from . import ClockifySettings, ClockifyAPI, MessageWidget
class ClockifyModule:
def __init__(self, main_parent=None, parent=None):
self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")
self.main_parent = main_parent
self.parent = parent
self.clockapi = ClockifyAPI()
self.message_widget = None
self.widget_settings = ClockifySettings(main_parent, self)
self.widget_settings_required = None
@ -20,9 +24,10 @@ class ClockifyModule:
self.bool_workspace_set = False
self.bool_timer_run = False
def start_up(self):
self.clockapi.set_master(self)
self.bool_api_key_set = self.clockapi.set_api()
def tray_start(self):
if self.bool_api_key_set is False:
self.show_settings()
return
@ -41,7 +46,7 @@ class ClockifyModule:
os.path.dirname(__file__),
'ftrack_actions'
])
current = os.environ('FTRACK_ACTIONS_PATH', '')
current = os.environ.get('FTRACK_ACTIONS_PATH', '')
if current:
current += os.pathsep
os.environ['FTRACK_ACTIONS_PATH'] = current + actions_path
@ -57,6 +62,25 @@ class ClockifyModule:
current += os.pathsep
os.environ['AVALON_ACTIONS'] = current + actions_path
if 'TimersManager' in modules:
self.timer_manager = modules['TimersManager']
self.timer_manager.add_module(self)
def start_timer_manager(self, data):
self.start_timer(data)
def stop_timer_manager(self):
self.stop_timer()
def timer_started(self, data):
if hasattr(self, 'timer_manager'):
self.timer_manager.start_timers(data)
def timer_stopped(self):
self.bool_timer_run = False
if hasattr(self, 'timer_manager'):
self.timer_manager.stop_timers()
def start_timer_check(self):
self.bool_thread_check_running = True
if self.thread_timer_check is None:
@ -75,21 +99,129 @@ class ClockifyModule:
def check_running(self):
import time
while self.bool_thread_check_running is True:
bool_timer_run = False
if self.clockapi.get_in_progress() is not None:
self.bool_timer_run = True
else:
self.bool_timer_run = False
self.set_menu_visibility()
bool_timer_run = True
if self.bool_timer_run != bool_timer_run:
if self.bool_timer_run is True:
self.timer_stopped()
elif self.bool_timer_run is False:
actual_timer = self.clockapi.get_in_progress()
if not actual_timer:
continue
actual_proj_id = actual_timer["projectId"]
if not actual_proj_id:
continue
project = self.clockapi.get_project_by_id(actual_proj_id)
if project and project.get("code") == 501:
continue
project_name = project["name"]
actual_timer_hierarchy = actual_timer["description"]
hierarchy_items = actual_timer_hierarchy.split("/")
# Each pype timer must have at least 2 items!
if len(hierarchy_items) < 2:
continue
task_name = hierarchy_items[-1]
hierarchy = hierarchy_items[:-1]
task_type = None
if len(actual_timer.get("tags", [])) > 0:
task_type = actual_timer["tags"][0].get("name")
data = {
"task_name": task_name,
"hierarchy": hierarchy,
"project_name": project_name,
"task_type": task_type
}
self.timer_started(data)
self.bool_timer_run = bool_timer_run
self.set_menu_visibility()
time.sleep(5)
def stop_timer(self):
self.clockapi.finish_time_entry()
self.bool_timer_run = False
if self.bool_timer_run:
self.timer_stopped()
def signed_in(self):
if hasattr(self, 'timer_manager'):
if not self.timer_manager:
return
if not self.timer_manager.last_task:
return
if self.timer_manager.is_running:
self.start_timer_manager(self.timer_manager.last_task)
def start_timer(self, input_data):
    """Start a Clockify time entry described by ``input_data``.

    Args:
        input_data (dict): expects keys ``task_name``, ``project_name``,
            ``task_type`` and optionally ``hierarchy`` (list of parent
            names, joined with "/" into the entry description).
    """
    # Skip silently when no API key is entered yet.
    if not self.clockapi.get_api_key():
        return

    actual_timer = self.clockapi.get_in_progress()
    actual_timer_hierarchy = None
    actual_project_id = None
    if actual_timer is not None:
        actual_timer_hierarchy = actual_timer.get("description")
        actual_project_id = actual_timer.get("projectId")

    # Concatenate hierarchy and task to get description
    desc_items = [val for val in input_data.get("hierarchy", [])]
    desc_items.append(input_data["task_name"])
    description = "/".join(desc_items)

    # Check project existence
    project_name = input_data["project_name"]
    project_id = self.clockapi.get_project_id(project_name)
    if not project_id:
        self.log.warning((
            "Project \"{}\" was not found in Clockify. Timer won't start."
        ).format(project_name))

        msg = (
            "Project <b>\"{}\"</b> is not in Clockify Workspace <b>\"{}\"</b>."
            "<br><br>Please inform your Project Manager."
        ).format(project_name, str(self.clockapi.workspace))

        self.message_widget = MessageWidget(
            self.main_parent, msg, "Clockify - Info Message"
        )
        # BUGFIX: connect the `closed` signal to the cleanup slot.
        # The original connected it to the widget object itself
        # (`self.message_widget`), which is not a callable slot.
        self.message_widget.closed.connect(self.on_message_widget_close)
        self.message_widget.show()
        return

    # The very same timer is already running - nothing to do.
    if (
        actual_timer is not None and
        description == actual_timer_hierarchy and
        project_id == actual_project_id
    ):
        return

    tag_ids = []
    task_tag_id = self.clockapi.get_tag_id(input_data["task_type"])
    if task_tag_id is not None:
        tag_ids.append(task_tag_id)

    self.clockapi.start_time_entry(
        description, project_id, tag_ids=tag_ids
    )
def on_message_widget_close(self):
    # Drop the reference so the closed widget can be garbage collected.
    self.message_widget = None
# Definition of Tray menu
def tray_menu(self, parent):
def tray_menu(self, parent_menu):
# Menu for Tray App
self.menu = QtWidgets.QMenu('Clockify', parent)
self.menu = QtWidgets.QMenu('Clockify', parent_menu)
self.menu.setProperty('submenu', 'on')
self.menu.setStyleSheet(style.load_stylesheet())
@ -109,7 +241,7 @@ class ClockifyModule:
self.set_menu_visibility()
return self.menu
parent_menu.addMenu(self.menu)
def show_settings(self):
self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())

View file

@ -1,4 +1,5 @@
import os
import re
import requests
import json
import datetime
@ -22,7 +23,9 @@ class ClockifyAPI(metaclass=Singleton):
app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
file_name = 'clockify.json'
fpath = os.path.join(app_dir, file_name)
admin_permission_names = ['WORKSPACE_OWN', 'WORKSPACE_ADMIN']
master_parent = None
workspace = None
workspace_id = None
def set_master(self, master_parent):
@ -41,6 +44,8 @@ class ClockifyAPI(metaclass=Singleton):
if api_key is not None and self.validate_api_key(api_key) is True:
self.headers["X-Api-Key"] = api_key
self.set_workspace()
if self.master_parent:
self.master_parent.signed_in()
return True
return False
@ -55,31 +60,41 @@ class ClockifyAPI(metaclass=Singleton):
return False
return True
def validate_workspace_perm(self):
test_project = '__test__'
action_url = 'workspaces/{}/projects/'.format(self.workspace_id)
body = {
"name": test_project, "clientId": "", "isPublic": "false",
"estimate": {"type": "AUTO"},
"color": "#f44336", "billable": "true"
}
response = requests.post(
self.endpoint + action_url,
headers=self.headers, json=body
def validate_workspace_perm(self, workspace_id=None):
user_id = self.get_user_id()
if user_id is None:
return False
if workspace_id is None:
workspace_id = self.workspace_id
action_url = "/workspaces/{}/users/{}/permissions".format(
workspace_id, user_id
)
if response.status_code == 201:
self.delete_project(self.get_project_id(test_project))
return True
else:
projects = self.get_projects()
if test_project in projects:
try:
self.delete_project(self.get_project_id(test_project))
return True
except json.decoder.JSONDecodeError:
return False
response = requests.get(
self.endpoint + action_url,
headers=self.headers
)
user_permissions = response.json()
for perm in user_permissions:
if perm['name'] in self.admin_permission_names:
return True
return False
def get_user_id(self):
    """Return the Clockify user id of the authenticated user.

    Returns:
        str or None: the user id, or ``None`` when it cannot be parsed
            from the response payload.
    """
    action_url = 'v1/user/'
    response = requests.get(
        self.endpoint + action_url,
        headers=self.headers
    )
    # This regex is necessary: fully deserializing the payload crashed
    # on some UNICODE strings (original note), so only the `"id":"..."`
    # fragment is extracted. BUGFIX: use a raw string - the original
    # literal contained invalid escape sequences (`\:`, `\"`).
    id_regex = r'"id":"\w+"'
    result = re.findall(id_regex, str(response.content))
    if len(result) != 1:
        # TODO: replace with proper logging and a better message
        print('User ID was not found (this is a BUG!!!)')
        return None
    return json.loads('{' + result[0] + '}')['id']
def set_workspace(self, name=None):
if name is None:
name = os.environ.get('CLOCKIFY_WORKSPACE', None)
@ -147,6 +162,19 @@ class ClockifyAPI(metaclass=Singleton):
project["name"]: project["id"] for project in response.json()
}
def get_project_by_id(self, project_id, workspace_id=None):
    """Fetch a single Clockify project by its id.

    Args:
        project_id (str): id of the project to fetch.
        workspace_id (str, optional): workspace to query; defaults to
            the workspace configured on this API instance.

    Returns:
        dict: decoded JSON payload of the project endpoint.
    """
    workspace = self.workspace_id if workspace_id is None else workspace_id
    action_url = 'workspaces/{}/projects/{}/'.format(workspace, project_id)
    response = requests.get(self.endpoint + action_url, headers=self.headers)
    return response.json()
def get_tags(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
@ -279,6 +307,9 @@ class ClockifyAPI(metaclass=Singleton):
if workspace_id is None:
workspace_id = self.workspace_id
current = self.get_in_progress(workspace_id)
if current is None:
return
current_id = current["id"]
action_url = 'workspaces/{}/timeEntries/{}'.format(
workspace_id, current_id

View file

@ -1,108 +0,0 @@
import os
import sys
import argparse
import logging
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.clockify import ClockifyAPI
class StartClockify(BaseAction):
'''Starts timer on clockify.'''
#: Action identifier.
identifier = 'clockify.start.timer'
#: Action label.
label = 'Start timer'
#: Action description.
description = 'Starts timer on clockify'
#: roles that are allowed to register this action
icon = '{}/app_icons/clockify.png'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
#: Clockify api
clockapi = ClockifyAPI()
def discover(self, session, entities, event):
if len(entities) != 1:
return False
if entities[0].entity_type.lower() != 'task':
return False
if self.clockapi.workspace_id is None:
return False
return True
def launch(self, session, entities, event):
task = entities[0]
task_name = task['type']['name']
project_name = task['project']['full_name']
def get_parents(entity):
output = []
if entity.entity_type.lower() == 'project':
return output
output.extend(get_parents(entity['parent']))
output.append(entity['name'])
return output
desc_items = get_parents(task['parent'])
desc_items.append(task['name'])
description = '/'.join(desc_items)
project_id = self.clockapi.get_project_id(project_name)
tag_ids = []
tag_ids.append(self.clockapi.get_tag_id(task_name))
self.clockapi.start_time_entry(
description, project_id, tag_ids=tag_ids
)
return True
def register(session, **kw):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
StartClockify(session).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@ -17,10 +17,8 @@ class SyncClocify(BaseAction):
label = 'Sync To Clockify'
#: Action description.
description = 'Synchronise data to Clockify workspace'
#: priority
priority = 100
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator']
role_list = ["Pypeclub", "Administrator", "project Manager"]
#: icon
icon = '{}/app_icons/clockify-white.png'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
@ -28,16 +26,22 @@ class SyncClocify(BaseAction):
#: CLockifyApi
clockapi = ClockifyAPI()
def register(self):
def preregister(self):
if self.clockapi.workspace_id is None:
raise ValueError('Clockify Workspace or API key are not set!')
return "Clockify Workspace or API key are not set!"
if self.clockapi.validate_workspace_perm() is False:
raise MissingPermision('Clockify')
super().register()
return True
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1:
return False
if entities[0].entity_type.lower() != "project":
return False
return True
def launch(self, session, entities, event):

View file

@ -0,0 +1,91 @@
from Qt import QtCore, QtGui, QtWidgets
from pypeapp import style
class MessageWidget(QtWidgets.QWidget):
    """Small styled pop-up window displaying one or more messages.

    Emits the ``closed`` signal when the window is closed so owners can
    drop their reference to the widget.
    """

    SIZE_W = 300
    SIZE_H = 130

    closed = QtCore.Signal()

    def __init__(self, parent=None, messages=None, title="Message"):
        # BUGFIX: `messages` previously defaulted to a mutable `[]`
        # (shared across calls). `None` behaves identically downstream,
        # since `_ui_layout` substitutes a placeholder for falsy input.
        super(MessageWidget, self).__init__()

        self._parent = parent

        # Icon
        if parent and hasattr(parent, 'icon'):
            self.setWindowIcon(parent.icon)
        else:
            from pypeapp.resources import get_resource
            self.setWindowIcon(QtGui.QIcon(get_resource('icon.png')))

        self.setWindowFlags(
            QtCore.Qt.WindowCloseButtonHint |
            QtCore.Qt.WindowMinimizeButtonHint
        )

        # Font
        self.font = QtGui.QFont()
        self.font.setFamily("DejaVu Sans Condensed")
        self.font.setPointSize(9)
        self.font.setBold(True)
        self.font.setWeight(50)
        self.font.setKerning(True)

        # Size setting
        self.resize(self.SIZE_W, self.SIZE_H)
        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
        self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))

        # Style
        self.setStyleSheet(style.load_stylesheet())

        self.setLayout(self._ui_layout(messages))
        self.setWindowTitle(title)

    def _ui_layout(self, messages):
        """Build the layout of message labels plus a Close button.

        ``messages`` may be a string, a list/tuple of strings, or falsy
        (a placeholder is shown in that case).
        """
        if not messages:
            # Typo fixed in the user-facing placeholder ("Misssing").
            messages = ["*Missing messages (This is a bug)*", ]

        elif not isinstance(messages, (tuple, list)):
            messages = [messages, ]

        main_layout = QtWidgets.QVBoxLayout(self)

        labels = []
        for message in messages:
            label = QtWidgets.QLabel(message)
            label.setFont(self.font)
            label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
            label.setTextFormat(QtCore.Qt.RichText)
            label.setWordWrap(True)

            labels.append(label)
            main_layout.addWidget(label)

        btn_close = QtWidgets.QPushButton("Close")
        btn_close.setToolTip('Close this window')
        btn_close.clicked.connect(self.on_close_clicked)

        btn_group = QtWidgets.QHBoxLayout()
        btn_group.addStretch(1)
        btn_group.addWidget(btn_close)

        main_layout.addLayout(btn_group)

        self.labels = labels
        self.btn_group = btn_group
        self.btn_close = btn_close
        self.main_layout = main_layout

        return main_layout

    def on_close_clicked(self):
        """Slot for the Close button."""
        self.close()

    def close(self, *args, **kwargs):
        """Emit ``closed`` before delegating to the Qt close."""
        self.closed.emit()
        super(MessageWidget, self).close(*args, **kwargs)

View file

@ -4,21 +4,18 @@ import time
from pype.ftrack import AppAction
from avalon import lib
from pypeapp import Logger
from pype import lib as pypelib
from pype.lib import get_all_avalon_projects
log = Logger().get_logger(__name__)
def registerApp(app, session):
def registerApp(app, session, plugins_presets):
name = app['name']
variant = ""
try:
variant = app['name'].split("_")[1]
except Exception:
log.warning((
'"{0}" - App "name" and "variant" is not separated by "_"'
' (variant is not set)'
).format(app['name']))
pass
abspath = lib.which_app(app['name'])
if abspath is None:
@ -44,29 +41,42 @@ def registerApp(app, session):
# register action
AppAction(
session, label, name, executable, variant,
icon, description, preactions
icon, description, preactions, plugins_presets
).register()
if not variant:
log.info('- Variant is not set')
def register(session):
projects = pypelib.get_all_avalon_projects()
def register(session, plugins_presets={}):
# WARNING getting projects only helps to check connection to mongo
# - without will `discover` of ftrack apps actions take ages
result = get_all_avalon_projects()
apps = []
appNames = []
# Get all application from all projects
for project in projects:
for app in project['config']['apps']:
if app['name'] not in appNames:
appNames.append(app['name'])
apps.append(app)
launchers_path = os.path.join(os.environ["PYPE_CONFIG"], "launchers")
for file in os.listdir(launchers_path):
filename, ext = os.path.splitext(file)
if ext.lower() != ".toml":
continue
loaded_data = toml.load(os.path.join(launchers_path, file))
app_data = {
"name": filename,
"label": loaded_data.get("label", filename)
}
apps.append(app_data)
apps = sorted(apps, key=lambda x: x['name'])
app_counter = 0
for app in apps:
try:
registerApp(app, session)
registerApp(app, session, plugins_presets)
if app_counter%5 == 0:
time.sleep(0.1)
app_counter += 1
except Exception as e:
log.exception("'{0}' - not proper App ({1})".format(app['name'], e))
except Exception as exc:
log.exception(
"\"{}\" - not a proper App ({})".format(app['name'], str(exc)),
exc_info=True
)

View file

@ -78,7 +78,7 @@ class AssetDelete(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -87,7 +87,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
AssetDelete(session).register()
AssetDelete(session, plugins_presets).register()
def main(arguments=None):

View file

@ -0,0 +1,286 @@
import os
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
class AttributesRemapper(BaseAction):
'''Edit meta data action.'''
#: Action identifier.
identifier = 'attributes.remapper'
#: Action label.
label = "Pype Doctor"
variant = '- Attributes Remapper'
#: Action description.
description = 'Remaps attributes in avalon DB'
#: roles that are allowed to register this action
role_list = ["Pypeclub", "Administrator"]
icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
db_con = DbConnector()
keys_to_change = {
"fstart": "frameStart",
"startFrame": "frameStart",
"edit_in": "frameStart",
"fend": "frameEnd",
"endFrame": "frameEnd",
"edit_out": "frameEnd",
"handle_start": "handleStart",
"handle_end": "handleEnd",
"handles": ["handleEnd", "handleStart"],
"frameRate": "fps",
"framerate": "fps",
"resolution_width": "resolutionWidth",
"resolution_height": "resolutionHeight",
"pixel_aspect": "pixelAspect"
}
def discover(self, session, entities, event):
''' Validation '''
return True
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
title = 'Select Projects where attributes should be remapped'
items = []
selection_enum = {
'label': 'Process type',
'type': 'enumerator',
'name': 'process_type',
'data': [
{
'label': 'Selection',
'value': 'selection'
}, {
'label': 'Inverted selection',
'value': 'except'
}
],
'value': 'selection'
}
selection_label = {
'type': 'label',
'value': (
'Selection based variants:<br/>'
'- `Selection` - '
'NOTHING is processed when nothing is selected<br/>'
'- `Inverted selection` - '
'ALL Projects are processed when nothing is selected'
)
}
items.append(selection_enum)
items.append(selection_label)
item_splitter = {'type': 'label', 'value': '---'}
all_projects = session.query('Project').all()
for project in all_projects:
item_label = {
'type': 'label',
'value': '{} (<i>{}</i>)'.format(
project['full_name'], project['name']
)
}
item = {
'name': project['id'],
'type': 'boolean',
'value': False
}
if len(items) > 0:
items.append(item_splitter)
items.append(item_label)
items.append(item)
if len(items) == 0:
return {
'success': False,
'message': 'Didn\'t found any projects'
}
else:
return {
'items': items,
'title': title
}
def launch(self, session, entities, event):
if 'values' not in event['data']:
return
values = event['data']['values']
process_type = values.pop('process_type')
selection = True
if process_type == 'except':
selection = False
interface_messages = {}
projects_to_update = []
for project_id, update_bool in values.items():
if not update_bool and selection:
continue
if update_bool and not selection:
continue
project = session.query(
'Project where id is "{}"'.format(project_id)
).one()
projects_to_update.append(project)
if not projects_to_update:
self.log.debug('Nothing to update')
return {
'success': True,
'message': 'Nothing to update'
}
self.db_con.install()
relevant_types = ["project", "asset", "version"]
for ft_project in projects_to_update:
self.log.debug(
"Processing project \"{}\"".format(ft_project["full_name"])
)
self.db_con.Session["AVALON_PROJECT"] = ft_project["full_name"]
project = self.db_con.find_one({'type': 'project'})
if not project:
key = "Projects not synchronized to db"
if key not in interface_messages:
interface_messages[key] = []
interface_messages[key].append(ft_project["full_name"])
continue
# Get all entities in project collection from MongoDB
_entities = self.db_con.find({})
for _entity in _entities:
ent_t = _entity.get("type", "*unknown type")
name = _entity.get("name", "*unknown name")
self.log.debug(
"- {} ({})".format(name, ent_t)
)
# Skip types that do not store keys to change
if ent_t.lower() not in relevant_types:
self.log.debug("-- skipping - type is not relevant")
continue
# Get data which will change
updating_data = {}
source_data = _entity["data"]
for key_from, key_to in self.keys_to_change.items():
# continue if final key already exists
if type(key_to) == list:
for key in key_to:
# continue if final key was set in update_data
if key in updating_data:
continue
# continue if source key not exist or value is None
value = source_data.get(key_from)
if value is None:
continue
self.log.debug(
"-- changing key {} to {}".format(
key_from,
key
)
)
updating_data[key] = value
else:
if key_to in source_data:
continue
# continue if final key was set in update_data
if key_to in updating_data:
continue
# continue if source key not exist or value is None
value = source_data.get(key_from)
if value is None:
continue
self.log.debug(
"-- changing key {} to {}".format(key_from, key_to)
)
updating_data[key_to] = value
# Pop out old keys from entity
is_obsolete = False
for key in self.keys_to_change:
if key not in source_data:
continue
is_obsolete = True
source_data.pop(key)
# continue if there is nothing to change
if not is_obsolete and not updating_data:
self.log.debug("-- nothing to change")
continue
source_data.update(updating_data)
self.db_con.update_many(
{"_id": _entity["_id"]},
{"$set": {"data": source_data}}
)
self.db_con.uninstall()
if interface_messages:
self.show_interface_from_dict(
messages=interface_messages,
title="Errors during remapping attributes",
event=event
)
return True
def show_interface_from_dict(self, event, messages, title=""):
    """Show a label-only ftrack interface built from *messages*.

    Each key becomes a heading; list values render one label per entry,
    any other truthy value renders a single label. Keys with falsy values
    are skipped entirely.
    """
    widgets = []
    for heading, body in messages.items():
        if not body:
            continue
        widgets.append({'type': 'label', 'value': '# {}'.format(heading)})
        # Normalize to a list so both branches share one render path.
        entries = body if isinstance(body, list) else [body]
        for entry in entries:
            widgets.append(
                {'type': 'label', 'value': '<p>{}</p>'.format(entry)}
            )
    self.show_interface(event, widgets, title)
def register(session, plugins_presets={}):
    '''Register plugin. Called when used as an plugin.'''
    # Only act on a genuine ftrack session; any other object is ignored.
    if isinstance(session, ftrack_api.session.Session):
        AttributesRemapper(session, plugins_presets).register()

View file

@ -53,12 +53,12 @@ class ClientReviewSort(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
action_handler = ClientReviewSort(session)
action_handler = ClientReviewSort(session, plugins_presets)
action_handler.register()

View file

@ -65,7 +65,7 @@ class ComponentOpen(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -74,7 +74,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
ComponentOpen(session).register()
ComponentOpen(session, plugins_presets).register()
def main(arguments=None):

View file

@ -7,6 +7,7 @@ import logging
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, get_ca_mongoid
from pypeapp import config
from ftrack_api.exception import NoResultFoundError
"""
This action creates/updates custom attributes.
@ -109,27 +110,21 @@ class CustomAttributes(BaseAction):
#: Action identifier.
identifier = 'create.update.attributes'
#: Action label.
label = 'Create/Update Avalon Attributes'
label = "Pype Admin"
variant = '- Create/Update Avalon Attributes'
#: Action description.
description = 'Creates Avalon/Mongo ID for double check'
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator']
icon = '{}/ftrack/action_icons/CustomAttributes.svg'.format(
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
def __init__(self, session):
super().__init__(session)
self.types = {}
self.object_type_ids = {}
self.groups = {}
self.security_roles = {}
self.required_keys = ['key', 'label', 'type']
self.type_posibilities = [
'text', 'boolean', 'date', 'enumerator',
'dynamic enumerator', 'number'
]
required_keys = ['key', 'label', 'type']
type_posibilities = [
'text', 'boolean', 'date', 'enumerator',
'dynamic enumerator', 'number'
]
def discover(self, session, entities, event):
'''
@ -139,8 +134,12 @@ class CustomAttributes(BaseAction):
return True
def launch(self, session, entities, event):
# JOB SETTINGS
self.types = {}
self.object_type_ids = {}
self.groups = {}
self.security_roles = {}
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
@ -159,11 +158,14 @@ class CustomAttributes(BaseAction):
job['status'] = 'done'
session.commit()
except Exception as e:
except Exception as exc:
session.rollback()
job['status'] = 'failed'
session.commit()
self.log.error('Creating custom attributes failed ({})'.format(e))
self.log.error(
'Creating custom attributes failed ({})'.format(exc),
exc_info=True
)
return True
@ -226,24 +228,30 @@ class CustomAttributes(BaseAction):
def custom_attributes_from_file(self, session, event):
presets = config.get_presets()['ftrack']['ftrack_custom_attributes']
for cust_attr_name in presets:
for cust_attr_data in presets:
cust_attr_name = cust_attr_data.get(
'label',
cust_attr_data.get('key')
)
try:
data = {}
cust_attr = presets[cust_attr_name]
# Get key, label, type
data.update(self.get_required(cust_attr))
data.update(self.get_required(cust_attr_data))
# Get hierachical/ entity_type/ object_id
data.update(self.get_entity_type(cust_attr))
data.update(self.get_entity_type(cust_attr_data))
# Get group, default, security roles
data.update(self.get_optional(cust_attr))
data.update(self.get_optional(cust_attr_data))
# Process data
self.process_attribute(data)
except CustAttrException as cae:
msg = 'Custom attribute error "{}" - {}'.format(
cust_attr_name, str(cae)
)
self.log.warning(msg)
if cust_attr_name:
msg = 'Custom attribute error "{}" - {}'.format(
cust_attr_name, str(cae)
)
else:
msg = 'Custom attribute error - {}'.format(str(cae))
self.log.warning(msg, exc_info=True)
self.show_message(event, msg)
return True
@ -422,9 +430,10 @@ class CustomAttributes(BaseAction):
def get_security_role(self, security_roles):
roles = []
if len(security_roles) == 0 or security_roles[0] == 'ALL':
security_roles_lowered = [role.lower() for role in security_roles]
if len(security_roles) == 0 or 'all' in security_roles_lowered:
roles = self.get_role_ALL()
elif security_roles[0] == 'except':
elif security_roles_lowered[0] == 'except':
excepts = security_roles[1:]
all = self.get_role_ALL()
for role in all:
@ -443,10 +452,10 @@ class CustomAttributes(BaseAction):
role = self.session.query(query).one()
self.security_roles[role_name] = role
roles.append(role)
except Exception:
raise CustAttrException(
'Securit role "{}" does not exist'.format(role_name)
)
except NoResultFoundError:
raise CustAttrException((
'Securit role "{}" does not exist'
).format(role_name))
return roles
@ -560,7 +569,7 @@ class CustomAttributes(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -569,7 +578,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
CustomAttributes(session).register()
CustomAttributes(session, plugins_presets).register()
def main(arguments=None):

View file

@ -30,11 +30,13 @@ class CreateFolders(BaseAction):
def discover(self, session, entities, event):
''' Validation '''
not_allowed = ['assetversion']
if len(entities) != 1:
return False
not_allowed = ['assetversion', 'project']
if entities[0].entity_type.lower() in not_allowed:
return False
return True
def interface(self, session, entities, event):
@ -322,13 +324,13 @@ class PartialDict(dict):
return '{'+key+'}'
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
CreateFolders(session).register()
CreateFolders(session, plugins_presets).register()
def main(arguments=None):

View file

@ -13,9 +13,9 @@ class CreateProjectFolders(BaseAction):
'''Edit meta data action.'''
#: Action identifier.
identifier = 'create.project.folders'
identifier = 'create.project.structure'
#: Action label.
label = 'Create Project Folders'
label = 'Create Project Structure'
#: Action description.
description = 'Creates folder structure'
#: roles that are allowed to register this action
@ -31,6 +31,11 @@ class CreateProjectFolders(BaseAction):
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1:
return False
if entities[0].entity_type.lower() != "project":
return False
return True
@ -190,13 +195,13 @@ class CreateProjectFolders(BaseAction):
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
CreateProjectFolders(session).register()
CreateProjectFolders(session, plugins_presets).register()
def main(arguments=None):

View file

@ -12,14 +12,15 @@ class CustomAttributeDoctor(BaseAction):
#: Action identifier.
identifier = 'custom.attributes.doctor'
#: Action label.
label = 'Custom Attributes Doctor'
label = "Pype Doctor"
variant = '- Custom Attributes Doctor'
#: Action description.
description = (
'Fix hierarchical custom attributes mainly handles, fstart'
' and fend'
)
icon = '{}/ftrack/action_icons/TestAction.svg'.format(
icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
hierarchical_ca = ['handle_start', 'handle_end', 'fstart', 'fend']
@ -286,13 +287,13 @@ class CustomAttributeDoctor(BaseAction):
return all_roles
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
CustomAttributeDoctor(session).register()
CustomAttributeDoctor(session, plugins_presets).register()
def main(arguments=None):

View file

@ -311,7 +311,7 @@ class DeleteAsset(BaseAction):
return assets
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -320,7 +320,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
DeleteAsset(session).register()
DeleteAsset(session, plugins_presets).register()
def main(arguments=None):

View file

@ -13,12 +13,13 @@ class AssetsRemover(BaseAction):
#: Action identifier.
identifier = 'remove.assets'
#: Action label.
label = 'Delete Assets by Name'
label = "Pype Admin"
variant = '- Delete Assets by Name'
#: Action description.
description = 'Removes assets from Ftrack and Avalon db with all childs'
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator']
icon = '{}/ftrack/action_icons/AssetsRemover.svg'.format(
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
#: Db
@ -131,7 +132,7 @@ class AssetsRemover(BaseAction):
return assets
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -140,7 +141,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
AssetsRemover(session).register()
AssetsRemover(session, plugins_presets).register()
def main(arguments=None):

View file

@ -42,7 +42,7 @@ class VersionsCleanup(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -51,7 +51,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
VersionsCleanup(session).register()
VersionsCleanup(session, plugins_presets).register()
def main(arguments=None):

View file

@ -21,9 +21,9 @@ class DJVViewAction(BaseAction):
)
type = 'Application'
def __init__(self, session):
def __init__(self, session, plugins_presets):
'''Expects a ftrack_api.Session instance'''
super().__init__(session)
super().__init__(session, plugins_presets)
self.djv_path = None
self.config_data = config.get_presets()['djv_view']['config']
@ -218,12 +218,12 @@ class DJVViewAction(BaseAction):
return True
def register(session):
def register(session, plugins_presets={}):
"""Register hooks."""
if not isinstance(session, ftrack_api.session.Session):
return
DJVViewAction(session).register()
DJVViewAction(session, plugins_presets).register()
def main(arguments=None):

View file

@ -14,12 +14,13 @@ class JobKiller(BaseAction):
#: Action identifier.
identifier = 'job.killer'
#: Action label.
label = 'Job Killer'
label = "Pype Admin"
variant = '- Job Killer'
#: Action description.
description = 'Killing selected running jobs'
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator']
icon = '{}/ftrack/action_icons/JobKiller.svg'.format(
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
@ -117,7 +118,7 @@ class JobKiller(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -126,7 +127,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
JobKiller(session).register()
JobKiller(session, plugins_presets).register()
def main(arguments=None):

View file

@ -112,13 +112,13 @@ class MultipleNotes(BaseAction):
return True
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
MultipleNotes(session).register()
MultipleNotes(session, plugins_presets).register()
def main(arguments=None):

View file

@ -0,0 +1,378 @@
import os
import json
from ruamel import yaml
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pypeapp import config
from pype.ftrack.lib import get_avalon_attr
from pype.vendor.ftrack_api import session as fa_session
class PrepareProject(BaseAction):
    '''Edit meta data action.

    Shows a form with the project's custom attributes (plus an optional
    "create folder structure" checkbox), writes the submitted values back
    to the ftrack project entity and stores them as project specific
    presets on disk.
    '''

    #: Action identifier.
    identifier = 'prepare.project'
    #: Action label.
    label = 'Prepare Project'
    #: Action description.
    description = 'Set basic attributes on the project'
    #: roles that are allowed to register this action
    role_list = ["Pypeclub", "Administrator", "Project manager"]
    icon = '{}/ftrack/action_icons/PrepareProject.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )

    # Key to store info about trigerring create folder structure
    create_project_structure_key = "create_folder_structure"

    def discover(self, session, entities, event):
        ''' Validation

        Only offered when exactly one entity is selected and that entity
        is a project.
        '''
        if len(entities) != 1:
            return False

        if entities[0].entity_type.lower() != "project":
            return False

        return True

    def interface(self, session, entities, event):
        """Build the attribute form shown to the user.

        Returns ``None`` on the second roundtrip (when ``values`` are
        already present in the event), otherwise a dict with ``items``
        and ``title`` for ftrack's interface protocol.
        """
        if event['data'].get('values', {}):
            return

        # Inform user that this may take a while
        self.show_message(event, "Preparing data... Please wait", True)
        self.log.debug("Loading custom attributes")
        cust_attrs, hier_cust_attrs = get_avalon_attr(session, True)
        # Project specific defaults may override the global presets.
        project_defaults = config.get_presets(
            entities[0]["full_name"]
        ).get("ftrack", {}).get("project_defaults", {})

        self.log.debug("Preparing data which will be shown")
        attributes_to_set = {}
        for attr in hier_cust_attrs:
            key = attr["key"]
            attributes_to_set[key] = {
                "label": attr["label"],
                "object": attr,
                "default": project_defaults.get(key)
            }

        # Non-hierarchical attributes are only relevant when they apply to
        # the project ("show") entity type; same key overrides the
        # hierarchical entry above.
        for attr in cust_attrs:
            if attr["entity_type"].lower() != "show":
                continue
            key = attr["key"]
            attributes_to_set[key] = {
                "label": attr["label"],
                "object": attr,
                "default": project_defaults.get(key)
            }

        # Sort by label
        attributes_to_set = dict(sorted(
            attributes_to_set.items(),
            key=lambda x: x[1]["label"]
        ))
        self.log.debug("Preparing interface for keys: \"{}\"".format(
            str([key for key in attributes_to_set])
        ))

        item_splitter = {'type': 'label', 'value': '---'}
        title = "Prepare Project"
        items = []

        # Ask if want to trigger Action Create Folder Structure
        items.append({
            "type": "label",
            "value": "<h3>Want to create basic Folder Structure?</h3>"
        })
        items.append({
            "name": self.create_project_structure_key,
            "type": "boolean",
            "value": False,
            "label": "Check if Yes"
        })

        items.append(item_splitter)
        items.append({
            "type": "label",
            "value": "<h3>Set basic Attributes:</h3>"
        })

        # Multi-select enumerators cannot be expressed as a single widget,
        # so they are expanded into one boolean per option and collected
        # separately to be appended after all other items.
        multiselect_enumerators = []

        # This item will be last (before enumerators)
        # - sets value of auto synchronization
        auto_sync_name = "avalon_auto_sync"
        auto_sync_item = {
            "name": auto_sync_name,
            "type": "boolean",
            "value": project_defaults.get(auto_sync_name, False),
            "label": "AutoSync to Avalon"
        }

        for key, in_data in attributes_to_set.items():
            attr = in_data["object"]

            # initial item definition
            item = {
                "name": key,
                "label": in_data["label"]
            }

            # cust attr type - may have different visualization
            type_name = attr["type"]["name"].lower()
            easy_types = ["text", "boolean", "date", "number"]

            easy_type = False
            if type_name in easy_types:
                easy_type = True

            elif type_name == "enumerator":
                # Enumerator options are stored as JSON inside the
                # attribute's "config" field (JSON in JSON).
                attr_config = json.loads(attr["config"])
                attr_config_data = json.loads(attr_config["data"])

                if attr_config["multiSelect"] is True:
                    multiselect_enumerators.append(item_splitter)
                    multiselect_enumerators.append({
                        "type": "label",
                        "value": in_data["label"]
                    })

                    default = in_data["default"]
                    names = []
                    for option in sorted(
                        attr_config_data, key=lambda x: x["menu"]
                    ):
                        name = option["value"]
                        # "__<attr key>__<option>" so launch() can map the
                        # booleans back to their attribute.
                        new_name = "__{}__{}".format(key, name)
                        names.append(new_name)
                        item = {
                            "name": new_name,
                            "type": "boolean",
                            "label": "- {}".format(option["menu"])
                        }
                        if default:
                            if (
                                isinstance(default, list) or
                                isinstance(default, tuple)
                            ):
                                if name in default:
                                    item["value"] = True
                            else:
                                if name == default:
                                    item["value"] = True

                        multiselect_enumerators.append(item)

                    # Hidden item carries the generated option names so
                    # launch() knows which form keys belong to this attr.
                    multiselect_enumerators.append({
                        "type": "hidden",
                        "name": "__hidden__{}".format(key),
                        "value": json.dumps(names)
                    })
                else:
                    # Single-select enumerator is a regular widget with
                    # option data attached.
                    easy_type = True
                    item["data"] = attr_config_data

            else:
                # Unknown attribute type - warn in log and in the form,
                # skip the widget.
                self.log.warning((
                    "Custom attribute \"{}\" has type \"{}\"."
                    " I don't know how to handle"
                ).format(key, type_name))
                items.append({
                    "type": "label",
                    "value": (
                        "!!! Can't handle Custom attritubte type \"{}\""
                        " (key: \"{}\")"
                    ).format(type_name, key)
                })

            if easy_type:
                item["type"] = type_name

                # default value in interface
                default = in_data["default"]
                if default is not None:
                    item["value"] = default

                items.append(item)

        # Add autosync attribute
        items.append(auto_sync_item)

        # Add enumerator items at the end
        for item in multiselect_enumerators:
            items.append(item)

        return {
            'items': items,
            'title': title
        }

    def launch(self, session, entities, event):
        """Apply submitted values to the project and store presets.

        Reverses the encoding done in :meth:`interface`: multi-select
        booleans are folded back into lists, everything is written to the
        project's custom attributes, dumped as project specific config and
        optionally the "create.project.structure" action is triggered.
        """
        if not event['data'].get('values', {}):
            return

        in_data = event['data']['values']

        # pop out info about creating project structure
        create_proj_struct = in_data.pop(self.create_project_structure_key)

        # Find hidden items for multiselect enumerators
        keys_to_process = []
        for key in in_data:
            if key.startswith("__hidden__"):
                keys_to_process.append(key)

        self.log.debug("Preparing data for Multiselect Enumerators")
        enumerators = {}
        for key in keys_to_process:
            new_key = key.replace("__hidden__", "")
            enumerator_items = in_data.pop(key)
            enumerators[new_key] = json.loads(enumerator_items)

        # find values set for multiselect enumerator
        for key, enumerator_items in enumerators.items():
            in_data[key] = []

            name = "__{}__".format(key)
            for item in enumerator_items:
                value = in_data.pop(item)
                if value is True:
                    new_key = item.replace(name, "")
                    in_data[key].append(new_key)

        self.log.debug("Setting Custom Attribute values:")
        entity = entities[0]
        for key, value in in_data.items():
            entity["custom_attributes"][key] = value
            self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))

        session.commit()

        # Create project structure
        self.create_project_specific_config(entities[0]["full_name"], in_data)

        # Trigger Create Project Structure action
        if create_proj_struct is True:
            self.trigger_action("create.project.structure", event)

        return True

    def create_project_specific_config(self, project_name, json_data):
        """Write project specific anatomy and ftrack presets to disk.

        Files are written under ``$PYPE_PROJECT_CONFIGS/<project_name>``:
        ``anatomy/default.yaml`` (copied from ``$PYPE_CONFIG`` if not yet
        readable) and ``presets/ftrack/project_defaults.json`` (always
        overwritten with *json_data*).
        """
        self.log.debug("*** Creating project specifig configs ***")
        path_proj_configs = os.environ.get('PYPE_PROJECT_CONFIGS', "")
        # Skip if PYPE_PROJECT_CONFIGS is not set
        # TODO show user OS message
        if not path_proj_configs:
            self.log.warning((
                "Environment variable \"PYPE_PROJECT_CONFIGS\" is not set."
                " Project specific config can't be set."
            ))
            return
        path_proj_configs = os.path.normpath(path_proj_configs)

        # Skip if path does not exist
        # TODO create if not exist?!!!
        if not os.path.exists(path_proj_configs):
            self.log.warning((
                "Path set in Environment variable \"PYPE_PROJECT_CONFIGS\""
                " Does not exist."
            ))
            return

        project_specific_path = os.path.normpath(
            os.path.join(path_proj_configs, project_name)
        )
        if not os.path.exists(project_specific_path):
            os.makedirs(project_specific_path)
            self.log.debug((
                "Project specific config folder for project \"{}\" created."
            ).format(project_name))

        # Anatomy ####################################
        self.log.debug("--- Processing Anatomy Begins: ---")

        anatomy_dir = os.path.normpath(os.path.join(
            project_specific_path, "anatomy"
        ))
        anatomy_path = os.path.normpath(os.path.join(
            anatomy_dir, "default.yaml"
        ))

        anatomy = None
        if os.path.exists(anatomy_path):
            self.log.debug(
                "Anatomy file already exist. Trying to read: \"{}\"".format(
                    anatomy_path
                )
            )
            # Try to load data
            with open(anatomy_path, 'r') as file_stream:
                try:
                    anatomy = yaml.load(file_stream, Loader=yaml.loader.Loader)
                    self.log.debug("Reading Anatomy file was successful")
                except yaml.YAMLError as exc:
                    # Unreadable file falls through to the "duplicate
                    # default" branch below.
                    self.log.warning(
                        "Reading Yaml file failed: \"{}\"".format(anatomy_path),
                        exc_info=True
                    )

        if not anatomy:
            self.log.debug("Anatomy is not set. Duplicating default.")

            # Create Anatomy folder
            if not os.path.exists(anatomy_dir):
                self.log.debug(
                    "Creating Anatomy folder: \"{}\"".format(anatomy_dir)
                )
                os.makedirs(anatomy_dir)

            # Copy the default anatomy shipped with the global config.
            source_items = [
                os.environ["PYPE_CONFIG"], "anatomy", "default.yaml"
            ]
            source_path = os.path.normpath(os.path.join(*source_items))
            with open(source_path, 'r') as file_stream:
                source_data = file_stream.read()

            with open(anatomy_path, 'w') as file_stream:
                file_stream.write(source_data)

        # Presets ####################################
        self.log.debug("--- Processing Presets Begins: ---")

        project_defaults_dir = os.path.normpath(os.path.join(*[
            project_specific_path, "presets", "ftrack"
        ]))
        project_defaults_path = os.path.normpath(os.path.join(*[
            project_defaults_dir, "project_defaults.json"
        ]))
        # Create folder if not exist
        if not os.path.exists(project_defaults_dir):
            self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
                project_defaults_dir
            ))
            os.makedirs(project_defaults_dir)

        with open(project_defaults_path, 'w') as file_stream:
            json.dump(json_data, file_stream, indent=4)

        self.log.debug("*** Creating project specifig configs Finished ***")
def register(session, plugins_presets={}):
    '''Register plugin. Called when used as an plugin.'''
    # Ignore anything that is not a real ftrack session.
    if isinstance(session, ftrack_api.session.Session):
        PrepareProject(session, plugins_presets).register()

View file

@ -1,13 +1,14 @@
from pype.ftrack import BaseAction
import os
import sys
import json
import subprocess
from pype.vendor import ftrack_api
import logging
import operator
import re
import traceback
import json
from pypeapp import Logger, config
from pype.ftrack import BaseAction
from pype.vendor import ftrack_api
from avalon import io, api
log = Logger().get_logger(__name__)
@ -22,13 +23,13 @@ class RVAction(BaseAction):
)
type = 'Application'
def __init__(self, session):
def __init__(self, session, plugins_presets):
""" Constructor
:param session: ftrack Session
:type session: :class:`ftrack_api.Session`
"""
super().__init__(session)
super().__init__(session, plugins_presets)
self.rv_path = None
self.config_data = None
@ -53,14 +54,7 @@ class RVAction(BaseAction):
def discover(self, session, entities, event):
"""Return available actions based on *event*. """
selection = event["data"].get("selection", [])
if len(selection) != 1:
return False
entityType = selection[0].get("entityType", None)
if entityType in ["assetversion", "task"]:
return True
return False
return True
def set_rv_path(self):
self.rv_path = self.config_data.get("rv_path")
@ -72,151 +66,272 @@ class RVAction(BaseAction):
)
super().register()
def get_components_from_entity(self, session, entity, components):
    """Get components from various entity types.

    The components dictionary is modified in place, so nothing is returned.

    Args:
        entity (Ftrack entity)
        components (dict)
    """
    entity_type = entity.entity_type.lower()

    if entity_type == "assetversion":
        # Collect components with an allowed file type, grouped under the
        # asset's parent name.
        parent_name = entity["asset"]["parent"]["name"]
        for component in entity["components"]:
            if component["file_type"][1:] not in self.allowed_types:
                continue
            components.setdefault(parent_name, []).append(component)
        return

    # Containers recurse into their asset versions.
    if entity_type == "task":
        query = "AssetVersion where task_id is '{0}'".format(entity["id"])
    elif entity_type == "shot":
        query = "AssetVersion where asset.parent.id is '{0}'".format(
            entity["id"]
        )
    else:
        raise NotImplementedError(
            "\"{}\" entity type is not implemented yet.".format(
                entity.entity_type
            )
        )

    for assetversion in session.query(query):
        self.get_components_from_entity(session, assetversion, components)
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
entity = entities[0]
versions = []
entity_type = entity.entity_type.lower()
if entity_type == "assetversion":
if (
entity[
'components'
][0]['file_type'][1:] in self.allowed_types
):
versions.append(entity)
else:
master_entity = entity
if entity_type == "task":
master_entity = entity['parent']
for asset in master_entity['assets']:
for version in asset['versions']:
# Get only AssetVersion of selected task
if (
entity_type == "task" and
version['task']['id'] != entity['id']
):
continue
# Get only components with allowed type
filetype = version['components'][0]['file_type']
if filetype[1:] in self.allowed_types:
versions.append(version)
if len(versions) < 1:
return {
'success': False,
'message': 'There are no Asset Versions to open.'
user = session.query(
"User where username is '{0}'".format(
os.environ["FTRACK_API_USER"]
)
).one()
job = session.create(
"Job",
{
"user": user,
"status": "running",
"data": json.dumps({
"description": "RV: Collecting components."
})
}
)
# Commit to feedback to user.
session.commit()
items = []
base_label = "v{0} - {1} - {2}"
default_component = self.config_data.get(
'default_component', None
)
last_available = None
select_value = None
for version in versions:
for component in version['components']:
label = base_label.format(
str(version['version']).zfill(3),
version['asset']['type']['name'],
component['name']
)
try:
location = component[
'component_locations'
][0]['location']
file_path = location.get_filesystem_path(component)
except Exception:
file_path = component[
'component_locations'
][0]['resource_identifier']
if os.path.isdir(os.path.dirname(file_path)):
last_available = file_path
if component['name'] == default_component:
select_value = file_path
items.append(
{'label': label, 'value': file_path}
)
if len(items) == 0:
return {
'success': False,
'message': (
'There are no Asset Versions with accessible path.'
)
}
item = {
'label': 'Items to view',
'type': 'enumerator',
'name': 'path',
'data': sorted(
items,
key=operator.itemgetter('label'),
reverse=True
)
}
if select_value is not None:
item['value'] = select_value
try:
items = self.get_interface_items(session, entities)
except Exception:
log.error(traceback.format_exc())
job["status"] = "failed"
else:
item['value'] = last_available
job["status"] = "done"
return {'items': [item]}
# Commit to end job.
session.commit()
return {"items": items}
def get_interface_items(self, session, entities):
    """Build one enumerator per parent with its components, newest first.

    Components are grouped by the parent name of their asset, ordered by
    version descending, and same-version components by publish date
    descending. Each enumerator pre-selects its first (newest) entry.
    """
    components = {}
    for entity in entities:
        self.get_components_from_entity(session, entity, components)

    # Sort each parent's components: version descending, then date
    # descending within the same version.
    for parent_name, comps in components.items():
        by_version = {}
        for comp in comps:
            by_version.setdefault(
                comp["version"]["version"], []
            ).append(comp)

        ordered = []
        for version in sorted(by_version, reverse=True):
            ordered.extend(sorted(
                by_version[version],
                key=lambda c: c["version"]["date"],
                reverse=True
            ))
        components[parent_name] = ordered

    # Items to present to user.
    label_template = "{} - v{} - {}"
    items = []
    for parent_name, comps in components.items():
        data = [
            {
                "label": label_template.format(
                    comp["version"]["asset"]["name"],
                    str(comp["version"]["version"]).zfill(3),
                    comp["file_type"][1:]
                ),
                "value": comp["id"]
            }
            for comp in comps
        ]
        items.append({
            "label": parent_name,
            "type": "enumerator",
            "name": parent_name,
            "data": data,
            "value": data[0]["value"]
        })
    return items
def launch(self, session, entities, event):
"""Callback method for RV action."""
# Launching application
if "values" not in event["data"]:
return
filename = event['data']['values']['path']
fps = entities[0].get('custom_attributes', {}).get('fps', None)
cmd = []
# change frame number to padding string for RV to play sequence
try:
frame = re.findall(r'(\d+).', filename)[-1]
except KeyError:
# we didn't detected frame number
pass
else:
padding = '#' * len(frame)
pos = filename.rfind(frame)
filename = filename[:pos] + padding + filename[
filename.rfind('.'):]
# RV path
cmd.append(os.path.normpath(self.rv_path))
if fps is not None:
cmd.append("-fps {}".format(int(fps)))
cmd.append(os.path.normpath(filename))
log.info('Running rv: {}'.format(' '.join(cmd)))
try:
# Run RV with these commands
subprocess.Popen(' '.join(cmd), shell=True)
except Exception as e:
return {
'success': False,
'message': 'File "{}" was not found.'.format(
e
)
user = session.query(
"User where username is '{0}'".format(
os.environ["FTRACK_API_USER"]
)
).one()
job = session.create(
"Job",
{
"user": user,
"status": "running",
"data": json.dumps({
"description": "RV: Collecting file paths."
})
}
)
# Commit to feedback to user.
session.commit()
paths = []
try:
paths = self.get_file_paths(session, event)
except Exception:
log.error(traceback.format_exc())
job["status"] = "failed"
else:
job["status"] = "done"
# Commit to end job.
session.commit()
args = [os.path.normpath(self.rv_path)]
fps = entities[0].get("custom_attributes", {}).get("fps", None)
if fps is not None:
args.extend(["-fps", str(fps)])
args.extend(paths)
log.info("Running rv: {}".format(args))
subprocess.Popen(args)
return True
def get_file_paths(self, session, event):
"""Get file paths from selected components."""
def register(session):
link = session.get(
"Component", list(event["data"]["values"].values())[0]
)["version"]["asset"]["parent"]["link"][0]
project = session.get(link["type"], link["id"])
os.environ["AVALON_PROJECT"] = project["name"]
api.Session["AVALON_PROJECT"] = project["name"]
io.install()
location = ftrack_api.Session().pick_location()
paths = []
for parent_name in sorted(event["data"]["values"].keys()):
component = session.get(
"Component", event["data"]["values"][parent_name]
)
# Newer publishes have the source referenced in Ftrack.
online_source = False
for neighbour_component in component["version"]["components"]:
if neighbour_component["name"] != "ftrackreview-mp4_src":
continue
paths.append(
location.get_filesystem_path(neighbour_component)
)
online_source = True
if online_source:
continue
asset = io.find_one({"type": "asset", "name": parent_name})
subset = io.find_one(
{
"type": "subset",
"name": component["version"]["asset"]["name"],
"parent": asset["_id"]
}
)
version = io.find_one(
{
"type": "version",
"name": component["version"]["version"],
"parent": subset["_id"]
}
)
representation = io.find_one(
{
"type": "representation",
"parent": version["_id"],
"name": component["file_type"][1:]
}
)
if representation is None:
representation = io.find_one(
{
"type": "representation",
"parent": version["_id"],
"name": "preview"
}
)
paths.append(api.get_representation_path(representation))
return paths
def register(session, plugins_presets={}):
"""Register hooks."""
if not isinstance(session, ftrack_api.session.Session):
return
RVAction(session).register()
RVAction(session, plugins_presets).register()
def main(arguments=None):
@ -257,249 +372,3 @@ def main(arguments=None):
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))
"""
Usage: RV movie and image sequence viewer
One File: rv foo.jpg
This Directory: rv .
Other Directory: rv /path/to/dir
Image Sequence w/Audio: rv [ in.#.tif in.wav ]
Stereo w/Audio: rv [ left.#.tif right.#.tif in.wav ]
Stereo Movies: rv [ left.mov right.mov ]
Stereo Movie (from rvio): rv stereo.mov
Cuts Sequenced: rv cut1.mov cut2.#.exr cut3.mov
Stereo Cuts Sequenced: rv [ l1.mov r1.mov ] [ l2.mov r2.mov ]
Forced Anamorphic: rv [ -pa 2.0 fullaperture.#.dpx ]
Compare: rv -wipe a.exr b.exr
Difference: rv -diff a.exr b.exr
Slap Comp Over: rv -over a.exr b.exr
Tile Images: rv -tile *.jpg
Cache + Play Movie: rv -l -play foo.mov
Cache Images to Examine: rv -c big.#.exr
Fullscreen on 2nd monitor: rv -fullscreen -screen 1
Select Source View: rv [ in.exr -select view right ]
Select Source Layer: rv [ in.exr -select layer light1.diffuse ]
(single-view source)
Select Source Layer: rv [ in.exr -select layer left,light1.diffuse ]
(multi-view source)
Select Source Channel: rv [ in.exr -select channel R ]
(single-view, single-layer source)
Select Source Channel: rv [ in.exr -select channel left,Diffuse,R ]
(multi-view, multi-layer source)
Image Sequence Numbering
Frames 1 to 100 no padding: image.1-100@.jpg
Frames 1 to 100 padding 4: image.1-100#.jpg -or- image.1-100@@@@.jpg
Frames 1 to 100 padding 5: image.1-100@@@@@.jpg
Frames -100 to -200 padding 4: image.-100--200#jpg
printf style padding 4: image.%04d.jpg
printf style w/range: image.%04d.jpg 1-100
printf no padding w/range: image.%d.jpg 1-100
Complicated no pad 1 to 100: image_887f1-100@_982.tif
Stereo pair (left,right): image.#.%V.tif
Stereo pair (L,R): image.#.%v.tif
All Frames, padding 4: image.#.jpg
All Frames in Sequence: image.*.jpg
All Frames in Directory: /path/to/directory
All Frames in current dir: .
Per-source arguments (inside [ and ] restricts to that source only)
-pa %f Per-source pixel aspect ratio
-ro %d Per-source range offset
-rs %d Per-source range start
-fps %f Per-source or global fps
-ao %f Per-source audio offset in seconds
-so %f Per-source stereo relative eye offset
-rso %f Per-source stereo right eye offset
-volume %f Per-source or global audio volume (default=1)
-fcdl %S Per-source file CDL
-lcdl %S Per-source look CDL
-flut %S Per-source file LUT
-llut %S Per-source look LUT
-pclut %S Per-source pre-cache software LUT
-cmap %S Per-source channel mapping
(channel names, separated by ',')
-select %S %S Per-source view/layer/channel selection
-crop %d %d %d %d Per-source crop (xmin, ymin, xmax, ymax)
-uncrop %d %d %d %d Per-source uncrop (width, height, xoffset, yoffset)
-in %d Per-source cut-in frame
-out %d Per-source cut-out frame
-noMovieAudio Disable source movie's baked-in audio
-inparams ... Source specific input parameters
... Input sequence patterns, images, movies, or directories
-c Use region frame cache
-l Use look-ahead cache
-nc Use no caching
-s %f Image scale reduction
-ns Nuke style sequence notation
(deprecated and ignored -- no longer needed)
-noRanges No separate frame ranges
(i.e. 1-10 will be considered a file)
-sessionType %S Session type (sequence, stack) (deprecated, use -view)
-stereo %S Stereo mode
(hardware, checker, scanline, anaglyph, lumanaglyph,
left, right, pair, mirror, hsqueezed, vsqueezed)
-stereoSwap %d Swap left and right eyes stereo display
(0 == no, 1 == yes, default=0)
-vsync %d Video Sync (1 = on, 0 = off, default = 1)
-comp %S Composite mode
(over, add, difference, replace, topmost)
-layout %S Layout mode (packed, row, column, manual)
-over Same as -comp over -view defaultStack
-diff Same as -comp difference -view defaultStack
-replace Same as -comp replace -view defaultStack
-topmost Same as -comp topmost -view defaultStack
-layer Same as -comp topmost -view defaultStack, with strict
frame ranges
-tile Same as -layout packed -view defaultLayout
-wipe Same as -over with wipes enabled
-view %S Start with a particular view
-noSequence Don't contract files into sequences
-inferSequence Infer sequences from one file
-autoRetime %d Automatically retime conflicting media fps in
sequences and stacks (1 = on, 0 = off, default = 1)
-rthreads %d Number of reader threads (default=1)
-fullscreen Start in fullscreen mode
-present Start in presentation mode (using presentation device)
-presentAudio %d Use presentation audio device in presentation mode
(1 = on, 0 = off)
-presentDevice %S Presentation mode device
-presentVideoFormat %S Presentation mode override video format
(device specific)
-presentDataFormat %S Presentation mode override data format
(device specific)
-screen %d Start on screen (0, 1, 2, ...)
-noBorders No window manager decorations
-geometry %d %d [%d %d] Start geometry X, Y, W, H
-fitMedia Fit the window to the first media shown
-init %S Override init script
-nofloat Turn off floating point by default
-maxbits %d Maximum default bit depth (default=32)
-gamma %f Set display gamma (default=1)
-sRGB Display using linear -> sRGB conversion
-rec709 Display using linear -> Rec 709 conversion
-dlut %S Apply display LUT
-brightness %f Set display relative brightness in stops (default=0)
-resampleMethod %S Resampling method
(area, linear, cubic, nearest, default=area)
-eval %S Evaluate Mu expression at every session start
-pyeval %S Evaluate Python expression at every session start
-nomb Hide menu bar on start up
-play Play on startup
-playMode %d Playback mode (0=Context dependent, 1=Play all frames,
2=Realtime, default=0)
-loopMode %d Playback loop mode
(0=Loop, 1=Play Once, 2=Ping-Pong, default=0)
-cli Mu command line interface
-vram %f VRAM usage limit in Mb, default = 64.000000
-cram %f Max region cache RAM usage in Gb,
(6.4Gb available, default 1Gb)
-lram %f Max look-ahead cache RAM usage in Gb,
(6.4Gb available, default 0.2Gb)
-noPBO Prevent use of GL PBOs for pixel transfer
-prefetch Prefetch images for rendering
-useAppleClientStorage Use APPLE_client_storage extension
-useThreadedUpload Use threading for texture uploading/downloading
if possible
-bwait %f Max buffer wait time in cached seconds, default 5.0
-lookback %f Percentage of the lookahead cache reserved for
frames behind the playhead, default 25
-yuv Assume YUV hardware conversion
-noaudio Turn off audio
-audiofs %d Use fixed audio frame size
(results are hardware dependant ... try 512)
-audioCachePacket %d Audio cache packet size in samples (default=2048)
-audioMinCache %f Audio cache min size in seconds (default=0.300000)
-audioMaxCache %f Audio cache max size in seconds (default=0.600000)
-audioModule %S Use specific audio module
-audioDevice %S Use specific audio device
-audioRate %f Use specific output audio rate (default=ask hardware)
-audioPrecision %d Use specific output audio precision (default=16)
-audioNice %d Close audio device when not playing
(may cause problems on some hardware) default=0
-audioNoLock %d Do not use hardware audio/video synchronization
(use software instead, default=0)
-audioPreRoll %d Preroll audio on device open (Linux only; default=0)
-audioGlobalOffset %f Global audio offset in seconds
-audioDeviceLatency %f Audio device latency compensation in milliseconds
-bg %S Background pattern (default=black, white, grey18,
grey50, checker, crosshatch)
-formats Show all supported image and movie formats
-apple Use Quicktime and NSImage libraries (on OS X)
-cinalt Use alternate Cineon/DPX readers
-exrcpus %d EXR thread count (default=0)
-exrRGBA EXR Always read as RGBA (default=false)
-exrInherit EXR guess channel inheritance (default=false)
-exrNoOneChannel EXR never use one channel planar images (default=false)
-exrIOMethod %d [%d] EXR I/O Method (0=standard, 1=buffered, 2=unbuffered,
3=MemoryMap, 4=AsyncBuffered, 5=AsyncUnbuffered,
default=1) and optional chunk size (default=61440)
-exrReadWindowIsDisplayWindow
EXR read window is display window (default=false)
-exrReadWindow %d EXR Read Window Method (0=Data, 1=Display,
2=Union, 3=Data inside Display, default=3)
-jpegRGBA Make JPEG four channel RGBA on read
(default=no, use RGB or YUV)
-jpegIOMethod %d [%d] JPEG I/O Method (0=standard, 1=buffered,
2=unbuffered, 3=MemoryMap, 4=AsyncBuffered,
5=AsyncUnbuffered, default=1) and optional
chunk size (default=61440)
-cinpixel %S Cineon pixel storage (default=RGB8_PLANAR)
-cinchroma Use Cineon chromaticity values
(for default reader only)
-cinIOMethod %d [%d] Cineon I/O Method (0=standard, 1=buffered,
2=unbuffered, 3=MemoryMap, 4=AsyncBuffered,
5=AsyncUnbuffered, default=1) and optional
chunk size (default=61440)
-dpxpixel %S DPX pixel storage (default=RGB8_PLANAR)
-dpxchroma Use DPX chromaticity values (for default reader only)
-dpxIOMethod %d [%d] DPX I/O Method (0=standard, 1=buffered, 2=unbuffered,
3=MemoryMap, 4=AsyncBuffered, 5=AsyncUnbuffered,
default=1) and optional chunk size (default=61440)
-tgaIOMethod %d [%d] TARGA I/O Method (0=standard, 1=buffered,
2=unbuffered, 3=MemoryMap, 4=AsyncBuffered,
5=AsyncUnbuffered, default=1)
and optional chunk size (default=61440)
-tiffIOMethod %d [%d] TIFF I/O Method (0=standard, 1=buffered,
2=unbuffered, 3=MemoryMap, 4=AsyncBuffered,
5=AsyncUnbuffered, default=1) and optional
chunk size (default=61440)
-lic %S Use specific license file
-noPrefs Ignore preferences
-resetPrefs Reset preferences to default values
-qtcss %S Use QT style sheet for UI
-qtstyle %S Use QT style
-qtdesktop %d QT desktop aware, default=1 (on)
-xl Aggressively absorb screen space for large media
-mouse %d Force tablet/stylus events to be treated as a
mouse events, default=0 (off)
-network Start networking
-networkPort %d Port for networking
-networkHost %S Alternate host/address for incoming connections
-networkTag %S Tag to mark automatically saved port file
-networkConnect %S [%d] Start networking and connect to host at port
-networkPerm %d Default network connection permission
(0=Ask, 1=Allow, 2=Deny, default=0)
-reuse %d Try to re-use the current session for
incoming URLs (1 = reuse session,
0 = new session, default = 1)
-nopackages Don't load any packages at startup (for debugging)
-encodeURL Encode the command line as
an rvlink URL, print, and exit
-bakeURL Fully bake the command line as an
rvlink URL, print, and exit
-sendEvent ... Send external events e.g. -sendEvent 'name' 'content'
-flags ... Arbitrary flags (flag, or 'name=value')
for use in Mu code
-debug ... Debug category
-version Show RV version number
-strictlicense Exit rather than consume an rv license if no rvsolo
licenses are available
-prefsPath %S Alternate path to preferences directory
-sleep %d Sleep (in seconds) before starting to
allow attaching debugger
"""

View file

@ -71,7 +71,7 @@ class SetVersion(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -80,7 +80,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
SetVersion(session).register()
SetVersion(session, plugins_presets).register()
def main(arguments=None):

View file

@ -19,61 +19,25 @@ class StartTimer(BaseAction):
entity = entities[0]
if entity.entity_type.lower() != 'task':
return
self.start_ftrack_timer(entity)
try:
self.start_clockify_timer(entity)
except Exception:
self.log.warning(
'Failed starting Clockify timer for task: ' + entity['name']
)
user = self.session.query(
"User where username is \"{}\"".format(self.session.api_user)
).one()
user.start_timer(entity, force=True)
self.session.commit()
self.log.info(
"Starting Ftrack timer for task: {}".format(entity['name'])
)
return
def start_ftrack_timer(self, task):
user_query = 'User where username is "{}"'.format(self.session.api_user)
user = self.session.query(user_query).one()
self.log.info('Starting Ftrack timer for task: ' + task['name'])
user.start_timer(task, force=True)
self.session.commit()
def start_clockify_timer(self, task):
# Validate Clockify settings if Clockify is required
clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None)
if clockify_timer is None:
return
from pype.clockify import ClockifyAPI
clockapi = ClockifyAPI()
if clockapi.verify_api() is False:
return
task_type = task['type']['name']
project_name = task['project']['full_name']
def get_parents(entity):
output = []
if entity.entity_type.lower() == 'project':
return output
output.extend(get_parents(entity['parent']))
output.append(entity['name'])
return output
desc_items = get_parents(task['parent'])
desc_items.append(task['name'])
description = '/'.join(desc_items)
project_id = clockapi.get_project_id(project_name)
tag_ids = []
tag_ids.append(clockapi.get_tag_id(task_type))
clockapi.start_time_entry(
description, project_id, tag_ids=tag_ids
)
self.log.info('Starting Clockify timer for task: ' + task['name'])
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
StartTimer(session).register()
StartTimer(session, plugins_presets).register()

View file

@ -19,16 +19,17 @@ class SyncHierarchicalAttrs(BaseAction):
#: Action identifier.
identifier = 'sync.hierarchical.attrs.local'
#: Action label.
label = 'Sync HierAttrs - Local'
label = "Pype Admin"
variant = '- Sync Hier Attrs (Local)'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg'.format(
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
#: roles that are allowed to register this action
role_list = ['Administrator']
role_list = ['Pypeclub', 'Administrator', 'Project Manager']
def discover(self, session, entities, event):
''' Validation '''
@ -41,6 +42,7 @@ class SyncHierarchicalAttrs(BaseAction):
return False
def launch(self, session, entities, event):
self.interface_messages = {}
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
@ -53,13 +55,27 @@ class SyncHierarchicalAttrs(BaseAction):
})
})
session.commit()
self.log.debug('Job with id "{}" created'.format(job['id']))
process_session = ftrack_api.Session(
server_url=session.server_url,
api_key=session.api_key,
api_user=session.api_user,
auto_connect_event_hub=True
)
try:
# Collect hierarchical attrs
self.log.debug('Collecting Hierarchical custom attributes started')
custom_attributes = {}
all_avalon_attr = session.query(
all_avalon_attr = process_session.query(
'CustomAttributeGroup where name is "avalon"'
).one()
error_key = (
'Hierarchical attributes with set "default" value (not allowed)'
)
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' in cust_attr['key']:
continue
@ -68,6 +84,12 @@ class SyncHierarchicalAttrs(BaseAction):
continue
if cust_attr['default']:
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
cust_attr['label']
)
self.log.warning((
'Custom attribute "{}" has set default value.'
' This attribute can\'t be synchronized'
@ -76,6 +98,10 @@ class SyncHierarchicalAttrs(BaseAction):
custom_attributes[cust_attr['key']] = cust_attr
self.log.debug(
'Collecting Hierarchical custom attributes has finished'
)
if not custom_attributes:
msg = 'No hierarchical attributes to sync.'
self.log.debug(msg)
@ -93,28 +119,61 @@ class SyncHierarchicalAttrs(BaseAction):
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = project_name
for entity in entities:
_entities = self._get_entities(event, process_session)
for entity in _entities:
self.log.debug(30*'-')
self.log.debug(
'Processing entity "{}"'.format(entity.get('name', entity))
)
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
for key in custom_attributes:
self.log.debug(30*'*')
self.log.debug(
'Processing Custom attribute key "{}"'.format(key)
)
# check if entity has that attribute
if key not in entity['custom_attributes']:
self.log.debug(
'Hierachical attribute "{}" not found on "{}"'.format(
key, entity.get('name', entity)
)
error_key = 'Missing key on entities'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.error((
'- key "{}" not found on "{}"'
).format(key, ent_name))
continue
value = self.get_hierarchical_value(key, entity)
if value is None:
self.log.warning(
'Hierarchical attribute "{}" not set on "{}"'.format(
key, entity.get('name', entity)
)
error_key = (
'Missing value for key on entity'
' and its parents (synchronization was skipped)'
)
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.warning((
'- key "{}" not set on "{}" or its parents'
).format(key, ent_name))
continue
self.update_hierarchical_attribute(entity, key, value)
job['status'] = 'done'
session.commit()
except Exception:
self.log.error(
'Action "{}" failed'.format(self.label),
@ -127,6 +186,11 @@ class SyncHierarchicalAttrs(BaseAction):
if job['status'] in ('queued', 'running'):
job['status'] = 'failed'
session.commit()
if self.interface_messages:
title = "Errors during SyncHierarchicalAttrs"
self.show_interface_from_dict(
messages=self.interface_messages, title=title, event=event
)
return True
@ -146,6 +210,27 @@ class SyncHierarchicalAttrs(BaseAction):
entity.entity_type.lower() == 'task'
):
return
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
hierarchy = '/'.join(
[a['name'] for a in entity.get('ancestors', [])]
)
if hierarchy:
hierarchy = '/'.join(
[entity['project']['full_name'], hierarchy, entity['name']]
)
elif entity.entity_type.lower() == 'project':
hierarchy = entity['full_name']
else:
hierarchy = '/'.join(
[entity['project']['full_name'], entity['name']]
)
self.log.debug('- updating entity "{}"'.format(hierarchy))
# collect entity's custom attributes
custom_attributes = entity.get('custom_attributes')
if not custom_attributes:
@ -153,24 +238,49 @@ class SyncHierarchicalAttrs(BaseAction):
mongoid = custom_attributes.get(self.ca_mongoid)
if not mongoid:
self.log.debug('Entity "{}" is not synchronized to avalon.'.format(
entity.get('name', entity)
))
error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" is not synchronized to avalon. Skipping'.format(
ent_name
)
)
return
try:
mongoid = ObjectId(mongoid)
except Exception:
self.log.warning('Entity "{}" has stored invalid MongoID.'.format(
entity.get('name', entity)
))
error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" has stored invalid MongoID. Skipping'.format(
ent_name
)
)
return
# Find entity in Mongo DB
mongo_entity = self.db_con.find_one({'_id': mongoid})
if not mongo_entity:
error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'Entity "{}" is not synchronized to avalon.'.format(
entity.get('name', entity)
'-- entity "{}" was not found in DB by id "{}". Skipping'.format(
ent_name, str(mongoid)
)
)
return
@ -188,17 +298,21 @@ class SyncHierarchicalAttrs(BaseAction):
{'$set': {'data': data}}
)
self.log.debug(
'-- stored value "{}"'.format(value)
)
for child in entity.get('children', []):
self.update_hierarchical_attribute(child, key, value)
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncHierarchicalAttrs(session).register()
SyncHierarchicalAttrs(session, plugins_presets).register()
def main(arguments=None):

View file

@ -47,11 +47,12 @@ class SyncToAvalon(BaseAction):
#: Action identifier.
identifier = 'sync.to.avalon.local'
#: Action label.
label = 'SyncToAvalon - Local'
label = "Pype Admin"
variant = '- Sync To Avalon (Local)'
#: Action description.
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = '{}/ftrack/action_icons/SyncToAvalon-local.svg'.format(
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
#: roles that are allowed to register this action
@ -59,7 +60,7 @@ class SyncToAvalon(BaseAction):
#: Action priority
priority = 200
def __init__(self, session):
def __init__(self, session, plugins_presets):
super(SyncToAvalon, self).__init__(session)
# reload utils on initialize (in case of server restart)
@ -177,17 +178,7 @@ class SyncToAvalon(BaseAction):
job['status'] = 'failed'
session.commit()
event = fa_session.ftrack_api.event.base.Event(
topic='ftrack.action.launch',
data=dict(
actionIdentifier='sync.hierarchical.attrs.local',
selection=event['data']['selection']
),
source=dict(
user=event['source']['user']
)
)
session.event_hub.publish(event, on_error='ignore')
self.trigger_action("sync.hierarchical.attrs.local", event)
if len(message) > 0:
message = "Unable to sync: {}".format(message)
@ -212,7 +203,7 @@ class SyncToAvalon(BaseAction):
self.add_childs_to_importable(child)
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -221,7 +212,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
SyncToAvalon(session).register()
SyncToAvalon(session, plugins_presets).register()
def main(arguments=None):

View file

@ -11,12 +11,10 @@ from pype.ftrack import BaseAction
from avalon import io, inventory, schema
ignore_me = True
class TestAction(BaseAction):
'''Edit meta data action.'''
ignore_me = True
#: Action identifier.
identifier = 'test.action'
#: Action label.
@ -42,13 +40,13 @@ class TestAction(BaseAction):
return True
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
TestAction(session).register()
TestAction(session, plugins_presets).register()
def main(arguments=None):

View file

@ -14,9 +14,11 @@ class ThumbToChildren(BaseAction):
# Action identifier
identifier = 'thumb.to.children'
# Action label
label = 'Thumbnail to Children'
label = 'Thumbnail'
# Action variant
variant = " to Children"
# Action icon
icon = '{}/ftrack/action_icons/thumbToChildren.svg'.format(
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
@ -64,12 +66,12 @@ class ThumbToChildren(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ThumbToChildren(session).register()
ThumbToChildren(session, plugins_presets).register()
def main(arguments=None):

View file

@ -13,9 +13,11 @@ class ThumbToParent(BaseAction):
# Action identifier
identifier = 'thumb.to.parent'
# Action label
label = 'Thumbnail to Parent'
label = 'Thumbnail'
# Action variant
variant = " to Parent"
# Action icon
icon = '{}/ftrack/action_icons/thumbToParent.svg'.format(
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
@ -86,12 +88,12 @@ class ThumbToParent(BaseAction):
}
def register(session, **kw):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ThumbToParent(session).register()
ThumbToParent(session, plugins_presets).register()
def main(arguments=None):

View file

@ -0,0 +1,46 @@
import os
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.vendor.ftrack_api import session as fa_session
class ActionAskWhereIRun(BaseAction):
    """Trigger `ActionShowWhereIRun` to reveal where Pype is running.

    Useful when a user forgets on which machine a pipeline with their
    credentials is currently active.
    """

    # Hidden from the action menu by default.
    ignore_me = True
    # Unique action identifier.
    identifier = 'ask.where.i.run'
    # Label shown in the Ftrack UI.
    label = 'Ask where I run'
    # Short description shown in the Ftrack UI.
    description = 'Triggers PC info where user have running Pype'
    # Icon served from the Pype statics server.
    icon = '{}/ftrack/action_icons/ActionAskWhereIRun.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )

    def discover(self, session, entities, event):
        """Always offer this action.

        The paired `show.where.i.run` action hides itself; this action is
        the intended entry point that triggers it.
        """
        return True

    def launch(self, session, entities, event):
        """Broadcast a `show.where.i.run` trigger tagged with our hub id."""
        # Include this session's event hub id so the responding session
        # can recognise (and skip) an event that originated from itself.
        extra = {"event_hub_id": session.event_hub.id}
        self.trigger_action(
            "show.where.i.run", event, additional_event_data=extra
        )
        return True
def register(session, plugins_presets={}):
    """Register the action. Invoked when loaded as an event plugin."""
    # Only act on a genuine ftrack_api session; anything else is ignored.
    if isinstance(session, ftrack_api.session.Session):
        ActionAskWhereIRun(session, plugins_presets).register()

View file

@ -0,0 +1,86 @@
import platform
import socket
import getpass
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
class ActionShowWhereIRun(BaseAction):
    """Show host information for the machine answering this action.

    Users sometimes forget where a pipeline with their credentials is
    running; every machine with this action registered responds with its
    hostname, IP, username and system info.

    Note: the action must already be registered on the machine to be
    located — it cannot be used retrospectively.
    """

    # Unique action identifier.
    identifier = 'show.where.i.run'
    # Label shown in the Ftrack UI.
    label = 'Show where I run'
    # Short description shown in the Ftrack UI.
    description = 'Shows PC info where user have running Pype'

    def discover(self, session, entities, event):
        """Stay hidden; this action is meant to be triggered remotely.

        Best practice is a companion action (`ask.where.i.run`) that
        fires it via an event.
        """
        return False

    def launch(self, session, entities, event):
        """Collect local host details and present them to the user."""
        # Skip replying when the triggering event came from this very
        # session — the user already knows about this machine.
        origin_hub = event.get("data", {}).get("event_hub_id")
        if session.event_hub.id == origin_hub:
            return True

        title = "Where Do I Run?"
        info = {}
        expected = ["Hostname", "IP", "Username", "System name", "PC name"]

        # Each probe is best-effort: a failure leaves its keys unset and
        # they are backfilled with a placeholder further down.
        try:
            hostname = socket.gethostname()
            info["Hostname"] = hostname
            info["IP"] = socket.gethostbyname(hostname)
        except Exception:
            pass

        try:
            sysname, node, *_ = platform.uname()
            info["System name"] = sysname
            info["PC name"] = node
        except Exception:
            pass

        try:
            info["Username"] = getpass.getuser()
        except Exception:
            pass

        for key in expected:
            if not info.get(key):
                info[key] = "-Undefined-"

        # Build the dialog: an <h3> heading plus a <p> value per entry,
        # with a '---' label as divider between entries.
        divider = {'type': 'label', 'value': '---'}
        widgets = []
        for index, (key, value) in enumerate(info.items()):
            if index:
                widgets.append(divider)
            self.log.debug("{}: {}".format(key, value))
            widgets.append(
                {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
            )
            widgets.append(
                {'type': 'label', 'value': '<p>{}</p>'.format(value)}
            )

        self.show_interface(widgets, title, event=event)
        return True
def register(session, plugins_presets={}):
    """Register the action. Invoked when loaded as an event plugin."""
    if not isinstance(session, ftrack_api.session.Session):
        # Not a genuine ftrack_api session — nothing to register.
        return
    ActionShowWhereIRun(session, plugins_presets).register()

View file

@ -20,11 +20,12 @@ class SyncHierarchicalAttrs(BaseAction):
#: Action identifier.
identifier = 'sync.hierarchical.attrs'
#: Action label.
label = 'Sync HierAttrs'
label = "Pype Admin"
variant = '- Sync Hier Attrs (server)'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format(
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get(
'PYPE_STATICS_SERVER',
'http://localhost:{}'.format(
@ -61,7 +62,7 @@ class SyncHierarchicalAttrs(BaseAction):
if role['security_role']['name'] in role_list:
role_check = True
break
print(self.icon)
if role_check is True:
for entity in entities:
context_type = entity.get('context_type', '').lower()
@ -75,6 +76,8 @@ class SyncHierarchicalAttrs(BaseAction):
return discover
def launch(self, session, entities, event):
self.interface_messages = {}
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
@ -87,13 +90,26 @@ class SyncHierarchicalAttrs(BaseAction):
})
})
session.commit()
self.log.debug('Job with id "{}" created'.format(job['id']))
process_session = ftrack_api.Session(
server_url=session.server_url,
api_key=session.api_key,
api_user=session.api_user,
auto_connect_event_hub=True
)
try:
# Collect hierarchical attrs
self.log.debug('Collecting Hierarchical custom attributes started')
custom_attributes = {}
all_avalon_attr = session.query(
all_avalon_attr = process_session.query(
'CustomAttributeGroup where name is "avalon"'
).one()
error_key = (
'Hierarchical attributes with set "default" value (not allowed)'
)
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' in cust_attr['key']:
continue
@ -102,6 +118,12 @@ class SyncHierarchicalAttrs(BaseAction):
continue
if cust_attr['default']:
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
cust_attr['label']
)
self.log.warning((
'Custom attribute "{}" has set default value.'
' This attribute can\'t be synchronized'
@ -110,6 +132,10 @@ class SyncHierarchicalAttrs(BaseAction):
custom_attributes[cust_attr['key']] = cust_attr
self.log.debug(
'Collecting Hierarchical custom attributes has finished'
)
if not custom_attributes:
msg = 'No hierarchical attributes to sync.'
self.log.debug(msg)
@ -127,28 +153,61 @@ class SyncHierarchicalAttrs(BaseAction):
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = project_name
for entity in entities:
_entities = self._get_entities(event, process_session)
for entity in _entities:
self.log.debug(30*'-')
self.log.debug(
'Processing entity "{}"'.format(entity.get('name', entity))
)
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
for key in custom_attributes:
self.log.debug(30*'*')
self.log.debug(
'Processing Custom attribute key "{}"'.format(key)
)
# check if entity has that attribute
if key not in entity['custom_attributes']:
self.log.debug(
'Hierachical attribute "{}" not found on "{}"'.format(
key, entity.get('name', entity)
)
error_key = 'Missing key on entities'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.error((
'- key "{}" not found on "{}"'
).format(key, entity.get('name', entity)))
continue
value = self.get_hierarchical_value(key, entity)
if value is None:
self.log.warning(
'Hierarchical attribute "{}" not set on "{}"'.format(
key, entity.get('name', entity)
)
error_key = (
'Missing value for key on entity'
' and its parents (synchronization was skipped)'
)
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.warning((
'- key "{}" not set on "{}" or its parents'
).format(key, ent_name))
continue
self.update_hierarchical_attribute(entity, key, value)
job['status'] = 'done'
session.commit()
except Exception:
self.log.error(
'Action "{}" failed'.format(self.label),
@ -161,6 +220,9 @@ class SyncHierarchicalAttrs(BaseAction):
if job['status'] in ('queued', 'running'):
job['status'] = 'failed'
session.commit()
if self.interface_messages:
self.show_interface_from_dict(self.interface_messages, event)
return True
@ -180,6 +242,27 @@ class SyncHierarchicalAttrs(BaseAction):
entity.entity_type.lower() == 'task'
):
return
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
hierarchy = '/'.join(
[a['name'] for a in entity.get('ancestors', [])]
)
if hierarchy:
hierarchy = '/'.join(
[entity['project']['full_name'], hierarchy, entity['name']]
)
elif entity.entity_type.lower() == 'project':
hierarchy = entity['full_name']
else:
hierarchy = '/'.join(
[entity['project']['full_name'], entity['name']]
)
self.log.debug('- updating entity "{}"'.format(hierarchy))
# collect entity's custom attributes
custom_attributes = entity.get('custom_attributes')
if not custom_attributes:
@ -187,24 +270,49 @@ class SyncHierarchicalAttrs(BaseAction):
mongoid = custom_attributes.get(self.ca_mongoid)
if not mongoid:
self.log.debug('Entity "{}" is not synchronized to avalon.'.format(
entity.get('name', entity)
))
error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" is not synchronized to avalon. Skipping'.format(
ent_name
)
)
return
try:
mongoid = ObjectId(mongoid)
except Exception:
self.log.warning('Entity "{}" has stored invalid MongoID.'.format(
entity.get('name', entity)
))
error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" has stored invalid MongoID. Skipping'.format(
ent_name
)
)
return
# Find entity in Mongo DB
mongo_entity = self.db_con.find_one({'_id': mongoid})
if not mongo_entity:
error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'Entity "{}" is not synchronized to avalon.'.format(
entity.get('name', entity)
'-- entity "{}" was not found in DB by id "{}". Skipping'.format(
ent_name, str(mongoid)
)
)
return
@ -226,13 +334,13 @@ class SyncHierarchicalAttrs(BaseAction):
self.update_hierarchical_attribute(child, key, value)
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncHierarchicalAttrs(session).register()
SyncHierarchicalAttrs(session, plugins_presets).register()
def main(arguments=None):

View file

@ -48,11 +48,12 @@ class Sync_To_Avalon(BaseAction):
#: Action identifier.
identifier = 'sync.to.avalon'
#: Action label.
label = 'SyncToAvalon'
label = "Pype Admin"
variant = "- Sync To Avalon (Server)"
#: Action description.
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = '{}/ftrack/action_icons/SyncToAvalon.svg'.format(
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get(
'PYPE_STATICS_SERVER',
'http://localhost:{}'.format(
@ -206,18 +207,8 @@ class Sync_To_Avalon(BaseAction):
job['status'] = 'failed'
session.commit()
event = fa_session.ftrack_api.event.base.Event(
topic='ftrack.action.launch',
data=dict(
actionIdentifier='sync.hierarchical.attrs',
selection=event['data']['selection']
),
source=dict(
user=event['source']['user']
)
)
session.event_hub.publish(event, on_error='ignore')
self.trigger_action("sync.hierarchical.attrs", event)
if len(message) > 0:
message = "Unable to sync: {}".format(message)
@ -242,7 +233,7 @@ class Sync_To_Avalon(BaseAction):
self.add_childs_to_importable(child)
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
@ -251,7 +242,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
Sync_To_Avalon(session).register()
SyncToAvalon(session, plugins_presets).register()
def main(arguments=None):

View file

@ -51,9 +51,9 @@ class DelAvalonIdFromNew(BaseEvent):
continue
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
DelAvalonIdFromNew(session).register()
DelAvalonIdFromNew(session, plugins_presets).register()

View file

@ -86,9 +86,9 @@ class NextTaskUpdate(BaseEvent):
session.rollback()
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
NextTaskUpdate(session).register()
NextTaskUpdate(session, plugins_presets).register()

View file

@ -2,11 +2,10 @@ from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent
ignore_me = True
class Radio_buttons(BaseEvent):
ignore_me = True
def launch(self, session, event):
'''Provides a readio button behaviour to any bolean attribute in
radio_button group.'''
@ -34,9 +33,10 @@ class Radio_buttons(BaseEvent):
session.commit()
def register(session):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
Radio_buttons(session).register()
Radio_buttons(session, plugins_presets).register()

View file

@ -115,9 +115,9 @@ class SyncHierarchicalAttrs(BaseEvent):
self.update_hierarchical_attribute(child, key, value)
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncHierarchicalAttrs(session).register()
SyncHierarchicalAttrs(session, plugins_presets).register()

View file

@ -16,7 +16,7 @@ class Sync_to_Avalon(BaseEvent):
# If mongo_id textfield has changed: RETURN!
# - infinite loop
for ent in event['data']['entities']:
if 'keys' in ent:
if ent.get('keys') is not None:
if ca_mongoid in ent['keys']:
return
@ -109,19 +109,19 @@ class Sync_to_Avalon(BaseEvent):
' for more information.'
)
items = [
{'type': 'label', 'value':'# Fatal Error'},
{'type': 'label', 'value': '# Fatal Error'},
{'type': 'label', 'value': '<p>{}</p>'.format(ftrack_message)}
]
self.show_interface(event, items, title)
self.show_interface(items, title, event=event)
self.log.error('Fatal error during sync: {}'.format(message))
return
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
Sync_to_Avalon(session).register()
Sync_to_Avalon(session, plugins_presets).register()

View file

@ -5,11 +5,10 @@ from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent
ignore_me = True
class Test_Event(BaseEvent):
ignore_me = True
priority = 10000
def launch(self, session, event):
@ -21,9 +20,9 @@ class Test_Event(BaseEvent):
return True
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
Test_Event(session).register()
Test_Event(session, plugins_presets).register()

View file

@ -45,9 +45,9 @@ class ThumbnailEvents(BaseEvent):
pass
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ThumbnailEvents(session).register()
ThumbnailEvents(session, plugins_presets).register()

View file

@ -229,11 +229,11 @@ class UserAssigmentEvent(BaseEvent):
return True
def register(session, **kw):
def register(session, plugins_presets):
"""
Register plugin. Called when used as an plugin.
"""
if not isinstance(session, ftrack_api.session.Session):
return
UserAssigmentEvent(session).register()
UserAssigmentEvent(session, plugins_presets).register()

View file

@ -69,9 +69,9 @@ class VersionToTaskStatus(BaseEvent):
path, task_status['name']))
def register(session, **kw):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
VersionToTaskStatus(session).register()
VersionToTaskStatus(session, plugins_presets).register()

View file

@ -5,7 +5,9 @@ import importlib
from pype.vendor import ftrack_api
import time
import logging
from pypeapp import Logger
import inspect
from pypeapp import Logger, config
log = Logger().get_logger(__name__)
@ -27,8 +29,8 @@ PYTHONPATH # Path to ftrack_api and paths to all modules used in actions
"""
class FtrackServer():
def __init__(self, type='action'):
class FtrackServer:
def __init__(self, server_type='action'):
"""
- 'type' is by default set to 'action' - Runs Action server
- enter 'event' for Event server
@ -43,21 +45,12 @@ class FtrackServer():
ftrack_log = logging.getLogger("ftrack_api")
ftrack_log.setLevel(logging.WARNING)
self.type = type
self.actionsAvailable = True
self.eventsAvailable = True
# Separate all paths
if "FTRACK_ACTIONS_PATH" in os.environ:
all_action_paths = os.environ["FTRACK_ACTIONS_PATH"]
self.actionsPaths = all_action_paths.split(os.pathsep)
else:
self.actionsAvailable = False
env_key = "FTRACK_ACTIONS_PATH"
if server_type.lower() == 'event':
env_key = "FTRACK_EVENTS_PATH"
if "FTRACK_EVENTS_PATH" in os.environ:
all_event_paths = os.environ["FTRACK_EVENTS_PATH"]
self.eventsPaths = all_event_paths.split(os.pathsep)
else:
self.eventsAvailable = False
self.server_type = server_type
self.env_key = env_key
def stop_session(self):
if self.session.event_hub.connected is True:
@ -67,7 +60,7 @@ class FtrackServer():
def set_files(self, paths):
# Iterate all paths
functions = []
register_functions_dict = []
for path in paths:
# add path to PYTHON PATH
if path not in sys.path:
@ -80,32 +73,23 @@ class FtrackServer():
if '.pyc' in file or '.py' not in file:
continue
ignore = 'ignore_me'
mod = importlib.import_module(os.path.splitext(file)[0])
importlib.reload(mod)
mod_functions = dict(
[
(name, function)
for name, function in mod.__dict__.items()
if isinstance(function, types.FunctionType) or
name == ignore
if isinstance(function, types.FunctionType)
]
)
# Don't care about ignore_me files
if (
ignore in mod_functions and
mod_functions[ignore] is True
):
continue
# separate files by register function
if 'register' not in mod_functions:
msg = (
'"{0}" - Missing register method'
).format(file, self.type)
msg = ('"{}" - Missing register method').format(file)
log.warning(msg)
continue
functions.append({
register_functions_dict.append({
'name': file,
'register': mod_functions['register']
})
@ -115,43 +99,47 @@ class FtrackServer():
)
log.warning(msg)
if len(functions) < 1:
if len(register_functions_dict) < 1:
raise Exception
# Load presets for setting plugins
key = "user"
if self.server_type.lower() == "event":
key = "server"
plugins_presets = config.get_presets().get(
"ftrack", {}
).get("plugins", {}).get(key, {})
function_counter = 0
for function in functions:
for function_dict in register_functions_dict:
register = function_dict["register"]
try:
function['register'](self.session)
if len(inspect.signature(register).parameters) == 1:
register(self.session)
else:
register(self.session, plugins_presets=plugins_presets)
if function_counter%7 == 0:
time.sleep(0.1)
function_counter += 1
except Exception as e:
except Exception as exc:
msg = '"{}" - register was not successful ({})'.format(
function['name'], str(e)
function_dict['name'], str(exc)
)
log.warning(msg)
def run_server(self):
self.session = ftrack_api.Session(auto_connect_event_hub=True,)
if self.type.lower() == 'event':
if self.eventsAvailable is False:
msg = (
'FTRACK_EVENTS_PATH is not set'
', event server won\'t launch'
)
log.error(msg)
return
self.set_files(self.eventsPaths)
else:
if self.actionsAvailable is False:
msg = (
'FTRACK_ACTIONS_PATH is not set'
', action server won\'t launch'
)
log.error(msg)
return
self.set_files(self.actionsPaths)
paths_str = os.environ.get(self.env_key)
if paths_str is None:
log.error((
"Env var \"{}\" is not set, \"{}\" server won\'t launch"
).format(self.env_key, self.server_type))
return
paths = paths_str.split(os.pathsep)
self.set_files(paths)
log.info(60*"*")
log.info('Registration of actions/events has finished!')

View file

@ -1,14 +1,13 @@
import os
import re
import json
from pype import lib as pypelib
from pype.lib import get_avalon_database
from bson.objectid import ObjectId
import avalon
import avalon.api
from avalon import schema
from avalon.vendor import toml, jsonschema
from pypeapp import Logger
from pypeapp import Logger, Anatomy, config
ValidationError = jsonschema.ValidationError
@ -53,8 +52,8 @@ def import_to_avalon(
if entity_type in ['Project']:
type = 'project'
config = get_project_config(entity)
schema.validate(config)
proj_config = get_project_config(entity)
schema.validate(proj_config)
av_project_code = None
if av_project is not None and 'code' in av_project['data']:
@ -62,13 +61,12 @@ def import_to_avalon(
ft_project_code = ft_project['name']
if av_project is None:
project_schema = pypelib.get_avalon_project_template_schema()
item = {
'schema': project_schema,
'schema': "avalon-core:project-2.0",
'type': type,
'name': project_name,
'data': dict(),
'config': config,
'config': proj_config,
'parent': None,
}
schema.validate(item)
@ -118,13 +116,13 @@ def import_to_avalon(
# not override existing templates!
templates = av_project['config'].get('template', None)
if templates is not None:
for key, value in config['template'].items():
for key, value in proj_config['template'].items():
if (
key in templates and
templates[key] is not None and
templates[key] != value
):
config['template'][key] = templates[key]
proj_config['template'][key] = templates[key]
projectId = av_project['_id']
@ -144,7 +142,7 @@ def import_to_avalon(
{'_id': ObjectId(projectId)},
{'$set': {
'name': project_name,
'config': config,
'config': proj_config,
'data': data
}}
)
@ -214,9 +212,8 @@ def import_to_avalon(
{'type': 'asset', 'name': name}
)
if avalon_asset is None:
asset_schema = pypelib.get_avalon_asset_template_schema()
item = {
'schema': asset_schema,
'schema': "avalon-core:asset-2.0",
'name': name,
'silo': silo,
'parent': ObjectId(projectId),
@ -329,13 +326,26 @@ def import_to_avalon(
return output
def get_avalon_attr(session):
def get_avalon_attr(session, split_hierarchical=False):
    """Collect ftrack custom attributes from the "avalon" attribute group.

    Attributes whose key contains "avalon_" are internal and skipped.

    Args:
        session: ftrack_api session used for querying.
        split_hierarchical (bool): when True, hierarchical attributes are
            returned separately.

    Returns:
        list, or (list, list) tuple when *split_hierarchical* is True.
    """
    custom_attributes = []
    hier_custom_attributes = []
    all_avalon_attr = session.query(
        'CustomAttributeGroup where name is "avalon"'
    ).one()
    for cust_attr in all_avalon_attr['custom_attribute_configurations']:
        # Internal avalon bookkeeping keys are never exposed.
        if 'avalon_' in cust_attr['key']:
            continue

        if split_hierarchical and cust_attr["is_hierarchical"]:
            hier_custom_attributes.append(cust_attr)
            continue

        custom_attributes.append(cust_attr)

    if split_hierarchical:
        # return tuple
        return custom_attributes, hier_custom_attributes
    return custom_attributes
@ -345,13 +355,12 @@ def changeability_check_childs(entity):
childs = entity['children']
for child in childs:
if child.entity_type.lower() == 'task':
config = get_config_data()
if 'sync_to_avalon' in config:
config = config['sync_to_avalon']
if 'statuses_name_change' in config:
available_statuses = config['statuses_name_change']
else:
available_statuses = []
available_statuses = config.get_presets().get(
"ftrack", {}).get(
"ftrack_config", {}).get(
"sync_to_avalon", {}).get(
"statuses_name_change", []
)
ent_status = child['status']['name'].lower()
if ent_status not in available_statuses:
return False
@ -480,14 +489,28 @@ def get_avalon_project(ft_project):
return avalon_project
def get_project_config(entity):
config = {}
config['schema'] = pypelib.get_avalon_project_config_schema()
config['tasks'] = get_tasks(entity)
config['apps'] = get_project_apps(entity)
config['template'] = pypelib.get_avalon_project_template()
def get_avalon_project_template():
    """Get avalon template

    Returns:
        dictionary with templates
    """
    # Anatomy is provided by pypeapp (imported at file top).
    templates = Anatomy().templates
    # Expose only the avalon-relevant template entries.
    return {
        key: templates["avalon"][key]
        for key in ('workfile', 'work', 'publish')
    }
def get_project_config(entity):
    """Assemble the avalon project config document for a ftrack *entity*.

    Returns:
        dict: schema id, task list, application list and path templates.
    """
    return {
        'schema': 'avalon-core:config-1.0',
        'tasks': get_tasks(entity),
        'apps': get_project_apps(entity),
        'template': get_avalon_project_template()
    }
def get_tasks(project):
@ -507,11 +530,17 @@ def get_project_apps(entity):
apps = []
for app in entity['custom_attributes']['applications']:
try:
app_config = {}
app_config['name'] = app
app_config['label'] = toml.load(avalon.lib.which_app(app))['label']
toml_path = avalon.lib.which_app(app)
if not toml_path:
log.warning((
'Missing config file for application "{}"'
).format(app))
continue
apps.append(app_config)
apps.append({
'name': app,
'label': toml.load(toml_path)['label']
})
except Exception as e:
log.warning('Error with application {0} - {1}'.format(app, e))
@ -533,7 +562,7 @@ def avalon_check_name(entity, inSchema=None):
if entity.entity_type in ['Project']:
# data['type'] = 'project'
name = entity['full_name']
# schema = get_avalon_project_template_schema()
# schema = "avalon-core:project-2.0"
data['silo'] = 'Film'
@ -551,24 +580,6 @@ def avalon_check_name(entity, inSchema=None):
raise ValueError(msg.format(name))
def get_config_data():
path_items = [pypelib.get_presets_path(), 'ftrack', 'ftrack_config.json']
filepath = os.path.sep.join(path_items)
data = dict()
try:
with open(filepath) as data_file:
data = json.load(data_file)
except Exception as e:
msg = (
'Loading "Ftrack Config file" Failed.'
' Please check log for more information.'
)
log.warning("{} - {}".format(msg, str(e)))
return data
def show_errors(obj, event, errors):
title = 'Hey You! You raised few Errors! (*look below*)'
items = []
@ -590,4 +601,4 @@ def show_errors(obj, event, errors):
obj.log.error(
'{}: {}'.format(key, message)
)
obj.show_interface(event, items, title)
obj.show_interface(items, title, event=event)

View file

@ -21,9 +21,9 @@ class BaseAction(BaseHandler):
icon = None
type = 'Action'
def __init__(self, session):
def __init__(self, session, plugins_presets={}):
'''Expects a ftrack_api.Session instance'''
super().__init__(session)
super().__init__(session, plugins_presets)
if self.label is None:
raise ValueError(

View file

@ -5,7 +5,7 @@ from avalon import lib as avalonlib
import acre
from pype import api as pype
from pype import lib as pypelib
from .avalon_sync import get_config_data
from pypeapp import config
from .ftrack_base_handler import BaseHandler
from pypeapp import Anatomy
@ -26,10 +26,10 @@ class AppAction(BaseHandler):
preactions = ['start.timer']
def __init__(
self, session, label, name, executable,
variant=None, icon=None, description=None, preactions=[]
self, session, label, name, executable, variant=None,
icon=None, description=None, preactions=[], plugins_presets={}
):
super().__init__(session)
super().__init__(session, plugins_presets)
'''Expects a ftrack_api.Session instance'''
if label is None:
@ -94,6 +94,9 @@ class AppAction(BaseHandler):
):
return False
if entities[0]['parent'].entity_type.lower() == 'project':
return False
ft_project = entities[0]['project']
database = pypelib.get_avalon_database()
@ -218,11 +221,22 @@ class AppAction(BaseHandler):
anatomy = anatomy.format(data)
work_template = anatomy["work"]["folder"]
except Exception as e:
self.log.exception(
"{0} Error in anatomy.format: {1}".format(__name__, e)
except Exception as exc:
msg = "{} Error in anatomy.format: {}".format(
__name__, str(exc)
)
os.environ["AVALON_WORKDIR"] = os.path.normpath(work_template)
self.log.error(msg, exc_info=True)
return {
'success': False,
'message': msg
}
workdir = os.path.normpath(work_template)
os.environ["AVALON_WORKDIR"] = workdir
try:
os.makedirs(workdir)
except FileExistsError:
pass
# collect all parents from the task
parents = []
@ -325,10 +339,10 @@ class AppAction(BaseHandler):
pass
# Change status of task to In progress
config = get_config_data()
presets = config.get_presets()["ftrack"]["ftrack_config"]
if 'status_update' in config:
statuses = config['status_update']
if 'status_update' in presets:
statuses = presets['status_update']
actual_status = entity['status']['name'].lower()
next_status_name = None
@ -348,7 +362,7 @@ class AppAction(BaseHandler):
session.commit()
except Exception:
msg = (
'Status "{}" in config wasn\'t found on Ftrack'
'Status "{}" in presets wasn\'t found on Ftrack'
).format(next_status_name)
self.log.warning(msg)

View file

@ -26,9 +26,10 @@ class BaseHandler(object):
priority = 100
# Type is just for logging purpose (e.g.: Action, Event, Application,...)
type = 'No-type'
ignore_me = False
preactions = []
def __init__(self, session):
def __init__(self, session, plugins_presets={}):
'''Expects a ftrack_api.Session instance'''
self._session = session
self.log = Logger().get_logger(self.__class__.__name__)
@ -36,11 +37,23 @@ class BaseHandler(object):
# Using decorator
self.register = self.register_decorator(self.register)
self.launch = self.launch_log(self.launch)
self.plugins_presets = plugins_presets
# Decorator
def register_decorator(self, func):
@functools.wraps(func)
def wrapper_register(*args, **kwargs):
presets_data = self.plugins_presets.get(self.__class__.__name__)
if presets_data:
for key, value in presets_data.items():
if not hasattr(self, key):
continue
setattr(self, key, value)
if self.ignore_me:
return
label = self.__class__.__name__
if hasattr(self, 'label'):
if self.variant is None:
@ -72,7 +85,7 @@ class BaseHandler(object):
self.type, label)
)
except Exception as e:
self.log.exception('{} "{}" - Registration failed ({})'.format(
self.log.error('{} "{}" - Registration failed ({})'.format(
self.type, label, str(e))
)
return wrapper_register
@ -83,23 +96,23 @@ class BaseHandler(object):
def wrapper_launch(*args, **kwargs):
label = self.__class__.__name__
if hasattr(self, 'label'):
if self.variant is None:
label = self.label
else:
label = '{} {}'.format(self.label, self.variant)
label = self.label
if hasattr(self, 'variant'):
if self.variant is not None:
label = '{} {}'.format(self.label, self.variant)
self.log.info(('{} "{}": Launched').format(self.type, label))
try:
self.log.info(('{} "{}": Launched').format(self.type, label))
result = func(*args, **kwargs)
self.log.info(('{} "{}": Finished').format(self.type, label))
return result
except Exception as e:
msg = '{} "{}": Failed ({})'.format(self.type, label, str(e))
self.log.exception(msg)
return func(*args, **kwargs)
except Exception as exc:
msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
self.log.error(msg, exc_info=True)
return {
'success': False,
'message': msg
}
finally:
self.log.info(('{} "{}": Finished').format(self.type, label))
return wrapper_launch
@property
@ -127,6 +140,13 @@ class BaseHandler(object):
# Custom validations
result = self.preregister()
if result is None:
self.log.debug((
"\"{}\" 'preregister' method returned 'None'. Expected it"
" didn't fail and continue as preregister returned True."
).format(self.__class__.__name__))
return
if result is True:
return
msg = "Pre-register conditions were not met"
@ -194,7 +214,6 @@ class BaseHandler(object):
def _translate_event(self, session, event):
'''Return *event* translated structure to be used with the API.'''
'''Return *event* translated structure to be used with the API.'''
_entities = event['data'].get('entities_object', None)
if (
_entities is None or
@ -209,26 +228,29 @@ class BaseHandler(object):
event
]
def _get_entities(self, event):
self.session._local_cache.clear()
selection = event['data'].get('selection', [])
def _get_entities(self, event, session=None):
if session is None:
session = self.session
session._local_cache.clear()
selection = event['data'].get('selection') or []
_entities = []
for entity in selection:
_entities.append(
self.session.get(
self._get_entity_type(entity),
entity.get('entityId')
)
)
_entities.append(session.get(
self._get_entity_type(entity, session),
entity.get('entityId')
))
event['data']['entities_object'] = _entities
return _entities
def _get_entity_type(self, entity):
def _get_entity_type(self, entity, session=None):
'''Return translated entity type tht can be used with API.'''
# Get entity type and make sure it is lower cased. Most places except
# the component tab in the Sidebar will use lower case notation.
entity_type = entity.get('entityType').replace('_', '').lower()
if session is None:
session = self.session
for schema in self.session.schemas:
alias_for = schema.get('alias_for')
@ -305,30 +327,13 @@ class BaseHandler(object):
# Launch preactions
for preaction in self.preactions:
event = fa_session.ftrack_api.event.base.Event(
topic='ftrack.action.launch',
data=dict(
actionIdentifier=preaction,
selection=selection
),
source=dict(
user=dict(username=session.api_user)
)
)
session.event_hub.publish(event, on_error='ignore')
self.trigger_action(preaction, event)
# Relaunch this action
event = fa_session.ftrack_api.event.base.Event(
topic='ftrack.action.launch',
data=dict(
actionIdentifier=self.identifier,
selection=selection,
preactions_launched=True
),
source=dict(
user=dict(username=session.api_user)
)
additional_data = {"preactions_launched": True}
self.trigger_action(
self.identifier, event, additional_event_data=additional_data
)
session.event_hub.publish(event, on_error='ignore')
return False
@ -430,12 +435,47 @@ class BaseHandler(object):
on_error='ignore'
)
def show_interface(self, event, items, title=''):
def show_interface(
self, items, title='',
event=None, user=None, username=None, user_id=None
):
"""
Shows interface to user who triggered event
Shows interface to user
- to identify user must be entered one of args:
event, user, username, user_id
- 'items' must be list containing Ftrack interface items
"""
user_id = event['source']['user']['id']
if not any([event, user, username, user_id]):
raise TypeError((
'Missing argument `show_interface` requires one of args:'
' event (ftrack_api Event object),'
' user (ftrack_api User object)'
' username (string) or user_id (string)'
))
if event:
user_id = event['source']['user']['id']
elif user:
user_id = user['id']
else:
if user_id:
key = 'id'
value = user_id
else:
key = 'username'
value = username
user = self.session.query(
'User where {} is "{}"'.format(key, value)
).first()
if not user:
raise TypeError((
'Ftrack user with {} "{}" was not found!'.format(key, value)
))
user_id = user['id']
target = (
'applicationId=ftrack.client.web and user.id="{0}"'
).format(user_id)
@ -452,3 +492,90 @@ class BaseHandler(object):
),
on_error='ignore'
)
def show_interface_from_dict(
    self, messages, title="", event=None,
    user=None, username=None, user_id=None
):
    """Build interface items from *messages* and show them to a user.

    Each dict key becomes a subtitle; its value (a string or a list of
    strings) becomes one or more paragraphs below it. Sections are
    separated with a horizontal-rule label. User identification args are
    forwarded to ``show_interface``.
    """
    if not messages:
        self.log.debug("No messages to show! (messages dict is empty)")
        return

    items = []
    splitter = {'type': 'label', 'value': '---'}

    for index, (key, value) in enumerate(messages.items()):
        # Separate consecutive sections, but not before the first one.
        if index > 0:
            items.append(splitter)

        items.append(
            {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
        )

        # A plain string is treated as a single-paragraph section.
        entries = value if isinstance(value, list) else [value]
        for entry in entries:
            items.append(
                {'type': 'label', 'value': '<p>{}</p>'.format(entry)}
            )

    self.show_interface(items, title, event, user, username, user_id)
def trigger_action(
    self, action_name, event=None, session=None,
    selection=None, user_data=None,
    topic="ftrack.action.launch", additional_event_data={},
    on_error="ignore"
):
    """Publish an event that triggers the action named *action_name*.

    Selection and user data default to values taken from *event* (when
    given) and may be overridden by the explicit *selection* and
    *user_data* arguments. When either is missing, the trigger is
    skipped with an error log instead of raising.
    """
    self.log.debug("Triggering action \"{}\" Begins".format(action_name))

    if not session:
        session = self.session

    # Defaults come from the triggering event; explicit arguments win.
    _selection = event.get("data", {}).get("selection") if event else None
    _user_data = event.get("source", {}).get("user") if event else None
    if selection is not None:
        _selection = selection
    if user_data is not None:
        _user_data = user_data

    # Without selection and user data skip triggering
    msg = "Can't trigger \"{}\" action without {}."
    missing = None
    if _selection is None:
        missing = "selection"
    elif _user_data is None:
        missing = "user data"
    if missing is not None:
        self.log.error(msg.format(action_name, missing))
        return

    _event_data = {
        "actionIdentifier": action_name,
        "selection": _selection
    }
    # Add additional data
    if additional_event_data:
        _event_data.update(additional_event_data)

    # Create and trigger event
    session.event_hub.publish(
        fa_session.ftrack_api.event.base.Event(
            topic=topic,
            data=_event_data,
            source=dict(user=_user_data)
        ),
        on_error=on_error
    )
    self.log.debug(
        "Action \"{}\" Triggered successfully".format(action_name)
    )

View file

@ -15,9 +15,9 @@ class BaseEvent(BaseHandler):
type = 'Event'
def __init__(self, session):
def __init__(self, session, plugins_presets={}):
'''Expects a ftrack_api.Session instance'''
super().__init__(session)
super().__init__(session, plugins_presets)
# Decorator
def launch_log(self, func):
@ -25,9 +25,12 @@ class BaseEvent(BaseHandler):
def wrapper_launch(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception as e:
self.log.info('{} Failed ({})'.format(
self.__class__.__name__, str(e))
except Exception as exc:
self.log.error(
'Event "{}" Failed: {}'.format(
self.__class__.__name__, str(exc)
),
exc_info=True
)
return wrapper_launch
@ -43,11 +46,7 @@ class BaseEvent(BaseHandler):
self.session.rollback()
self.session._local_cache.clear()
self.launch(
self.session, event
)
return
self.launch(self.session, event)
def _translate_event(self, session, event):
'''Return *event* translated structure to be used with the API.'''

View file

@ -88,9 +88,11 @@ class FtrackModule:
def set_action_server(self):
try:
self.action_server.run_server()
except Exception:
msg = 'Ftrack Action server crashed! Please try to start again.'
log.error(msg)
except Exception as exc:
log.error(
"Ftrack Action server crashed! Please try to start again.",
exc_info=True
)
# TODO show message to user
self.bool_action_server = False
self.set_menu_visibility()

View file

@ -138,8 +138,8 @@ def update_frame_range(comp, representations):
versions = io.find({"type": "version", "_id": {"$in": version_ids}})
versions = list(versions)
start = min(v["data"]["startFrame"] for v in versions)
end = max(v["data"]["endFrame"] for v in versions)
start = min(v["data"]["frameStart"] for v in versions)
end = max(v["data"]["frameEnd"] for v in versions)
fusion_lib.update_frame_range(start, end, comp=comp)

View file

@ -10,10 +10,7 @@ from avalon.houdini import pipeline as houdini
from pype.houdini import lib
from pype.lib import (
any_outdated,
update_task_from_path
)
from pype.lib import any_outdated
PARENT_DIR = os.path.dirname(__file__)
@ -57,8 +54,6 @@ def on_save(*args):
avalon.logger.info("Running callback on save..")
update_task_from_path(hou.hipFile.path())
nodes = lib.get_id_required_nodes()
for node, new_id in lib.generate_ids(nodes):
lib.set_id(node, new_id, overwrite=False)
@ -68,8 +63,6 @@ def on_open(*args):
avalon.logger.info("Running callback on open..")
update_task_from_path(hou.hipFile.path())
if any_outdated():
from ..widgets import popup

View file

@ -205,7 +205,7 @@ def validate_fps():
"""
fps = lib.get_asset_fps()
fps = lib.get_asset()["data"]["fps"]
current_fps = hou.fps() # returns float
if current_fps != fps:

View file

@ -4,6 +4,7 @@ import logging
import importlib
import itertools
import contextlib
import subprocess
from .vendor import pather
from .vendor.pather.error import ParseError
@ -15,11 +16,66 @@ import avalon
log = logging.getLogger(__name__)
def get_handle_irregular(asset):
data = asset["data"]
handle_start = data.get("handle_start", 0)
handle_end = data.get("handle_end", 0)
return (handle_start, handle_end)
# Special naming case for subprocess since its a built-in method.
def _subprocess(args):
"""Convenience method for getting output errors for subprocess."""
proc = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
env=os.environ
)
output = proc.communicate()[0]
if proc.returncode != 0:
raise ValueError("\"{}\" was not successful: {}".format(args, output))
def get_hierarchy(asset_name=None):
    """Return the "/"-joined hierarchy path of an asset from mongo db.

    When *asset_name* is omitted the asset is taken from the current
    session (``AVALON_ASSET``). If the asset document already carries a
    cached ``data.parents`` list, it is joined and returned directly;
    otherwise the ``visualParent`` chain is walked upward, the result is
    cached back onto the asset document, and the joined path is returned.

    Args:
        asset_name (str, optional): asset to look up; defaults to the
            session/environment asset.

    Returns:
        str: asset hierarchy path
    """
    name = asset_name or io.Session.get(
        "AVALON_ASSET", os.environ["AVALON_ASSET"]
    )

    asset_entity = io.find_one({"type": 'asset', "name": name})

    sentinel = "PARENTS_NOT_SET"
    cached_parents = asset_entity.get("data", {}).get("parents", sentinel)
    if cached_parents != sentinel:
        # Parents were resolved before; reuse the cached list.
        return "/".join(cached_parents)

    # Walk up through visualParent links collecting each parent's name.
    # NOTE(review): names are collected child -> root; confirm consumers
    # expect this order for the cached "parents" list.
    parents = []
    node = asset_entity
    while True:
        parent_id = node.get("data", {}).get("visualParent")
        if not parent_id:
            break
        node = io.find_one({"_id": parent_id})
        parents.append(node["name"])

    # Cache the computed parents on the asset for future queries.
    entity_data = asset_entity.get("data", {})
    entity_data["parents"] = parents
    io.update_many(
        {"_id": asset_entity["_id"]},
        {"$set": {"data": entity_data}}
    )

    return "/".join(parents)
def add_tool_to_environment(tools):
@ -138,45 +194,6 @@ def any_outdated():
return False
def update_task_from_path(path):
"""Update the context using the current scene state.
When no changes to the context it will not trigger an update.
When the context for a file could not be parsed an error is logged but not
raised.
"""
if not path:
log.warning("Can't update the current task. Scene is not saved.")
return
# Find the current context from the filename
project = io.find_one({"type": "project"},
projection={"config.template.work": True})
template = project['config']['template']['work']
# Force to use the registered to root to avoid using wrong paths
template = pather.format(template, {"root": avalon.api.registered_root()})
try:
context = pather.parse(template, path)
except ParseError:
log.error("Can't update the current task. Unable to parse the "
"task for: %s (pattern: %s)", path, template)
return
# Find the changes between current Session and the path's context.
current = {
"asset": avalon.api.Session["AVALON_ASSET"],
"task": avalon.api.Session["AVALON_TASK"]
# "app": avalon.api.Session["AVALON_APP"]
}
changes = {key: context[key] for key, current_value in current.items()
if context[key] != current_value}
if changes:
log.info("Updating work task to: %s", context)
avalon.api.update_current_task(**changes)
def _rreplace(s, a, b, n=1):
"""Replace a with b in string s from right side n times"""
return b.join(s.rsplit(a, n))
@ -196,7 +213,7 @@ def version_up(filepath):
dirname = os.path.dirname(filepath)
basename, ext = os.path.splitext(os.path.basename(filepath))
regex = "[._]v\d+"
regex = r"[._]v\d+"
matches = re.findall(regex, str(basename), re.IGNORECASE)
if not matches:
log.info("Creating version...")
@ -204,7 +221,7 @@ def version_up(filepath):
new_basename = "{}{}".format(basename, new_label)
else:
label = matches[-1]
version = re.search("\d+", label).group()
version = re.search(r"\d+", label).group()
padding = len(version)
new_version = int(version) + 1
@ -312,140 +329,107 @@ def _get_host_name():
return _host.__name__.rsplit(".", 1)[-1]
def collect_container_metadata(container):
"""Add additional data based on the current host
def get_asset(asset_name=None):
entity_data_keys_from_project_when_miss = [
"frameStart", "frameEnd", "handleStart", "handleEnd", "fps",
"resolutionWidth", "resolutionHeight"
]
If the host application's lib module does not have a function to inject
additional data it will return the input container
entity_keys_from_project_when_miss = []
alternatives = {
"handleStart": "handles",
"handleEnd": "handles"
}
defaults = {
"handleStart": 0,
"handleEnd": 0
}
if not asset_name:
asset_name = avalon.api.Session["AVALON_ASSET"]
asset_document = io.find_one({"name": asset_name, "type": "asset"})
if not asset_document:
raise TypeError("Entity \"{}\" was not found in DB".format(asset_name))
project_document = io.find_one({"type": "project"})
for key in entity_data_keys_from_project_when_miss:
if asset_document["data"].get(key):
continue
value = project_document["data"].get(key)
if value is not None or key not in alternatives:
asset_document["data"][key] = value
continue
alt_key = alternatives[key]
value = asset_document["data"].get(alt_key)
if value is not None:
asset_document["data"][key] = value
continue
value = project_document["data"].get(alt_key)
if value:
asset_document["data"][key] = value
continue
if key in defaults:
asset_document["data"][key] = defaults[key]
for key in entity_keys_from_project_when_miss:
if asset_document.get(key):
continue
value = project_document.get(key)
if value is not None or key not in alternatives:
asset_document[key] = value
continue
alt_key = alternatives[key]
value = asset_document.get(alt_key)
if value:
asset_document[key] = value
continue
value = project_document.get(alt_key)
if value:
asset_document[key] = value
continue
if key in defaults:
asset_document[key] = defaults[key]
return asset_document
def get_project():
io.install()
return io.find_one({"type": "project"})
def get_version_from_path(file):
"""
Finds version number in file path string
Args:
container (dict): collection if representation data in host
file (string): file path
Returns:
generator
"""
# TODO: Improve method of getting the host lib module
host_name = _get_host_name()
package_name = "pype.{}.lib".format(host_name)
hostlib = importlib.import_module(package_name)
if not hasattr(hostlib, "get_additional_data"):
return {}
return hostlib.get_additional_data(container)
def get_asset_fps():
"""Returns project's FPS, if not found will return 25 by default
Returns:
int, float
v: version number in string ('001')
"""
key = "fps"
# FPS from asset data (if set)
asset_data = get_asset_data()
if key in asset_data:
return asset_data[key]
# FPS from project data (if set)
project_data = get_project_data()
if key in project_data:
return project_data[key]
# Fallback to 25 FPS
return 25.0
def get_project_data():
"""Get the data of the current project
The data of the project can contain things like:
resolution
fps
renderer
Returns:
dict:
"""
project_name = io.active_project()
project = io.find_one({"name": project_name,
"type": "project"},
projection={"data": True})
data = project.get("data", {})
return data
def get_asset_data(asset=None):
"""Get the data from the current asset
Args:
asset(str, Optional): name of the asset, eg:
Returns:
dict
"""
asset_name = asset or avalon.api.Session["AVALON_ASSET"]
document = io.find_one({"name": asset_name,
"type": "asset"})
data = document.get("data", {})
return data
def get_data_hierarchical_attr(entity, attr_name):
vp_attr = 'visualParent'
data = entity['data']
value = data.get(attr_name, None)
if value is not None:
return value
elif vp_attr in data:
if data[vp_attr] is None:
parent_id = entity['parent']
else:
parent_id = data[vp_attr]
parent = io.find_one({"_id": parent_id})
return get_data_hierarchical_attr(parent, attr_name)
else:
return None
def get_avalon_project_config_schema():
schema = 'avalon-core:config-1.0'
return schema
def get_avalon_project_template_schema():
schema = "avalon-core:project-2.0"
return schema
def get_avalon_project_template():
from pypeapp import Anatomy
"""
Get avalon template
Returns:
dictionary with templates
"""
templates = Anatomy().templates
proj_template = {}
proj_template['workfile'] = templates["avalon"]["workfile"]
proj_template['work'] = templates["avalon"]["work"]
proj_template['publish'] = templates["avalon"]["publish"]
return proj_template
def get_avalon_asset_template_schema():
schema = "avalon-core:asset-2.0"
return schema
pattern = re.compile(r"[\._]v([0-9]+)")
try:
return pattern.findall(file)[0]
except IndexError:
log.error(
"templates:get_version_from_workfile:"
"`{}` missing version string."
"Example `v004`".format(file)
)
def get_avalon_database():
@ -455,31 +439,20 @@ def get_avalon_database():
def set_io_database():
project = os.environ.get('AVALON_PROJECT', '')
asset = os.environ.get('AVALON_ASSET', '')
silo = os.environ.get('AVALON_SILO', '')
os.environ['AVALON_PROJECT'] = project
os.environ['AVALON_ASSET'] = asset
os.environ['AVALON_SILO'] = silo
required_keys = ["AVALON_PROJECT", "AVALON_ASSET", "AVALON_SILO"]
for key in required_keys:
os.environ[key] = os.environ.get(key, "")
io.install()
def get_all_avalon_projects():
db = get_avalon_database()
project_names = db.collection_names()
projects = []
for name in project_names:
for name in db.collection_names():
projects.append(db[name].find_one({'type': 'project'}))
return projects
def get_presets_path():
    """Return the path of the ``presets`` folder inside ``PYPE_CONFIG``."""
    config_root = os.environ['PYPE_CONFIG']
    return os.path.sep.join([config_root, 'presets'])
def filter_pyblish_plugins(plugins):
"""
This servers as plugin filter / modifier for pyblish. It will load plugin
@ -494,10 +467,18 @@ def filter_pyblish_plugins(plugins):
host = api.current_host()
presets = config.get_presets().get('plugins', {}).get(host, {}).get(
"publish", {}
)
# iterate over plugins
for plugin in plugins[:]:
# skip if there are no presets to process
if not presets:
continue
try:
config_data = config.get_presets()['plugins'][host]["publish"][plugin.__name__] # noqa: E501
config_data = presets[plugin.__name__] # noqa: E501
except KeyError:
continue
@ -510,3 +491,73 @@ def filter_pyblish_plugins(plugins):
option, value, plugin.__name__))
setattr(plugin, option, value)
def get_subsets(asset_name,
                regex_filter=None,
                version=None,
                representations=None):
    """
    Query subsets with filter on name.

    The method will return all found subsets together with their chosen
    version and representations. The version can be specified by number,
    otherwise the latest one is used. Representations can be filtered by
    name.

    Arguments:
        asset_name (str): asset (shot) name
        regex_filter (raw): raw string with filter pattern
        version (str or int): `last` or number of version
        representations (list): representation names to keep; defaults
            to ["exr", "dpx"]

    Returns:
        dict: subsets with version and representaions in keys
    """
    from avalon import io

    # Use a None sentinel instead of a mutable default argument so the
    # default list cannot be shared (and mutated) between calls.
    if representations is None:
        representations = ["exr", "dpx"]

    # query asset from db
    asset_io = io.find_one({"type": "asset",
                            "name": asset_name})

    # check if anything returned
    assert asset_io, "Asset not existing. \
        Check correct name: `{}`".format(asset_name)

    # create subsets query filter
    filter_query = {"type": "subset", "parent": asset_io["_id"]}

    # add regex filter string into query filter
    if regex_filter:
        filter_query.update({"name": {"$regex": r"{}".format(regex_filter)}})
    else:
        filter_query.update({"name": {"$regex": r'.*'}})

    # query all subsets
    subsets = [s for s in io.find(filter_query)]
    assert subsets, "No subsets found. Check correct filter. Try this for start `r'.*'`: asset: `{}`".format(asset_name)  # noqa: E501

    output_dict = {}
    # Process subsets
    for subset in subsets:
        if not version:
            # No explicit version requested: take the highest one.
            version_sel = io.find_one({"type": "version",
                                       "parent": subset["_id"]},
                                      sort=[("name", -1)])
        else:
            assert isinstance(version, int), "version needs to be `int` type"
            version_sel = io.find_one({"type": "version",
                                       "parent": subset["_id"],
                                       "name": int(version)})

        # Guard against a missing version document; subscripting None
        # below would raise a TypeError otherwise.
        if version_sel is None:
            continue

        find_dict = {"type": "representation",
                     "parent": version_sel["_id"]}

        # `repr_name` avoids shadowing the builtin `repr`.
        filter_repr = {
            "$or": [{"name": repr_name} for repr_name in representations]
        }

        find_dict.update(filter_repr)
        repres_out = [i for i in io.find(find_dict)]

        if len(repres_out) > 0:
            # NOTE: the key "representaions" is misspelled but kept for
            # backward compatibility with existing consumers.
            output_dict[subset["name"]] = {"version": version_sel,
                                           "representaions": repres_out}

    return output_dict

View file

@ -280,8 +280,8 @@ def collect_animation_data():
# build attributes
data = OrderedDict()
data["startFrame"] = start
data["endFrame"] = end
data["frameStart"] = start
data["frameEnd"] = end
data["handles"] = 0
data["step"] = 1.0
data["fps"] = fps
@ -1858,16 +1858,16 @@ def set_context_settings():
# Todo (Wijnand): apply renderer and resolution of project
project_data = lib.get_project_data()
asset_data = lib.get_asset_data()
project_data = lib.get_project()["data"]
asset_data = lib.get_asset()["data"]
# Set project fps
fps = asset_data.get("fps", project_data.get("fps", 25))
set_scene_fps(fps)
# Set project resolution
width_key = "resolution_width"
height_key = "resolution_height"
width_key = "resolutionWidth"
height_key = "resolutionHeight"
width = asset_data.get(width_key, project_data.get(width_key, 1920))
height = asset_data.get(height_key, project_data.get(height_key, 1080))
@ -1887,7 +1887,7 @@ def validate_fps():
"""
fps = lib.get_asset_fps()
fps = lib.get_asset()["data"]["fps"]
current_fps = mel.eval('currentTimeUnitToFPS()') # returns float
if current_fps != fps:

View file

@ -1,28 +1,15 @@
import os
import sys
import logging
import nuke
from avalon import api as avalon
from avalon.tools import workfiles
from pyblish import api as pyblish
from .. import api
from pype.nuke import menu
import logging
from .lib import (
create_write_node
)
import nuke
from pypeapp import Logger
# #removing logger handler created in avalon_core
# for name, handler in [(handler.get_name(), handler)
# for handler in Logger.logging.root.handlers[:]]:
# if "pype" not in str(name).lower():
# Logger.logging.root.removeHandler(handler)
from . import lib
self = sys.modules[__name__]
@ -72,13 +59,14 @@ class NukeHandler(logging.Handler):
'''Adding Nuke Logging Handler'''
log.info([handler.get_name() for handler in logging.root.handlers[:]])
nuke_handler = NukeHandler()
if nuke_handler.get_name() \
not in [handler.get_name()
for handler in logging.root.handlers[:]]:
logging.getLogger().addHandler(nuke_handler)
logging.getLogger().setLevel(logging.INFO)
log.info([handler.get_name() for handler in logging.root.handlers[:]])
def reload_config():
"""Attempt to reload pipeline at run-time.
@ -90,10 +78,7 @@ def reload_config():
import importlib
for module in (
"app",
"app.api",
"{}.api".format(AVALON_CONFIG),
"{}.templates".format(AVALON_CONFIG),
"{}.nuke.actions".format(AVALON_CONFIG),
"{}.nuke.templates".format(AVALON_CONFIG),
"{}.nuke.menu".format(AVALON_CONFIG),
@ -109,9 +94,8 @@ def reload_config():
def install():
# api.set_avalon_workdir()
# reload_config()
''' Installing all requarements for Nuke host
'''
log.info("Registering Nuke plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
@ -120,7 +104,7 @@ def install():
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
workfile_settings = lib.WorkfileSettings()
# Disable all families except for the ones we explicitly want to see
family_states = [
"write",
@ -130,22 +114,30 @@ def install():
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
menu.install()
# Workfiles.
launch_workfiles = os.environ.get("WORKFILES_STARTUP")
if launch_workfiles:
nuke.addOnCreate(launch_workfiles_app, nodeClass="Root")
# Set context settings.
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root")
menu.install()
def launch_workfiles_app():
    '''Show the Workfiles tool, but only once per host session.

    The module-level ``workfiles_launched`` flag guards against the
    Root-node onCreate callback firing more than once.
    '''
    if self.workfiles_launched:
        return
    self.workfiles_launched = True
    workfiles.show(os.environ["AVALON_WORKDIR"])
def uninstall():
'''Uninstalling host's integration
'''
log.info("Deregistering Nuke plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
@ -154,8 +146,13 @@ def uninstall():
pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
reload_config()
menu.uninstall()
def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle node passthrough states on instance toggles."""
log.info("instance toggle: {}, old_value: {}, new_value:{} ".format(
instance, old_value, new_value))

View file

@ -1,6 +1,3 @@
# absolute_import is needed to counter the `module has no cmds error` in Maya
from __future__ import absolute_import
import pyblish.api
from avalon.nuke.lib import (
@ -12,7 +9,7 @@ from ..action import get_errored_instances_from_context
class SelectInvalidAction(pyblish.api.Action):
"""Select invalid nodes in Maya when plug-in failed.
"""Select invalid nodes in Nuke when plug-in failed.
To retrieve the invalid nodes this assumes a static `get_invalid()`
method is available on the plugin.

File diff suppressed because it is too large Load diff

View file

@ -2,21 +2,25 @@ import nuke
from avalon.api import Session
from pype.nuke import lib
from pypeapp import Logger
log = Logger().get_logger(__name__, "nuke")
def install():
menubar = nuke.menu("Nuke")
menu = menubar.findItem(Session["AVALON_LABEL"])
workfile_settings = lib.WorkfileSettings()
# replace reset resolution from avalon core to pype's
name = "Reset Resolution"
new_name = "Set Resolution"
rm_item = [
(i, item) for i, item in enumerate(menu.items()) if name in item.name()
][0]
log.debug("Changing Item: {}".format(rm_item))
# rm_item[1].setEnabled(False)
menu.removeItem(rm_item[1].name())
menu.addCommand(new_name, lib.reset_resolution, index=rm_item[0])
menu.addCommand(new_name, workfile_settings.reset_resolution, index=(rm_item[0]))
# replace reset frame range from avalon core to pype's
name = "Reset Frame Range"
@ -24,18 +28,41 @@ def install():
rm_item = [
(i, item) for i, item in enumerate(menu.items()) if name in item.name()
][0]
log.debug("Changing Item: {}".format(rm_item))
# rm_item[1].setEnabled(False)
menu.removeItem(rm_item[1].name())
menu.addCommand(new_name, lib.reset_frame_range_handles, index=rm_item[0])
menu.addCommand(new_name, workfile_settings.reset_frame_range_handles, index=(rm_item[0]))
# add colorspace menu item
name = "Set colorspace"
menu.addCommand(
name, lib.set_colorspace,
name, workfile_settings.set_colorspace,
index=(rm_item[0]+2)
)
log.debug("Adding menu item: {}".format(name))
# add workfile builder menu item
name = "Build First Workfile.."
menu.addCommand(
name, lib.BuildWorkfile().process,
index=(rm_item[0]+7)
)
log.debug("Adding menu item: {}".format(name))
# add item that applies all setting above
name = "Apply all settings"
menu.addCommand(
name, lib.set_context_settings, index=(rm_item[0]+3)
name, workfile_settings.set_context_settings, index=(rm_item[0]+3)
)
log.debug("Adding menu item: {}".format(name))
def uninstall():
    '''Remove every entry from the Avalon menu in the Nuke menu bar.'''
    menubar = nuke.menu("Nuke")
    avalon_menu = menubar.findItem(Session["AVALON_LABEL"])

    for entry in avalon_menu.items():
        log.info("Removing menu item: {}".format(entry.name()))
        avalon_menu.removeItem(entry.name())

View file

@ -20,6 +20,8 @@ def get_colorspace_preset():
def get_node_dataflow_preset(**kwarg):
''' Get preset data for dataflow (fileType, compression, bitDepth)
'''
log.info(kwarg)
host = kwarg.get("host", "nuke")
cls = kwarg.get("class", None)
@ -39,6 +41,8 @@ def get_node_dataflow_preset(**kwarg):
def get_node_colorspace_preset(**kwarg):
''' Get preset data for colorspace
'''
log.info(kwarg)
host = kwarg.get("host", "nuke")
cls = kwarg.get("class", None)

View file

@ -1,24 +1,42 @@
import os
from avalon.tools import workfiles
from pypeapp import Logger
import hiero
from avalon import api as avalon
from pyblish import api as pyblish
from .. import api
from .workio import (
open,
save,
current_file,
has_unsaved_changes,
file_extensions,
work_root
)
from .menu import (
install as menu_install,
_update_menu_task_label
)
from .tags import add_tags_from_presets
from pypeapp import Logger
import hiero
__all__ = [
# Workfiles API
"open",
"save",
"current_file",
"has_unsaved_changes",
"file_extensions",
"work_root",
]
# get logger
log = Logger().get_logger(__name__, "nukestudio")
''' Creating all important host related variables '''
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
# plugin root path
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@ -28,13 +46,21 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nukestudio", "inventory")
# registering particular pyblish gui but `lite` is recomended!!
if os.getenv("PYBLISH_GUI", None):
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))
def install(config):
"""
Installing Nukestudio integration for avalon
Args:
config (obj): avalon config module `pype` in our case, it is not used but required by avalon.api.install()
"""
# adding all events
_register_events()
log.info("Registering NukeStudio plug-ins..")
@ -53,6 +79,7 @@ def install(config):
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# install menu
menu_install()
# Workfiles.
@ -70,11 +97,26 @@ def install(config):
def add_tags(event):
    """
    Event handler creating tags automatically after NukeStudio starts.

    Delegates to ``add_tags_from_presets`` which reads the tag
    definitions from the configured presets.

    Args:
        event (obj): required by the event API but unused
    """
    add_tags_from_presets()
def launch_workfiles_app(event):
workfiles.show(os.environ["AVALON_WORKDIR"])
"""
Event for launching workfiles after nukestudio start
Args:
event (obj): required but unused
"""
from .lib import set_workfiles
set_workfiles()
# Closing the new project.
event.sender.close()
@ -86,6 +128,10 @@ def launch_workfiles_app(event):
def uninstall():
"""
Uninstalling Nukestudio integration for avalon
"""
log.info("Deregistering NukeStudio plug-ins..")
pyblish.deregister_host("nukestudio")
pyblish.deregister_plugin_path(PUBLISH_PATH)
@ -94,6 +140,11 @@ def uninstall():
def _register_events():
"""
Adding all callbacks.
"""
# if task changed then change notext of nukestudio
avalon.on("taskChanged", _update_menu_task_label)
log.info("Installed event callback for 'taskChanged'..")
@ -108,4 +159,5 @@ def ls():
See the `container.json` schema for details on how it should look,
and the Maya equivalent, which is in `avalon.maya.pipeline`
"""
# TODO: listing all availabe containers form sequence
return

View file

@ -1,19 +1,13 @@
# Standard library
import os
import sys
# Pyblish libraries
import pyblish.api
import avalon.api as avalon
import pype.api as pype
from avalon.vendor.Qt import (QtWidgets, QtGui)
# Host libraries
import hiero
import pyblish.api
import avalon.api as avalon
from avalon.vendor.Qt import (QtWidgets, QtGui)
import pype.api as pype
from pypeapp import Logger
log = Logger().get_logger(__name__, "nukestudio")
cached_process = None
@ -30,12 +24,18 @@ AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
def set_workfiles():
''' Wrapping function for workfiles launcher '''
from avalon.tools import workfiles
# import session to get project dir
S = avalon.Session
active_project_root = os.path.normpath(
os.path.join(S['AVALON_PROJECTS'], S['AVALON_PROJECT'])
)
workdir = os.environ["AVALON_WORKDIR"]
# show workfile gui
workfiles.show(workdir)
# getting project
project = hiero.core.projects()[-1]
# set project root with backward compatibility
@ -46,14 +46,14 @@ def set_workfiles():
project.setProjectRoot(active_project_root)
# get project data from avalon db
project_data = pype.get_project_data()
project_data = pype.get_project()["data"]
log.info("project_data: {}".format(project_data))
# get format and fps property from avalon db on project
width = project_data['resolution_width']
height = project_data['resolution_height']
pixel_aspect = project_data['pixel_aspect']
width = project_data["resolutionWidth"]
height = project_data["resolutionHeight"]
pixel_aspect = project_data["pixelAspect"]
fps = project_data['fps']
format_name = project_data['code']
@ -64,11 +64,10 @@ def set_workfiles():
# set fps to hiero project
project.setFramerate(fps)
# TODO: add auto colorspace set from project drop
log.info("Project property has been synchronised with Avalon db")
def reload_config():
"""Attempt to reload pipeline at run-time.
@ -189,6 +188,10 @@ def add_submission():
class PublishAction(QtWidgets.QAction):
"""
Action with is showing as menu item
"""
def __init__(self):
QtWidgets.QAction.__init__(self, "Publish", None)
self.triggered.connect(self.publish)
@ -213,7 +216,8 @@ class PublishAction(QtWidgets.QAction):
def _show_no_gui():
"""Popup with information about how to register a new GUI
"""
Popup with information about how to register a new GUI
In the event of no GUI being registered or available,
this information dialog will appear to guide the user
through how to get set up with one.
@ -283,3 +287,59 @@ def _show_no_gui():
messagebox.setStandardButtons(messagebox.Ok)
messagebox.exec_()
def CreateNukeWorkfile(nodes=None,
nodes_effects=None,
to_timeline=False,
**kwargs):
''' Creating nuke workfile with particular version with given nodes
Also it is creating timeline track items as precomps.
Arguments:
nodes(list of dict): each key in dict is knob order is important
to_timeline(type): will build trackItem with metadata
Returns:
bool: True if done
Raises:
Exception: with traceback
'''
import hiero.core
from avalon.nuke import imprint
from pype.nuke import (
lib as nklib
)
# check if the file exists if does then Raise "File exists!"
if os.path.exists(filepath):
raise FileExistsError("File already exists: `{}`".format(filepath))
# if no representations matching then
# Raise "no representations to be build"
if len(representations) == 0:
raise AttributeError("Missing list of `representations`")
# check nodes input
if len(nodes) == 0:
log.warning("Missing list of `nodes`")
# create temp nk file
nuke_script = hiero.core.nuke.ScriptWriter()
# create root node and save all metadata
root_node = hiero.core.nuke.RootNode()
root_path = os.environ["AVALON_PROJECTS"]
nuke_script.addNode(root_node)
# here to call pype.nuke.lib.BuildWorkfile
script_builder = nklib.BuildWorkfile(
root_node=root_node,
root_path=root_path,
nodes=nuke_script.getNodes(),
**kwargs
)

View file

@ -1,24 +1,23 @@
import os
import sys
import hiero.core
from pypeapp import Logger
from avalon.api import Session
from hiero.ui import findMenuAction
# this way we secure compatibility between nuke 10 and 11
try:
from PySide.QtGui import *
except Exception:
from PySide2.QtGui import *
from PySide2.QtWidgets import *
from hiero.ui import findMenuAction
from avalon.api import Session
from .tags import add_tags_from_presets
from .lib import (
reload_config,
set_workfiles
)
from pypeapp import Logger
log = Logger().get_logger(__name__, "nukestudio")
@ -45,6 +44,11 @@ def _update_menu_task_label(*args):
def install():
"""
Installing menu into Nukestudio
"""
# here is the best place to add menu
from avalon.tools import (
creator,
@ -127,8 +131,6 @@ def install():
'icon': QIcon('icons:ColorAdd.png')
}]
# Create menu items
for a in actions:
add_to_menu = menu

View file

@ -14,9 +14,9 @@ def create_nk_script_clips(script_lst, seq=None):
'handles': 10,
'handleStart': 15, # added asymetrically to handles
'handleEnd': 10, # added asymetrically to handles
'timelineIn': 16,
'startFrame': 991,
'endFrame': 1023,
"clipIn": 16,
"frameStart": 991,
"frameEnd": 1023,
'task': 'Comp-tracking',
'work_dir': 'VFX_PR',
'shot': '00010'
@ -55,12 +55,12 @@ def create_nk_script_clips(script_lst, seq=None):
if media_in:
source_in = media_in + handle_start
else:
source_in = nk['startFrame'] + handle_start
source_in = nk["frameStart"] + handle_start
if media_duration:
source_out = (media_in + media_duration - 1) - handle_end
else:
source_out = nk['endFrame'] - handle_end
source_out = nk["frameEnd"] - handle_end
print("__ media: `{}`".format(media))
print("__ media_in: `{}`".format(media_in))
@ -98,8 +98,8 @@ def create_nk_script_clips(script_lst, seq=None):
trackItem.setSourceIn(source_in)
trackItem.setSourceOut(source_out)
trackItem.setSourceIn(source_in)
trackItem.setTimelineIn(nk['timelineIn'])
trackItem.setTimelineOut(nk['timelineIn'] + (source_out - source_in))
trackItem.setTimelineIn(nk["clipIn"])
trackItem.setTimelineOut(nk["clipIn"] + (source_out - source_in))
track.addTrackItem(trackItem)
track.addTrackItem(trackItem)
clips_lst.append(trackItem)
@ -179,9 +179,9 @@ script_lst = [{
'handles': 10,
'handleStart': 10,
'handleEnd': 10,
'timelineIn': 16,
'startFrame': 991,
'endFrame': 1023,
"clipIn": 16,
"frameStart": 991,
"frameEnd": 1023,
'task': 'platesMain',
'work_dir': 'shots',
'shot': '120sh020'

View file

@ -1,5 +1,6 @@
import re
import os
import hiero
from pypeapp import (
config,
@ -7,8 +8,6 @@ from pypeapp import (
)
from avalon import io
import hiero
log = Logger().get_logger(__name__, "nukestudio")

44
pype/nukestudio/workio.py Normal file
View file

@ -0,0 +1,44 @@
import os
import hiero
from avalon import api
def file_extensions():
    """Return the workfile extensions handled by NukeStudio."""
    return [".hrox"]
def has_unsaved_changes():
    """Always report unsaved changes.

    There is no API for querying a project's dirty state, so saving is
    enforced unconditionally.
    """
    return True
def save(filepath):
    """Save the current project to *filepath*.

    Saves the most recently opened project; when no project exists yet,
    a new one is created and saved instead.

    Args:
        filepath (str): destination ``.hrox`` path.
    """
    projects = hiero.core.projects()
    if projects:
        projects[-1].saveAs(filepath)
    else:
        # Fresh session with no project open: ``projects()[-1]`` would
        # raise IndexError, so create a new project first.
        hiero.core.newProject().saveAs(filepath)
def open(filepath):
    """Open the project at *filepath* in NukeStudio.

    Part of the avalon workfiles API; the name intentionally shadows the
    ``open`` builtin.

    Returns:
        bool: always True -- presumably the workfiles tool treats a
        truthy return as success; verify against caller.
    """
    hiero.core.openProject(filepath)
    return True
def current_file():
    """Return the normalised path of the current project, or None.

    None is returned when no project is open or when the project has
    never been saved (its path is empty).
    """
    projects = hiero.core.projects()
    if not projects:
        # No project open at all; previously this raised IndexError.
        return None

    current_path = projects[-1].path()

    # An unsaved project reports an empty path. Test before normalising:
    # os.path.normpath("") returns ".", so the original comparison with
    # "" after normpath could never match.
    if not current_path:
        return None

    return os.path.normpath(current_path)
def work_root():
    """Return the session work directory, normalised to forward slashes."""
    workdir = api.Session["AVALON_WORKDIR"]
    return os.path.normpath(workdir).replace("\\", "/")

View file

@ -87,13 +87,13 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
context.data["currentFile"] = current_file
# get project data from avalon
project_data = pype.get_project_data()
project_data = pype.get_project()["data"]
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset_data()
asset_data = pype.get_asset()["data"]
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}

View file

@ -39,19 +39,18 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
assert instances_data, "No `asset_default` data in json file"
asset_name = a_session["AVALON_ASSET"]
entity = io.find_one({"name": asset_name,
"type": "asset"})
entity = pype.get_asset(asset_name)
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("fstart", None)
frame_start = context.data["assetData"].get("frameStart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
frame_start = entity["data"]["frameStart"]
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["fstart"]
frame_start = asset_default["frameStart"]
assert frame_start, "No `frame_start` data found, "
"please set `fstart` on asset"
@ -61,7 +60,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
handles = context.data["assetData"].get("handles", None)
if not handles:
# get frame start > second try from parent data
handles = pype.get_data_hierarchical_attr(entity, "handles")
handles = entity["data"]["handles"]
if not handles:
# get frame start > third try from parent data
handles = asset_default["handles"]
@ -129,7 +128,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
instance.data.update({
"subset": subset_name,
"task": task,
"fstart": frame_start,
"frameStart": frame_start,
"handles": handles,
"host": host,
"asset": asset,

View file

@ -26,7 +26,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'write': 'render',
'review': 'mov',
'plate': 'img',
'audio': 'audio'
'audio': 'audio',
'workfile': 'scene',
'animation': 'cache'
}
def process(self, instance):
@ -74,11 +76,11 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
)
else:
end_frame += (
instance.data['endFrame'] - instance.data['startFrame']
instance.data["frameEnd"] - instance.data["frameStart"]
)
if not comp.get('frameRate'):
comp['frameRate'] = instance.context.data['fps']
if not comp.get('fps'):
comp['fps'] = instance.context.data['fps']
location = self.get_ftrack_location(
'ftrack.server', ft_session
)
@ -88,7 +90,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
"metadata": {'ftr_meta': json.dumps({
'frameIn': int(start_frame),
'frameOut': int(end_frame),
'frameRate': float(comp['frameRate'])})}
'frameRate': float(comp['fps'])})}
}
comp['thumbnail'] = False
else:

View file

@ -106,11 +106,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'])
existing_tasks.append(child['name'].lower())
# existing_tasks.append(child['type']['name'])
for task in tasks:
if task in existing_tasks:
if task.lower() in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)

View file

@ -0,0 +1,27 @@
import pyblish.api
import os
class IntegrateCleanComponentData(pyblish.api.InstancePlugin):
"""
Cleaning up thumbnail an mov files after they have been integrated
"""
order = pyblish.api.IntegratorOrder + 0.5
label = 'Clean component data'
families = ["ftrack"]
optional = True
active = True
def process(self, instance):
for comp in instance.data['representations']:
self.log.debug('component {}'.format(comp))
if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])):
os.remove(comp['published_path'])
self.log.info('Thumbnail image was erased')
elif comp.get('preview') or ("preview" in comp.get('tags', [])):
os.remove(comp['published_path'])
self.log.info('Preview mov file was erased')

View file

@ -27,8 +27,8 @@ class FusionSetFrameRangeLoader(api.Loader):
version = context['version']
version_data = version.get("data", {})
start = version_data.get("startFrame", None)
end = version_data.get("endFrame", None)
start = version_data.get("frameStart", None)
end = version_data.get("frameEnd", None)
if start is None or end is None:
print("Skipping setting frame range because start or "
@ -60,8 +60,8 @@ class FusionSetFrameRangeWithHandlesLoader(api.Loader):
version = context['version']
version_data = version.get("data", {})
start = version_data.get("startFrame", None)
end = version_data.get("endFrame", None)
start = version_data.get("frameStart", None)
end = version_data.get("frameEnd", None)
if start is None or end is None:
print("Skipping setting frame range because start or "

View file

@ -145,7 +145,7 @@ class FusionLoadSequence(api.Loader):
tool["Clip"] = path
# Set global in point to start frame (if in version.data)
start = context["version"]["data"].get("startFrame", None)
start = context["version"]["data"].get("frameStart", None)
if start is not None:
loader_shift(tool, start, relative=False)
@ -175,7 +175,7 @@ class FusionLoadSequence(api.Loader):
been set.
- GlobalIn: Fusion reset to comp's global in if duration changes
- We change it to the "startFrame"
- We change it to the "frameStart"
- GlobalEnd: Fusion resets to globalIn + length if duration changes
- We do the same like Fusion - allow fusion to take control.
@ -212,7 +212,7 @@ class FusionLoadSequence(api.Loader):
# Get start frame from version data
version = io.find_one({"type": "version",
"_id": representation["parent"]})
start = version["data"].get("startFrame")
start = version["data"].get("frameStart")
if start is None:
self.log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "

View file

@ -23,7 +23,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
"""Collect Fusion saver instances
This additionally stores the Comp start and end render range in the
current context's data as "startFrame" and "endFrame".
current context's data as "frameStart" and "frameEnd".
"""
@ -43,8 +43,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
savers = [tool for tool in tools if tool.ID == "Saver"]
start, end = get_comp_render_range(comp)
context.data["startFrame"] = start
context.data["endFrame"] = end
context.data["frameStart"] = start
context.data["frameEnd"] = end
for tool in savers:
path = tool["Clip"][comp.TIME_UNDEFINED]

View file

@ -53,8 +53,8 @@ class PublishImageSequence(pyblish.api.InstancePlugin):
# The instance has most of the information already stored
metadata = {
"regex": regex,
"startFrame": instance.context.data["startFrame"],
"endFrame": instance.context.data["endFrame"],
"frameStart": instance.context.data["frameStart"],
"frameEnd": instance.context.data["frameEnd"],
"families": ["imagesequence"],
}

View file

@ -79,8 +79,8 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
"Plugin": "Fusion",
"Frames": "{start}-{end}".format(
start=int(context.data["startFrame"]),
end=int(context.data["endFrame"])
start=int(context.data["frameStart"]),
end=int(context.data["frameEnd"])
),
"Comment": comment,

View file

@ -1,22 +1,15 @@
import os
import subprocess
import json
from pype import lib as pypelib
from pypeapp import config
from avalon import api
def get_config_data():
path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json']
filepath = os.path.sep.join(path_items)
data = dict()
with open(filepath) as data_file:
data = json.load(data_file)
return data
def get_families():
families = []
paths = get_config_data().get('djv_paths', [])
paths = config.get_presets().get("djv_view", {}).get("config", {}).get(
"djv_paths", []
)
for path in paths:
if os.path.exists(path):
families.append("*")
@ -25,13 +18,15 @@ def get_families():
def get_representation():
return get_config_data().get('file_ext', [])
return config.get_presets().get("djv_view", {}).get("config", {}).get(
'file_ext', []
)
class OpenInDJV(api.Loader):
"""Open Image Sequence with system default"""
config_data = get_config_data()
config_data = config.get_presets().get("djv_view", {}).get("config", {})
families = get_families()
representations = get_representation()
@ -42,7 +37,9 @@ class OpenInDJV(api.Loader):
def load(self, context, name, namespace, data):
self.djv_path = None
paths = get_config_data().get('djv_paths', [])
paths = config.get_presets().get("djv_view", {}).get("config", {}).get(
"djv_paths", []
)
for path in paths:
if os.path.exists(path):
self.djv_path = path

View file

@ -3,11 +3,33 @@ import shutil
import pyblish.api
def clean_renders(instance):
transfers = instance.data.get("transfers", list())
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
dirnames = []
for src, dest in transfers:
if os.path.normpath(src) != os.path.normpath(dest):
if instance_family == 'render' or 'render' in current_families:
os.remove(src)
dirnames.append(os.path.dirname(src))
# make unique set
cleanup_dirs = set(dirnames)
for dir in cleanup_dirs:
try:
os.rmdir(dir)
except OSError:
# directory is not empty, skipping
continue
class CleanUp(pyblish.api.InstancePlugin):
"""Cleans up the staging directory after a successful publish.
The removal will only happen for staging directories which are inside the
temporary folder, otherwise the folder is ignored.
This will also clean published renders and delete their parent directories.
"""
@ -36,3 +58,5 @@ class CleanUp(pyblish.api.InstancePlugin):
self.log.info("Removing temporary folder ...")
shutil.rmtree(staging_dir)
self.log.info("Cleaning renders ...")
clean_renders(instance)

View file

@ -67,9 +67,9 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
if isinstance(component['files'], list):
collections, remainder = clique.assemble(component['files'])
self.log.debug("collecting sequence: {}".format(collections))
instance.data['startFrame'] = int(component['startFrame'])
instance.data['endFrame'] = int(component['endFrame'])
instance.data['frameRate'] = int(component['frameRate'])
instance.data["frameStart"] = int(component["frameStart"])
instance.data["frameEnd"] = int(component["frameEnd"])
instance.data['fps'] = int(component['fps'])
instance.data["representations"].append(component)

View file

@ -6,14 +6,13 @@ from pprint import pformat
import pyblish.api
from avalon import api
import pype.api as pype
def collect(root,
regex=None,
exclude_regex=None,
startFrame=None,
endFrame=None):
frame_start=None,
frame_end=None):
"""Collect sequence collections in root"""
from avalon.vendor import clique
@ -52,10 +51,10 @@ def collect(root,
# Exclude any frames outside start and end frame.
for collection in collections:
for index in list(collection.indexes):
if startFrame is not None and index < startFrame:
if frame_start is not None and index < frame_start:
collection.indexes.discard(index)
continue
if endFrame is not None and index > endFrame:
if frame_end is not None and index > frame_end:
collection.indexes.discard(index)
continue
@ -77,8 +76,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
api.Session["AVALON_ASSET"]
subset (str): The subset to publish to. If not provided the sequence's
head (up to frame number) will be used.
startFrame (int): The start frame for the sequence
endFrame (int): The end frame for the sequence
frame_start (int): The start frame for the sequence
frame_end (int): The end frame for the sequence
root (str): The path to collect from (can be relative to the .json)
regex (str): A regex for the sequence filename
exclude_regex (str): A regex for filename to exclude from collection
@ -143,8 +142,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
collections = collect(root=root,
regex=regex,
exclude_regex=data.get("exclude_regex"),
startFrame=data.get("startFrame"),
endFrame=data.get("endFrame"))
frame_start=data.get("frameStart"),
frame_end=data.get("frameEnd"))
self.log.info("Found collections: {}".format(collections))
@ -160,10 +159,13 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
# Get family from the data
families = data.get("families", ["render"])
assert isinstance(families, (list, tuple)), "Must be iterable"
assert families, "Must have at least a single family"
families.append("ftrack")
families.append("review")
if "render" not in families:
families.append("render")
if "ftrack" not in families:
families.append("ftrack")
if "review" not in families:
families.append("review")
for collection in collections:
instance = context.create_instance(str(collection))
self.log.info("Collection: %s" % list(collection))
@ -176,8 +178,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
# If no start or end frame provided, get it from collection
indices = list(collection.indexes)
start = data.get("startFrame", indices[0])
end = data.get("endFrame", indices[-1])
start = data.get("frameStart", indices[0])
end = data.get("frameEnd", indices[-1])
# root = os.path.normpath(root)
# self.log.info("Source: {}}".format(data.get("source", "")))
@ -191,8 +193,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
"subset": subset,
"asset": data.get("asset", api.Session["AVALON_ASSET"]),
"stagingDir": root,
"startFrame": start,
"endFrame": end,
"frameStart": start,
"frameEnd": end,
"fps": fps,
"source": data.get('source', '')
})
@ -208,7 +210,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
'files': list(collection),
"stagingDir": root,
"anatomy_template": "render",
"frameRate": fps,
"fps": fps,
"tags": ['review']
}
instance.data["representations"].append(representation)

View file

@ -1,7 +1,7 @@
import os
import json
import pyblish.api
from pype import lib as pypelib
from pypeapp import config
class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
@ -12,13 +12,5 @@ class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
hosts = ["shell"]
def process(self, context):
config_items = [
pypelib.get_presets_path(),
"ftrack",
"output_representation.json"
]
config_file = os.path.sep.join(config_items)
with open(config_file) as data_file:
config_data = json.load(data_file)
config_data = config.get_presets()["ftrack"]["output_representation"]
context.data['output_repre_config'] = config_data

View file

@ -12,6 +12,6 @@ class CollectProjectData(pyblish.api.ContextPlugin):
def process(self, context):
# get project data from avalon db
context.data["projectData"] = pype.get_project_data()
context.data["projectData"] = pype.get_project()["data"]
return

View file

@ -1,7 +1,7 @@
import os
import subprocess
import pype.api
import json
import pype.api
import pyblish
@ -17,6 +17,7 @@ class ExtractBurnin(pype.api.Extractor):
label = "Quicktime with burnins"
order = pyblish.api.ExtractorOrder + 0.03
families = ["review", "burnin"]
hosts = ["nuke", "maya", "shell"]
optional = True
def process(self, instance):
@ -32,7 +33,7 @@ class ExtractBurnin(pype.api.Extractor):
"username": instance.context.data['user'],
"asset": os.environ['AVALON_ASSET'],
"task": os.environ['AVALON_TASK'],
"start_frame": int(instance.data['startFrame']),
"start_frame": int(instance.data["frameStart"]),
"version": version
}
self.log.debug("__ prep_data: {}".format(prep_data))
@ -61,31 +62,55 @@ class ExtractBurnin(pype.api.Extractor):
self.log.debug("__ burnin_data2: {}".format(burnin_data))
json_data = json.dumps(burnin_data)
scriptpath = os.path.normpath(os.path.join(os.environ['PYPE_MODULE_ROOT'],
"pype",
"scripts",
"otio_burnin.py"))
# Get script path.
module_path = os.environ['PYPE_MODULE_ROOT']
# There can be multiple paths in PYPE_MODULE_ROOT, in which case
# we just take first one.
if os.pathsep in module_path:
module_path = module_path.split(os.pathsep)[0]
scriptpath = os.path.normpath(
os.path.join(
module_path,
"pype",
"scripts",
"otio_burnin.py"
)
)
self.log.debug("__ scriptpath: {}".format(scriptpath))
self.log.debug("__ EXE: {}".format(os.getenv("PYPE_PYTHON_EXE")))
try:
p = subprocess.Popen(
[os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data]
)
p.wait()
if not os.path.isfile(full_burnin_path):
raise RuntimeError("File not existing: {}".format(full_burnin_path))
except Exception as e:
raise RuntimeError("Burnin script didn't work: `{}`".format(e))
# Get executable.
executable = os.getenv("PYPE_PYTHON_EXE")
if os.path.exists(full_burnin_path):
repre_update = {
"files": movieFileBurnin,
"name": repre["name"]
}
instance.data["representations"][i].update(repre_update)
# There can be multiple paths in PYPE_PYTHON_EXE, in which case
# we just take first one.
if os.pathsep in executable:
executable = executable.split(os.pathsep)[0]
# removing the source mov file
os.remove(full_movie_path)
self.log.debug("Removed: `{}`".format(full_movie_path))
self.log.debug("__ EXE: {}".format(executable))
args = [executable, scriptpath, json_data]
self.log.debug("Executing: {}".format(args))
pype.api.subprocess(args)
repre_update = {
"files": movieFileBurnin,
"name": repre["name"],
"tags": [x for x in repre["tags"] if x != "delete"]
}
instance.data["representations"][i].update(repre_update)
# removing the source mov file
os.remove(full_movie_path)
self.log.debug("Removed: `{}`".format(full_movie_path))
# Remove any representations tagged for deletion.
for repre in instance.data["representations"]:
if "delete" in repre.get("tags", []):
self.log.debug("Removing representation: {}".format(repre))
instance.data["representations"].remove(repre)
self.log.debug(instance.data["representations"])

View file

@ -2,11 +2,11 @@ import pyblish.api
from avalon import io
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
"""Create entities in Avalon based on collected data."""
order = pyblish.api.IntegratorOrder - 0.1
label = "Integrate Hierarchy To Avalon"
order = pyblish.api.ExtractorOrder - 0.01
label = "Extract Hierarchy To Avalon"
families = ["clip", "shot"]
def process(self, context):

View file

@ -1,7 +1,8 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique
import pype.api
class ExtractJpegEXR(pyblish.api.InstancePlugin):
@ -20,9 +21,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "write", "source"]
def process(self, instance):
start = instance.data.get("startFrame")
start = instance.data.get("frameStart")
stagingdir = os.path.normpath(instance.data.get("stagingDir"))
collected_frames = os.listdir(stagingdir)
@ -59,8 +59,10 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
jpeg_items.append(full_output_path)
subprocess_jpeg = " ".join(jpeg_items)
sub_proc = subprocess.Popen(subprocess_jpeg)
sub_proc.wait()
# run subprocess
self.log.debug("{}".format(subprocess_jpeg))
pype.api.subprocess(subprocess_jpeg)
if "representations" not in instance.data:
instance.data["representations"] = []

View file

@ -1,7 +1,8 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique
import pype.api
from pypeapp import config
@ -19,6 +20,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
label = "Extract Review"
order = pyblish.api.ExtractorOrder + 0.02
families = ["review"]
hosts = ["nuke", "maya", "shell"]
def process(self, instance):
# adding plugin attributes from presets
@ -28,7 +30,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
inst_data = instance.data
fps = inst_data.get("fps")
start_frame = inst_data.get("startFrame")
start_frame = inst_data.get("frameStart")
self.log.debug("Families In: `{}`".format(instance.data["families"]))
@ -53,7 +55,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
ext = "mov"
self.log.warning(
"`ext` attribute not in output profile. Setting to default ext: `mov`")
self.log.debug("instance.families: {}".format(instance.data['families']))
self.log.debug("profile.families: {}".format(profile['families']))
@ -85,7 +87,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
repre_new = repre.copy()
new_tags = tags[:]
new_tags = [x for x in tags if x != "delete"]
p_tags = profile.get('tags', [])
self.log.info("p_tags: `{}`".format(p_tags))
# add families
@ -108,12 +110,42 @@ class ExtractReview(pyblish.api.InstancePlugin):
# necessary input data
# adds start arg only if image sequence
if "mov" not in repre_new['ext']:
if isinstance(repre["files"], list):
input_args.append("-start_number {0} -framerate {1}".format(
start_frame, fps))
input_args.append("-i {}".format(full_input_path))
for audio in instance.data.get("audio", []):
offset_frames = (
instance.data.get("startFrameReview") -
audio["offset"]
)
offset_seconds = offset_frames / fps
if offset_seconds > 0:
input_args.append("-ss")
else:
input_args.append("-itsoffset")
input_args.append(str(abs(offset_seconds)))
input_args.extend(
["-i", audio["filename"]]
)
# Need to merge audio if there are more
# than 1 input.
if len(instance.data["audio"]) > 1:
input_args.extend(
[
"-filter_complex",
"amerge",
"-ac",
"2"
]
)
output_args = []
# preset's output data
output_args.extend(profile.get('output', []))
@ -125,6 +157,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
output_args.append(
"-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
# In case audio is longer than video.
output_args.append("-shortest")
# output filename
output_args.append(full_output_path)
mov_args = [
@ -136,12 +171,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
# run subprocess
self.log.debug("{}".format(subprcs_cmd))
sub_proc = subprocess.Popen(subprcs_cmd)
sub_proc.wait()
if not os.path.isfile(full_output_path):
raise FileExistsError(
"Quicktime wasn't created succesfully")
pype.api.subprocess(subprcs_cmd)
# create representation data
repre_new.update({
@ -157,16 +187,17 @@ class ExtractReview(pyblish.api.InstancePlugin):
repre_new.pop("thumbnail")
# adding representation
self.log.debug("Adding: {}".format(repre_new))
representations_new.append(repre_new)
# if "delete" in tags:
# if "mov" in full_input_path:
# os.remove(full_input_path)
# self.log.debug("Removed: `{}`".format(full_input_path))
else:
continue
else:
continue
for repre in representations_new:
if "delete" in repre.get("tags", []):
representations_new.remove(repre)
self.log.debug(
"new representations: {}".format(representations_new))
instance.data["representations"] = representations_new

View file

@ -404,7 +404,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Include optional data if present in
optionals = [
"startFrame", "endFrame", "step", "handles", "sourceHashes"
"frameStart", "frameEnd", "step", "handles", "sourceHashes"
]
for key in optionals:
if key in instance.data:

View file

@ -36,9 +36,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template from anatomy that should be used for
integrating this file. Only the first level can
be specified right now.
'startFrame'
'endFrame'
'framerate'
"frameStart"
"frameEnd"
'fps'
"""
label = "Integrate Asset New"
@ -63,6 +63,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"rig",
"plate",
"look",
"lut",
"audio"
]
exclude_families = ["clip"]
@ -271,15 +272,22 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug(
"src_tail_collections: {}".format(str(src_collections)))
src_collection = src_collections[0]
# Assert that each member has identical suffix
src_head = src_collection.format("{head}")
src_tail = src_collection.format("{tail}")
# fix dst_padding
valid_files = [x for x in files if src_collection.match(x)]
padd_len = len(
valid_files[0].replace(src_head, "").replace(src_tail, "")
)
src_padding_exp = "%0{}d".format(padd_len)
test_dest_files = list()
for i in [1, 2]:
template_data["representation"] = repre['ext']
template_data["frame"] = src_collection.format(
"{padding}") % i
template_data["frame"] = src_padding_exp % i
anatomy_filled = anatomy.format(template_data)
test_dest_files.append(
@ -295,24 +303,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_head = dst_collection.format("{head}")
dst_tail = dst_collection.format("{tail}")
repre['published_path'] = dst_collection.format()
index_frame_start = None
if repre.get('startFrame'):
if repre.get("frameStart"):
frame_start_padding = len(str(
repre.get('endFrame')))
index_frame_start = repre.get('startFrame')
repre.get("frameEnd")))
index_frame_start = repre.get("frameStart")
dst_padding_exp = src_padding_exp
for i in src_collection.indexes:
src_padding = src_collection.format("{padding}") % i
src_padding = src_padding_exp % i
src_file_name = "{0}{1}{2}".format(
src_head, src_padding, src_tail)
dst_padding = dst_collection.format("{padding}") % i
dst_padding = src_padding_exp % i
if index_frame_start:
dst_padding = "%0{}d".format(
frame_start_padding) % index_frame_start
dst_padding_exp = "%0{}d".format(frame_start_padding)
dst_padding = dst_padding_exp % index_frame_start
index_frame_start += 1
dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)
@ -321,6 +328,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("source: {}".format(src))
instance.data["transfers"].append([src, dst])
repre['published_path'] = "{0}{1}{2}".format(dst_head, dst_padding_exp, dst_tail)
# for imagesequence version data
hashes = '#' * len(dst_padding)
dst = os.path.normpath("{0}{1}{2}".format(
@ -380,7 +388,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"representation": repre['ext']
}
}
self.log.debug("__ _representation: {}".format(representation))
self.log.debug("__ representation: {}".format(representation))
destination_list.append(dst)
self.log.debug("__ destination_list: {}".format(destination_list))
instance.data['destination_list'] = destination_list
@ -396,20 +404,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.info("Registered {} items".format(len(representations)))
def integrate(self, instance):
"""Move the files
""" Move the files.
Through `instance.data["transfers"]`
Through `instance.data["transfers"]`
Args:
instance: the instance to integrate
Args:
instance: the instance to integrate
"""
transfers = instance.data.get("transfers", list())
for src, dest in transfers:
if os.path.normpath(src) != os.path.normpath(dest):
self.copy_file(src, dest)
transfers = instance.data.get("transfers", list())
for src, dest in transfers:
self.copy_file(src, dest)
# Produce hardlinked copies
# Note: hardlink can only be produced between two files on the same
# server/disk and editing one of the two will edit both files at once.
@ -543,8 +554,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# Include optional data if present in
optionals = [
"startFrame", "endFrame", "step", "handles",
"handle_end", "handle_start", "sourceHashes"
"frameStart", "frameEnd", "step", "handles",
"handleEnd", "handleStart", "sourceHashes"
]
for key in optionals:
if key in instance.data:

View file

@ -408,7 +408,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
"comment": context.data.get("comment")}
# Include optional data if present in
optionals = ["startFrame", "endFrame", "step",
optionals = ["frameStart", "frameEnd", "step",
"handles", "colorspace", "fps", "outputDir"]
for key in optionals:

View file

@ -121,7 +121,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
- publishJobState (str, Optional): "Active" or "Suspended"
This defaults to "Suspended"
This requires a "startFrame" and "endFrame" to be present in instance.data
This requires a "frameStart" and "frameEnd" to be present in instance.data
or in context.data.
"""
@ -138,6 +138,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"imagesequence"
]
enviro_filter = [
"PATH",
"PYTHONPATH",
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
"PYPE_ROOT"
]
def _submit_deadline_post_job(self, instance, job):
"""
Deadline specific code separated from :meth:`process` for sake of
@ -181,13 +191,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Transfer the environment from the original job to this dependent
# job so they use the same environment
environment = job["Props"].get("Env", {})
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})
i = 0
for index, key in enumerate(environment):
self.log.info("KEY: {}".format(key))
self.log.info("FILTER: {}".format(self.enviro_filter))
if key.upper() in self.enviro_filter:
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % i: "{key}={value}".format(
key=key,
value=environment[key]
)
})
i += 1
# Avoid copied pools and remove secondary pool
payload["JobInfo"]["Pool"] = "none"
@ -236,12 +255,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Get start/end frame from instance, if not available get from context
context = instance.context
start = instance.data.get("startFrame")
start = instance.data.get("frameStart")
if start is None:
start = context.data["startFrame"]
end = instance.data.get("endFrame")
start = context.data["frameStart"]
end = instance.data.get("frameEnd")
if end is None:
end = context.data["endFrame"]
end = context.data["frameEnd"]
# Add in regex for sequence filename
# This assumes the output files start with subset name and ends with
@ -266,8 +285,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
metadata = {
"asset": asset,
"regex": regex,
"startFrame": start,
"endFrame": end,
"frameStart": start,
"frameEnd": end,
"fps": context.data.get("fps", None),
"families": ["render"],
"source": source,
@ -315,8 +334,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Set prev start / end frames for comparison
if not prev_start and not prev_end:
prev_start = version["data"]["startFrame"]
prev_end = version["data"]["endFrame"]
prev_start = version["data"]["frameStart"]
prev_end = version["data"]["frameEnd"]
subset_resources = get_resources(version, _ext)
resource_files = get_resource_files(subset_resources,
@ -352,12 +371,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Please do so when fixing this.
# Start frame
metadata["startFrame"] = updated_start
metadata["metadata"]["instance"]["startFrame"] = updated_start
metadata["frameStart"] = updated_start
metadata["metadata"]["instance"]["frameStart"] = updated_start
# End frame
metadata["endFrame"] = updated_end
metadata["metadata"]["instance"]["endFrame"] = updated_end
metadata["frameEnd"] = updated_end
metadata["metadata"]["instance"]["frameEnd"] = updated_end
metadata_filename = "{}_metadata.json".format(subset)

View file

@ -0,0 +1,12 @@
import pyblish.api
class ValidateFileSequences(pyblish.api.ContextPlugin):
"""Validates whether any file sequences were collected."""
order = pyblish.api.ValidatorOrder
targets = ["filesequence"]
label = "Validate File Sequences"
def process(self, context):
assert context, "Nothing collected."

View file

@ -22,8 +22,8 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
frames = list(collection.indexes)
current_range = (frames[0], frames[-1])
required_range = (instance.data["startFrame"],
instance.data["endFrame"])
required_range = (instance.data["frameStart"],
instance.data["frameEnd"])
if current_range != required_range:
raise ValueError("Invalid frame range: {0} - "

Some files were not shown because too many files have changed in this diff Show more