diff --git a/README.md b/README.md
index 634ede742d..7cf8c4c0b6 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,12 @@
 The base studio *config* for [Avalon](https://getavalon.github.io/)
-
+Currently this config depends on our customised avalon installation, so it won't work with vanilla avalon core. We're working on open sourcing all of the necessary code. In the meantime you can still take inspiration from it, or use our individual validators and scripts, which should work just fine in other pipelines.
+
 _This configuration acts as a starting point for all pype club clients with avalon deployment._
+
+
 ### Code convention
 
 Below are some of the standard practices applied to this repository.
diff --git a/pype/clockify/__init__.py b/pype/clockify/__init__.py
new file mode 100644
index 0000000000..5f61acd751
--- /dev/null
+++ b/pype/clockify/__init__.py
@@ -0,0 +1,9 @@
+from .clockify_api import ClockifyAPI
+from .widget_settings import ClockifySettings
+from .clockify import ClockifyModule
+
+__all__ = [
+    'ClockifyAPI',
+    'ClockifySettings',
+    'ClockifyModule'
+]
diff --git a/pype/clockify/clockify.py b/pype/clockify/clockify.py
new file mode 100644
index 0000000000..a22933f700
--- /dev/null
+++ b/pype/clockify/clockify.py
@@ -0,0 +1,97 @@
+import time
+import threading
+from app import style
+from app.vendor.Qt import QtWidgets
+from pype.clockify import ClockifySettings, ClockifyAPI
+
+
+class ClockifyModule:
+
+    def __init__(self, main_parent=None, parent=None):
+        self.main_parent = main_parent
+        self.parent = parent
+        self.clockapi = ClockifyAPI()
+        self.widget_settings = ClockifySettings(main_parent, self)
+        self.widget_settings_required = None
+
+        self.thread_timer_check = None
+        # Bools
+        self.bool_thread_check_running = False
+        self.bool_api_key_set = False
+        self.bool_workspace_set = False
+        self.bool_timer_run = False
+
+    def start_up(self):
+        self.clockapi.set_master(self)
+        self.bool_api_key_set = self.clockapi.set_api()
+        if self.bool_api_key_set is False:
+            self.show_settings()
+            return
+
+        self.bool_workspace_set = self.clockapi.workspace_id is not None
+        if self.bool_workspace_set is False:
+            return
+
+        self.start_timer_check()
+
+        self.set_menu_visibility()
+
+    def start_timer_check(self):
+        self.bool_thread_check_running = True
+        if self.thread_timer_check is None:
+            self.thread_timer_check = threading.Thread(
+                target=self.check_running
+            )
+            self.thread_timer_check.daemon = True
+            self.thread_timer_check.start()
+
+    def stop_timer_check(self):
+        # Clear the flag, otherwise check_running never exits and
+        # join() below blocks forever.
+        self.bool_thread_check_running = False
+        if self.thread_timer_check is not None:
+            self.thread_timer_check.join()
+            self.thread_timer_check = None
+
+    def check_running(self):
+        while self.bool_thread_check_running is True:
+            if self.clockapi.get_in_progress() is not None:
+                self.bool_timer_run = True
+            else:
+                self.bool_timer_run = False
+            self.set_menu_visibility()
+            time.sleep(5)
+
+    def stop_timer(self):
+        self.clockapi.finish_time_entry()
+        self.bool_timer_run = False
+
+    # Definition of Tray menu
+    def tray_menu(self, parent):
+        # Menu for Tray App
+        self.menu = QtWidgets.QMenu('Clockify', parent)
+        self.menu.setProperty('submenu', 'on')
+        self.menu.setStyleSheet(style.load_stylesheet())
+
+        # Actions
+        self.aShowSettings = QtWidgets.QAction(
+            "Settings", self.menu
+        )
+        self.aStopTimer = QtWidgets.QAction(
+            "Stop timer", self.menu
+        )
+
+        self.menu.addAction(self.aShowSettings)
+        self.menu.addAction(self.aStopTimer)
+
+        self.aShowSettings.triggered.connect(self.show_settings)
+        self.aStopTimer.triggered.connect(self.stop_timer)
+
+        self.set_menu_visibility()
+
+        return self.menu
+
+    def show_settings(self):
+        self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
+        self.widget_settings.show()
+
+    def set_menu_visibility(self):
+        self.aStopTimer.setVisible(self.bool_timer_run)
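Note: a minimal usage sketch of the module above, assuming a Qt tray application like the one Pype ships. The QApplication/QSystemTrayIcon wiring here is hypothetical and only illustrates the expected call order (build the menu first, then start_up):

    # sketch only -- wiring ClockifyModule into a tray icon menu
    from app.vendor.Qt import QtWidgets
    from pype.clockify import ClockifyModule

    app = QtWidgets.QApplication([])
    tray = QtWidgets.QSystemTrayIcon()

    module = ClockifyModule()
    menu = QtWidgets.QMenu()
    menu.addMenu(module.tray_menu(menu))  # creates aShowSettings/aStopTimer
    module.start_up()                     # validates API key, starts check thread

    tray.setContextMenu(menu)
    tray.show()
    app.exec_()
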
diff --git a/pype/clockify/clockify_api.py b/pype/clockify/clockify_api.py
new file mode 100644
index 0000000000..f5ebac0cef
--- /dev/null
+++ b/pype/clockify/clockify_api.py
@@ -0,0 +1,434 @@
+import os
+import requests
+import json
+import datetime
+import appdirs
+
+
+class Singleton(type):
+    _instances = {}
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            cls._instances[cls] = super(
+                Singleton, cls
+            ).__call__(*args, **kwargs)
+        return cls._instances[cls]
+
+
+class ClockifyAPI(metaclass=Singleton):
+    endpoint = "https://api.clockify.me/api/"
+    headers = {"X-Api-Key": None}
+    app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
+    file_name = 'clockify.json'
+    fpath = os.path.join(app_dir, file_name)
+    master_parent = None
+    workspace_id = None
+
+    def set_master(self, master_parent):
+        self.master_parent = master_parent
+
+    def verify_api(self):
+        for key, value in self.headers.items():
+            if value is None or value.strip() == '':
+                return False
+        return True
+
+    def set_api(self, api_key=None):
+        if api_key is None:
+            api_key = self.get_api_key()
+
+        if api_key is not None and self.validate_api_key(api_key) is True:
+            self.headers["X-Api-Key"] = api_key
+            self.set_workspace()
+            return True
+        return False
+
+    def validate_api_key(self, api_key):
+        test_headers = {'X-Api-Key': api_key}
+        action_url = 'workspaces/'
+        response = requests.get(
+            self.endpoint + action_url,
+            headers=test_headers
+        )
+        if response.status_code != 200:
+            return False
+        return True
+
+    def validate_workspace_perm(self):
+        test_project = '__test__'
+        action_url = 'workspaces/{}/projects/'.format(self.workspace_id)
+        body = {
+            "name": test_project, "clientId": "", "isPublic": "false",
+            "estimate": {"type": "AUTO"},
+            "color": "#f44336", "billable": "true"
+        }
+        response = requests.post(
+            self.endpoint + action_url,
+            headers=self.headers, json=body
+        )
+        if response.status_code == 201:
+            self.delete_project(self.get_project_id(test_project))
+            return True
+        else:
+            projects = self.get_projects()
+            if test_project in projects:
+                try:
+                    self.delete_project(self.get_project_id(test_project))
+                    return True
+                except json.decoder.JSONDecodeError:
+                    return False
+            return False
+
+    def set_workspace(self, name=None):
+        if name is None:
+            name = os.environ.get('CLOCKIFY_WORKSPACE', None)
+        self.workspace = name
+        self.workspace_id = None
+        if self.workspace is None:
+            return
+        try:
+            result = self.validate_workspace()
+        except Exception:
+            result = False
+        if result is not False:
+            self.workspace_id = result
+            if self.master_parent is not None:
+                self.master_parent.start_timer_check()
+            return True
+        return False
+
+    def validate_workspace(self, name=None):
+        if name is None:
+            name = self.workspace
+        all_workspaces = self.get_workspaces()
+        if name in all_workspaces:
+            return all_workspaces[name]
+        return False
+
+    def get_api_key(self):
+        api_key = None
+        try:
+            # Context manager so the file handle is always closed
+            with open(self.fpath, 'r') as file:
+                api_key = json.load(file).get('api_key', None)
+            if api_key == '':
+                api_key = None
+        except Exception:
+            # Create an empty settings file so the next save succeeds
+            open(self.fpath, 'w').close()
+        return api_key
+
+    def save_api_key(self, api_key):
+        data = {'api_key': api_key}
+        with open(self.fpath, 'w') as file:
+            file.write(json.dumps(data))
+
+    def get_workspaces(self):
+        action_url = 'workspaces/'
+        response = requests.get(
+            self.endpoint + action_url,
+            headers=self.headers
+        )
+        return {
+            workspace["name"]: workspace["id"]
+            for workspace in response.json()
+        }
+
+    def get_projects(self, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/projects/'.format(workspace_id)
+        response = requests.get(
+            self.endpoint + action_url,
+            headers=self.headers
+        )
+
+        return {
+            project["name"]: project["id"] for project in response.json()
+        }
+
+    def get_tags(self, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/tags/'.format(workspace_id)
+        response = requests.get(
+            self.endpoint + action_url,
+            headers=self.headers
+        )
+
+        return {
+            tag["name"]: tag["id"] for tag in response.json()
+        }
+
+    def get_tasks(self, project_id, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/projects/{}/tasks/'.format(
+            workspace_id, project_id
+        )
+        response = requests.get(
+            self.endpoint + action_url,
+            headers=self.headers
+        )
+
+        return {
+            task["name"]: task["id"] for task in response.json()
+        }
+
+    def get_workspace_id(self, workspace_name):
+        all_workspaces = self.get_workspaces()
+        if workspace_name not in all_workspaces:
+            return None
+        return all_workspaces[workspace_name]
+
+    def get_project_id(self, project_name, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        all_projects = self.get_projects(workspace_id)
+        if project_name not in all_projects:
+            return None
+        return all_projects[project_name]
+
+    def get_tag_id(self, tag_name, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        all_tags = self.get_tags(workspace_id)
+        if tag_name not in all_tags:
+            return None
+        return all_tags[tag_name]
+
+    def get_task_id(
+        self, task_name, project_id, workspace_id=None
+    ):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        all_tasks = self.get_tasks(
+            project_id, workspace_id
+        )
+        if task_name not in all_tasks:
+            return None
+        return all_tasks[task_name]
+
+    def get_current_time(self):
+        return str(datetime.datetime.utcnow().isoformat()) + 'Z'
+
+    def start_time_entry(
+        self, description, project_id, task_id=None, tag_ids=None,
+        workspace_id=None, billable=True
+    ):
+        # Avoid a mutable default argument for tag_ids
+        if tag_ids is None:
+            tag_ids = []
+        # Workspace
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+
+        # Check if a time entry with the same values is already running
+        current = self.get_in_progress(workspace_id)
+        if current is not None:
+            if (
+                current.get("description", None) == description and
+                current.get("projectId", None) == project_id and
+                current.get("taskId", None) == task_id
+            ):
+                self.bool_timer_run = True
+                return self.bool_timer_run
+            self.finish_time_entry(workspace_id)
+
+        # Convert billable to strings
+        if billable:
+            billable = 'true'
+        else:
+            billable = 'false'
+        # Rest API Action
+        action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
+        start = self.get_current_time()
+        body = {
+            "start": start,
+            "billable": billable,
+            "description": description,
+            "projectId": project_id,
+            "taskId": task_id,
+            "tagIds": tag_ids
+        }
+        response = requests.post(
+            self.endpoint + action_url,
+            headers=self.headers,
+            json=body
+        )
+
+        success = False
+        if response.status_code < 300:
+            success = True
+        return success
+
+    def get_in_progress(self, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/timeEntries/inProgress'.format(
+            workspace_id
+        )
+        response = requests.get(
+            self.endpoint + action_url,
+            headers=self.headers
+        )
+        try:
+            output = response.json()
+        except json.decoder.JSONDecodeError:
+            output = None
+        return output
+
+    def finish_time_entry(self, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        current = self.get_in_progress(workspace_id)
+        # Nothing to finish when no time entry is running
+        if current is None:
+            return
+        current_id = current["id"]
+        action_url = 'workspaces/{}/timeEntries/{}'.format(
+            workspace_id, current_id
+        )
+        body = {
+            "start": current["timeInterval"]["start"],
+            "billable": current["billable"],
+            "description": current["description"],
+            "projectId": current["projectId"],
+            "taskId": current["taskId"],
+            "tagIds": current["tagIds"],
+            "end": self.get_current_time()
+        }
+        response = requests.put(
+            self.endpoint + action_url,
+            headers=self.headers,
+            json=body
+        )
+        return response.json()
+
+    def get_time_entries(
+        self, workspace_id=None, quantity=10
+    ):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
+        response = requests.get(
+            self.endpoint + action_url,
+            headers=self.headers
+        )
+        return response.json()[:quantity]
+
+    def remove_time_entry(self, tid, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/timeEntries/{}'.format(
+            workspace_id, tid
+        )
+        response = requests.delete(
+            self.endpoint + action_url,
+            headers=self.headers
+        )
+        return response.json()
+
+    def add_project(self, name, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/projects/'.format(workspace_id)
+        body = {
+            "name": name,
+            "clientId": "",
+            "isPublic": "false",
+            "estimate": {
+                # "estimate": "3600",
+                "type": "AUTO"
+            },
+            "color": "#f44336",
+            "billable": "true"
+        }
+        response = requests.post(
+            self.endpoint + action_url,
+            headers=self.headers,
+            json=body
+        )
+        return response.json()
+
+    def add_workspace(self, name):
+        action_url = 'workspaces/'
+        body = {"name": name}
+        response = requests.post(
+            self.endpoint + action_url,
+            headers=self.headers,
+            json=body
+        )
+        return response.json()
+
+    def add_task(
+        self, name, project_id, workspace_id=None
+    ):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/projects/{}/tasks/'.format(
+            workspace_id, project_id
+        )
+        body = {
+            "name": name,
+            "projectId": project_id
+        }
+        response = requests.post(
+            self.endpoint + action_url,
+            headers=self.headers,
+            json=body
+        )
+        return response.json()
+
+    def add_tag(self, name, workspace_id=None):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/tags'.format(workspace_id)
+        body = {
+            "name": name
+        }
+        response = requests.post(
+            self.endpoint + action_url,
+            headers=self.headers,
+            json=body
+        )
+        return response.json()
+
+    def delete_project(
+        self, project_id, workspace_id=None
+    ):
+        if workspace_id is None:
+            workspace_id = self.workspace_id
+        action_url = 'workspaces/{}/projects/{}'.format(
+            workspace_id, project_id
+        )
+        response = requests.delete(
+            self.endpoint + action_url,
+            headers=self.headers,
+        )
+        return response.json()
+
+    def convert_input(
+        self, entity_id, entity_name, mode='Workspace', project_id=None
+    ):
+        if entity_id is None:
+            error = False
+            error_msg = 'Missing information "{}"'
+            if mode.lower() == 'workspace':
+                if entity_id is None and entity_name is None:
+                    if self.workspace_id is not None:
+                        entity_id = self.workspace_id
+                    else:
+                        error = True
+                else:
+                    entity_id = self.get_workspace_id(entity_name)
+            else:
+                if entity_id is None and entity_name is None:
+                    error = True
+                elif mode.lower() == 'project':
+                    entity_id = self.get_project_id(entity_name)
+                elif mode.lower() == 'task':
+                    entity_id = self.get_task_id(
+                        task_name=entity_name, project_id=project_id
+                    )
+                else:
+                    raise TypeError('Unknown type')
+            # Raise error
+            if error:
+                raise ValueError(error_msg.format(mode))
+
+        return entity_id
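Note: a short usage sketch of the API wrapper above. The workspace and project names are made up for illustration, and CLOCKIFY_WORKSPACE is assumed to point at an existing workspace:

    # sketch only -- starting and stopping a time entry by hand
    from pype.clockify import ClockifyAPI

    clockapi = ClockifyAPI()   # Singleton metaclass: always the same instance
    if clockapi.set_api():     # loads the key stored in clockify.json
        project_id = clockapi.get_project_id('MyProject')
        clockapi.start_time_entry('shots/sh010/compositing', project_id)
        # ... work ...
        clockapi.finish_time_entry()
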
diff --git a/pype/clockify/widget_settings.py b/pype/clockify/widget_settings.py
new file mode 100644
index 0000000000..02fd4350e6
--- /dev/null
+++ b/pype/clockify/widget_settings.py
@@ -0,0 +1,155 @@
+import os
+from app.vendor.Qt import QtCore, QtGui, QtWidgets
+from app import style
+
+
+class ClockifySettings(QtWidgets.QWidget):
+
+    SIZE_W = 300
+    SIZE_H = 130
+
+    loginSignal = QtCore.Signal(object, object, object)
+
+    def __init__(self, main_parent=None, parent=None, optional=True):
+
+        super(ClockifySettings, self).__init__()
+
+        self.parent = parent
+        self.main_parent = main_parent
+        self.clockapi = parent.clockapi
+        self.optional = optional
+        self.validated = False
+
+        # Icon
+        if hasattr(parent, 'icon'):
+            self.setWindowIcon(self.parent.icon)
+        elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
+            self.setWindowIcon(self.parent.parent.icon)
+        else:
+            pype_setup = os.getenv('PYPE_SETUP_ROOT')
+            items = [pype_setup, "app", "resources", "icon.png"]
+            fname = os.path.sep.join(items)
+            icon = QtGui.QIcon(fname)
+            self.setWindowIcon(icon)
+
+        self.setWindowFlags(
+            QtCore.Qt.WindowCloseButtonHint |
+            QtCore.Qt.WindowMinimizeButtonHint
+        )
+
+        self._translate = QtCore.QCoreApplication.translate
+
+        # Font
+        self.font = QtGui.QFont()
+        self.font.setFamily("DejaVu Sans Condensed")
+        self.font.setPointSize(9)
+        self.font.setBold(True)
+        self.font.setWeight(50)
+        self.font.setKerning(True)
+
+        # Size setting
+        self.resize(self.SIZE_W, self.SIZE_H)
+        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
+        self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
+        self.setStyleSheet(style.load_stylesheet())
+
+        self.setLayout(self._main())
+        self.setWindowTitle('Clockify settings')
+
+    def _main(self):
+        self.main = QtWidgets.QVBoxLayout()
+        self.main.setObjectName("main")
+
+        self.form = QtWidgets.QFormLayout()
+        self.form.setContentsMargins(10, 15, 10, 5)
+        self.form.setObjectName("form")
+
+        self.label_api_key = QtWidgets.QLabel("Clockify API key:")
+        self.label_api_key.setFont(self.font)
+        self.label_api_key.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
+        self.label_api_key.setTextFormat(QtCore.Qt.RichText)
+        self.label_api_key.setObjectName("label_api_key")
+
+        self.input_api_key = QtWidgets.QLineEdit()
+        self.input_api_key.setEnabled(True)
+        self.input_api_key.setFrame(True)
+        self.input_api_key.setObjectName("input_api_key")
+        self.input_api_key.setPlaceholderText(
+            self._translate("main", "e.g. 
XX1XxXX2x3x4xXxx") + ) + + self.error_label = QtWidgets.QLabel("") + self.error_label.setFont(self.font) + self.error_label.setTextFormat(QtCore.Qt.RichText) + self.error_label.setObjectName("error_label") + self.error_label.setWordWrap(True) + self.error_label.hide() + + self.form.addRow(self.label_api_key, self.input_api_key) + self.form.addRow(self.error_label) + + self.btn_group = QtWidgets.QHBoxLayout() + self.btn_group.addStretch(1) + self.btn_group.setObjectName("btn_group") + + self.btn_ok = QtWidgets.QPushButton("Ok") + self.btn_ok.setToolTip('Sets Clockify API Key so can Start/Stop timer') + self.btn_ok.clicked.connect(self.click_ok) + + self.btn_cancel = QtWidgets.QPushButton("Cancel") + cancel_tooltip = 'Application won\'t start' + if self.optional: + cancel_tooltip = 'Close this window' + self.btn_cancel.setToolTip(cancel_tooltip) + self.btn_cancel.clicked.connect(self._close_widget) + + self.btn_group.addWidget(self.btn_ok) + self.btn_group.addWidget(self.btn_cancel) + + self.main.addLayout(self.form) + self.main.addLayout(self.btn_group) + + return self.main + + def setError(self, msg): + self.error_label.setText(msg) + self.error_label.show() + + def invalid_input(self, entity): + entity.setStyleSheet("border: 1px solid red;") + + def click_ok(self): + api_key = self.input_api_key.text().strip() + if self.optional is True and api_key == '': + self.clockapi.save_api_key(None) + self.clockapi.set_api(api_key) + self.validated = False + self._close_widget() + return + + validation = self.clockapi.validate_api_key(api_key) + + if validation: + self.clockapi.save_api_key(api_key) + self.clockapi.set_api(api_key) + self.validated = True + self._close_widget() + else: + self.invalid_input(self.input_api_key) + self.validated = False + self.setError( + "Entered invalid API key" + ) + + def closeEvent(self, event): + if self.optional is True: + event.ignore() + self._close_widget() + else: + self.validated = False + + def _close_widget(self): + if self.optional is True: + self.hide() + else: + self.close() diff --git a/pype/ftrack/actions/action_application_loader.py b/pype/ftrack/actions/action_application_loader.py index f98f1b1e0d..ecf3575902 100644 --- a/pype/ftrack/actions/action_application_loader.py +++ b/pype/ftrack/actions/action_application_loader.py @@ -55,9 +55,12 @@ def register(session): apps.append(app) apps = sorted(apps, key=lambda x: x['name']) + app_counter = 0 for app in apps: try: registerApp(app, session) - time.sleep(0.05) + if app_counter%5 == 0: + time.sleep(0.1) + app_counter += 1 except Exception as e: log.warning("'{0}' - not proper App ({1})".format(app['name'], e)) diff --git a/pype/ftrack/actions/action_clockify_start.py b/pype/ftrack/actions/action_clockify_start.py new file mode 100644 index 0000000000..b1c60a2525 --- /dev/null +++ b/pype/ftrack/actions/action_clockify_start.py @@ -0,0 +1,105 @@ +import sys +import argparse +import logging + +import ftrack_api +from pype.ftrack import BaseAction +from pype.clockify import ClockifyAPI + + +class StartClockify(BaseAction): + '''Starts timer on clockify.''' + + #: Action identifier. + identifier = 'clockify.start.timer' + #: Action label. + label = 'Start timer' + #: Action description. 
+ description = 'Starts timer on clockify' + #: roles that are allowed to register this action + icon = 'https://clockify.me/assets/images/clockify-logo.png' + #: Clockify api + clockapi = ClockifyAPI() + + def discover(self, session, entities, event): + if len(entities) != 1: + return False + if entities[0].entity_type.lower() != 'task': + return False + if self.clockapi.workspace_id is None: + return False + return True + + def launch(self, session, entities, event): + task = entities[0] + task_name = task['type']['name'] + project_name = task['project']['full_name'] + + def get_parents(entity): + output = [] + if entity.entity_type.lower() == 'project': + return output + output.extend(get_parents(entity['parent'])) + output.append(entity['name']) + + return output + + desc_items = get_parents(task['parent']) + desc_items.append(task['name']) + description = '/'.join(desc_items) + project_id = self.clockapi.get_project_id(project_name) + tag_ids = [] + tag_ids.append(self.clockapi.get_tag_id(task_name)) + self.clockapi.start_time_entry( + description, project_id, tag_ids=tag_ids + ) + + return True + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + + if not isinstance(session, ftrack_api.session.Session): + return + + StartClockify(session).register() + + +def main(arguments=None): + '''Set up logging and register action.''' + if arguments is None: + arguments = [] + + parser = argparse.ArgumentParser() + # Allow setting of logging level from arguments. + loggingLevels = {} + for level in ( + logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, + logging.ERROR, logging.CRITICAL + ): + loggingLevels[logging.getLevelName(level).lower()] = level + + parser.add_argument( + '-v', '--verbosity', + help='Set the logging output verbosity.', + choices=loggingLevels.keys(), + default='info' + ) + namespace = parser.parse_args(arguments) + + # Set up basic logging + logging.basicConfig(level=loggingLevels[namespace.verbosity]) + + session = ftrack_api.Session() + register(session) + + # Wait for events + logging.info( + 'Registered actions and listening for events. Use Ctrl-C to abort.' + ) + session.event_hub.wait() + + +if __name__ == '__main__': + raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/actions/action_clockify_sync.py b/pype/ftrack/actions/action_clockify_sync.py new file mode 100644 index 0000000000..202bb7b912 --- /dev/null +++ b/pype/ftrack/actions/action_clockify_sync.py @@ -0,0 +1,148 @@ +import sys +import argparse +import logging +import json +import ftrack_api +from pype.ftrack import BaseAction, MissingPermision +from pype.clockify import ClockifyAPI + + +class SyncClocify(BaseAction): + '''Synchronise project names and task types.''' + + #: Action identifier. + identifier = 'clockify.sync' + #: Action label. + label = 'Sync To Clockify' + #: Action description. 
+ description = 'Synchronise data to Clockify workspace' + #: priority + priority = 100 + #: roles that are allowed to register this action + role_list = ['Pypeclub', 'Administrator'] + #: icon + icon = 'https://clockify.me/assets/images/clockify-logo-white.svg' + #: CLockifyApi + clockapi = ClockifyAPI() + + def register(self): + if self.clockapi.workspace_id is None: + raise ValueError('Clockify Workspace or API key are not set!') + + if self.clockapi.validate_workspace_perm() is False: + raise MissingPermision('Clockify') + super().register() + + def discover(self, session, entities, event): + ''' Validation ''' + return True + + def launch(self, session, entities, event): + # JOB SETTINGS + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + + job = session.create('Job', { + 'user': user, + 'status': 'running', + 'data': json.dumps({ + 'description': 'Sync Ftrack to Clockify' + }) + }) + session.commit() + try: + entity = entities[0] + + if entity.entity_type.lower() == 'project': + project = entity + else: + project = entity['project'] + project_name = project['full_name'] + + task_types = [] + for task_type in project['project_schema']['_task_type_schema'][ + 'types' + ]: + task_types.append(task_type['name']) + + clockify_projects = self.clockapi.get_projects() + + if project_name not in clockify_projects: + response = self.clockapi.add_project(project_name) + if 'id' not in response: + self.log.error('Project {} can\'t be created'.format( + project_name + )) + return { + 'success': False, + 'message': 'Can\'t create project, unexpected error' + } + project_id = response['id'] + else: + project_id = clockify_projects[project_name] + + clockify_workspace_tags = self.clockapi.get_tags() + for task_type in task_types: + if task_type not in clockify_workspace_tags: + response = self.clockapi.add_tag(task_type) + if 'id' not in response: + self.log.error('Task {} can\'t be created'.format( + task_type + )) + continue + except Exception: + job['status'] = 'failed' + session.commit() + return False + + job['status'] = 'done' + session.commit() + return True + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + + if not isinstance(session, ftrack_api.session.Session): + return + + SyncClocify(session).register() + + +def main(arguments=None): + '''Set up logging and register action.''' + if arguments is None: + arguments = [] + + parser = argparse.ArgumentParser() + # Allow setting of logging level from arguments. + loggingLevels = {} + for level in ( + logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, + logging.ERROR, logging.CRITICAL + ): + loggingLevels[logging.getLevelName(level).lower()] = level + + parser.add_argument( + '-v', '--verbosity', + help='Set the logging output verbosity.', + choices=loggingLevels.keys(), + default='info' + ) + namespace = parser.parse_args(arguments) + + # Set up basic logging + logging.basicConfig(level=loggingLevels[namespace.verbosity]) + + session = ftrack_api.Session() + register(session) + + # Wait for events + logging.info( + 'Registered actions and listening for events. Use Ctrl-C to abort.' 
+ ) + session.event_hub.wait() + + +if __name__ == '__main__': + raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/actions/action_create_sw_folders.py b/pype/ftrack/actions/action_create_sw_folders.py new file mode 100644 index 0000000000..f6b14cb764 --- /dev/null +++ b/pype/ftrack/actions/action_create_sw_folders.py @@ -0,0 +1,155 @@ +import os +import sys +import json +import argparse +import logging + +import ftrack_api +from avalon import lib as avalonlib +from avalon.tools.libraryloader.io_nonsingleton import DbConnector +from pype import lib as pypelib +from pype.ftrack import BaseAction + + +class CreateSWFolders(BaseAction): + '''Edit meta data action.''' + + #: Action identifier. + identifier = 'create.sw.folders' + #: Action label. + label = 'Create SW Folders' + #: Action description. + description = 'Creates folders for all SW in project' + + + def __init__(self, session): + super().__init__(session) + self.avalon_db = DbConnector() + self.avalon_db.install() + + def discover(self, session, entities, event): + ''' Validation ''' + + return True + + def launch(self, session, entities, event): + if len(entities) != 1: + self.log.warning( + 'There are more entities in selection!' + ) + return False + entity = entities[0] + if entity.entity_type.lower() != 'task': + self.log.warning( + 'Selected entity is not Task!' + ) + return False + asset = entity['parent'] + project = asset['project'] + + project_name = project["full_name"] + self.avalon_db.Session['AVALON_PROJECT'] = project_name + av_project = self.avalon_db.find_one({'type': 'project'}) + av_asset = self.avalon_db.find_one({ + 'type': 'asset', + 'name': asset['name'] + }) + + templates = av_project["config"]["template"] + template = templates.get("work", None) + if template is None: + return False + + + data = { + "root": os.environ["AVALON_PROJECTS"], + "project": { + "name": project_name, + "code": project["name"] + }, + "hierarchy": av_asset['data']['hierarchy'], + "asset": asset['name'], + "task": entity['name'], + } + + apps = [] + if '{app}' in template: + # Apps in project + for app in av_project['data']['applications']: + app_data = avalonlib.get_application(app) + app_dir = app_data['application_dir'] + if app_dir not in apps: + apps.append(app_dir) + # Apps in presets + path_items = [pypelib.get_presets_path(), 'tools', 'sw_folders.json'] + filepath = os.path.sep.join(path_items) + + presets = dict() + try: + with open(filepath) as data_file: + presets = json.load(data_file) + except Exception as e: + self.log.warning('Wasn\'t able to load presets') + preset_apps = presets.get(project_name, presets.get('__default__', [])) + for app in preset_apps: + if app not in apps: + apps.append(app) + + # Create folders for apps + for app in apps: + data['app'] = app + self.log.info('Created folder for app {}'.format(app)) + path = os.path.normpath(template.format(**data)) + if os.path.exists(path): + continue + os.makedirs(path) + + return True + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + + if not isinstance(session, ftrack_api.session.Session): + return + + CreateSWFolders(session).register() + + +def main(arguments=None): + '''Set up logging and register action.''' + if arguments is None: + arguments = [] + + parser = argparse.ArgumentParser() + # Allow setting of logging level from arguments. 
+ loggingLevels = {} + for level in ( + logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, + logging.ERROR, logging.CRITICAL + ): + loggingLevels[logging.getLevelName(level).lower()] = level + + parser.add_argument( + '-v', '--verbosity', + help='Set the logging output verbosity.', + choices=loggingLevels.keys(), + default='info' + ) + namespace = parser.parse_args(arguments) + + # Set up basic logging + logging.basicConfig(level=loggingLevels[namespace.verbosity]) + + session = ftrack_api.Session() + register(session) + + # Wait for events + logging.info( + 'Registered actions and listening for events. Use Ctrl-C to abort.' + ) + session.event_hub.wait() + + +if __name__ == '__main__': + raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/actions/action_djvview.py b/pype/ftrack/actions/action_djvview.py index 4e027b740b..7da83457ee 100644 --- a/pype/ftrack/actions/action_djvview.py +++ b/pype/ftrack/actions/action_djvview.py @@ -5,120 +5,59 @@ import json import logging import subprocess from operator import itemgetter -from pype.vendor import ftrack_api -from pype.ftrack import BaseHandler +import ftrack_api +from pype.ftrack import BaseAction from app.api import Logger -from pype import lib +from pype import lib as pypelib + log = Logger().get_logger(__name__) -class DJVViewAction(BaseHandler): +class DJVViewAction(BaseAction): """Launch DJVView action.""" identifier = "djvview-launch-action" label = "DJV View" + description = "DJV View Launcher" icon = "http://a.fsdn.com/allura/p/djv/icon" type = 'Application' def __init__(self, session): '''Expects a ftrack_api.Session instance''' super().__init__(session) - self.variant = None self.djv_path = None self.config_data = None - items = [] - if self.config_data is None: - self.load_config_data() + self.load_config_data() + self.set_djv_path() - application = self.get_application() - if application is None: + if self.djv_path is None: return - applicationIdentifier = application["identifier"] - label = application["label"] - items.append({ - "actionIdentifier": self.identifier, - "label": label, - "variant": application.get("variant", None), - "description": application.get("description", None), - "icon": application.get("icon", "default"), - "applicationIdentifier": applicationIdentifier - }) - - self.items = items - - items = [] - applications = self.get_applications() - applications = sorted( - applications, key=lambda application: application["label"] + self.allowed_types = self.config_data.get( + 'file_ext', ["img", "mov", "exr"] ) - for application in applications: - self.djv_path = application.get("path", None) - applicationIdentifier = application["identifier"] - label = application["label"] - items.append({ - "actionIdentifier": self.identifier, - "label": label, - "variant": application.get("variant", None), - "description": application.get("description", None), - "icon": application.get("icon", "default"), - "applicationIdentifier": applicationIdentifier - }) - - self.items = items - - if self.identifier is None: - raise ValueError( - 'Action missing identifier.' - ) - - def is_valid_selection(self, event): - selection = event["data"].get("selection", []) - - if not selection: - return - - entityType = selection[0]["entityType"] - - if entityType not in ["assetversion", "task"]: - return False - - return True - - def discover(self, event): - """Return available actions based on *event*. 
""" - - if not self.is_valid_selection(event): - return - - return { - "items": self.items - } - def register(self): - '''Registers the action, subscribing the discover and launch topics.''' - self.session.event_hub.subscribe( - 'topic=ftrack.action.discover and source.user.username={0}'.format( - self.session.api_user - ), self.discover - ) - launch_subscription = ( - 'topic=ftrack.action.launch' - ' and data.actionIdentifier={0}' - ' and source.user.username={1}' - ) - self.session.event_hub.subscribe( - launch_subscription.format( - self.identifier, - self.session.api_user - ), - self.launch + assert (self.djv_path is not None), ( + 'DJV View is not installed' + ' or paths in presets are not set correctly' ) + super().register() + + def discover(self, session, entities, event): + """Return available actions based on *event*. """ + selection = event["data"].get("selection", []) + if len(selection) != 1: + return False + + entityType = selection[0].get("entityType", None) + if entityType in ["assetversion", "task"]: + return True + return False def load_config_data(self): - path_items = [lib.get_presets_path(), 'djv_view', 'config.json'] + path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json'] filepath = os.path.sep.join(path_items) data = dict() @@ -132,270 +71,168 @@ class DJVViewAction(BaseHandler): self.config_data = data - def get_application(self): - applicationIdentifier = "djvview" - description = "DJV View Launcher" - - possible_paths = self.config_data.get("djv_paths", []) - for path in possible_paths: + def set_djv_path(self): + for path in self.config_data.get("djv_paths", []): if os.path.exists(path): self.djv_path = path break - if self.djv_path is None: - log.debug("DJV View application was not found") - return None + def interface(self, session, entities, event): + if event['data'].get('values', {}): + return - application = { - 'identifier': applicationIdentifier, - 'label': self.label, - 'icon': self.icon, - 'description': description - } - - versionExpression = re.compile(r"(?P\d+.\d+.\d+)") - versionMatch = versionExpression.search(self.djv_path) - if versionMatch: - new_label = '{} {}'.format( - application['label'], versionMatch.group('version') - ) - application['label'] = new_label - - return application - - def translate_event(self, session, event): - '''Return *event* translated structure to be used with the API.''' - - selection = event['data'].get('selection', []) - - entities = list() - for entity in selection: - entities.append( - (session.get( - self.get_entity_type(entity), entity.get('entityId') - )) - ) - - return entities - - def get_entity_type(self, entity): - entity_type = entity.get('entityType').replace('_', '').lower() - - for schema in self.session.schemas: - alias_for = schema.get('alias_for') + entity = entities[0] + versions = [] + entity_type = entity.entity_type.lower() + if entity_type == "assetversion": if ( - alias_for and isinstance(alias_for, str) and - alias_for.lower() == entity_type + entity[ + 'components' + ][0]['file_type'][1:] in self.allowed_types ): - return schema['id'] + versions.append(entity) + else: + master_entity = entity + if entity_type == "task": + master_entity = entity['parent'] - for schema in self.session.schemas: - if schema['id'].lower() == entity_type: - return schema['id'] - - raise ValueError( - 'Unable to translate entity type: {0}.'.format(entity_type) - ) - - def launch(self, event): - """Callback method for DJVView action.""" - session = self.session - entities = self.translate_event(session, 
event) - - # Launching application - if "values" in event["data"]: - filename = event['data']['values']['path'] - file_type = filename.split(".")[-1] - - # TODO Is this proper way? - try: - fps = int(entities[0]['custom_attributes']['fps']) - except Exception: - fps = 24 - - # TODO issequence is probably already built-in validation in ftrack - isseq = re.findall('%[0-9]*d', filename) - if len(isseq) > 0: - if len(isseq) == 1: - frames = [] - padding = re.findall('%[0-9]*d', filename).pop() - index = filename.find(padding) - - full_file = filename[0:index-1] - file = full_file.split(os.sep)[-1] - folder = os.path.dirname(full_file) - - for fname in os.listdir(path=folder): - if fname.endswith(file_type) and file in fname: - frames.append(int(fname.split(".")[-2])) - - if len(frames) > 0: - start = min(frames) - end = max(frames) - - range = (padding % start) + '-' + (padding % end) - filename = re.sub('%[0-9]*d', range, filename) - else: - msg = ( - 'DJV View - Filename has more than one' - ' sequence identifier.' - ) - return { - 'success': False, - 'message': (msg) - } - - cmd = [] - # DJV path - cmd.append(os.path.normpath(self.djv_path)) - # DJV Options Start ############################################## - '''layer name''' - # cmd.append('-file_layer (value)') - ''' Proxy scale: 1/2, 1/4, 1/8''' - cmd.append('-file_proxy 1/2') - ''' Cache: True, False.''' - cmd.append('-file_cache True') - ''' Start in full screen ''' - # cmd.append('-window_fullscreen') - ''' Toolbar controls: False, True.''' - # cmd.append("-window_toolbar False") - ''' Window controls: False, True.''' - # cmd.append("-window_playbar False") - ''' Grid overlay: None, 1x1, 10x10, 100x100.''' - # cmd.append("-view_grid None") - ''' Heads up display: True, False.''' - # cmd.append("-view_hud True") - ''' Playback: Stop, Forward, Reverse.''' - cmd.append("-playback Forward") - ''' Frame.''' - # cmd.append("-playback_frame (value)") - cmd.append("-playback_speed " + str(fps)) - ''' Timer: Sleep, Timeout. Value: Sleep.''' - # cmd.append("-playback_timer (value)") - ''' Timer resolution (seconds): 0.001.''' - # cmd.append("-playback_timer_resolution (value)") - ''' Time units: Timecode, Frames.''' - cmd.append("-time_units Frames") - # DJV Options End ################################################ - - # PATH TO COMPONENT - cmd.append(os.path.normpath(filename)) - - try: - # Run DJV with these commands - subprocess.Popen(' '.join(cmd)) - except FileNotFoundError: - return { - 'success': False, - 'message': 'File "{}" was not found.'.format( - os.path.basename(filename) - ) - } - - return { - 'success': True, - 'message': 'DJV View started.' - } - - if 'items' not in event["data"]: - event["data"]['items'] = [] - - try: - for entity in entities: - versions = [] - self.load_config_data() - default_types = ["img", "mov", "exr"] - allowed_types = self.config_data.get('file_ext', default_types) - - if entity.entity_type.lower() == "assetversion": + for asset in master_entity['assets']: + for version in asset['versions']: + # Get only AssetVersion of selected task if ( - entity[ - 'components' - ][0]['file_type'][1:] in allowed_types + entity_type == "task" and + version['task']['id'] != entity['id'] ): - versions.append(entity) + continue + # Get only components with allowed type + filetype = version['components'][0]['file_type'] + if filetype[1:] in self.allowed_types: + versions.append(version) - elif entity.entity_type.lower() == "task": - # AssetVersions are obtainable only from shot! 
- shotentity = entity['parent'] - - for asset in shotentity['assets']: - for version in asset['versions']: - # Get only AssetVersion of selected task - if version['task']['id'] != entity['id']: - continue - # Get only components with allowed type - filetype = version['components'][0]['file_type'] - if filetype[1:] in allowed_types: - versions.append(version) - - # Raise error if no components were found - if len(versions) < 1: - raise ValueError('There are no Asset Versions to open.') - - for version in versions: - logging.info(version['components']) - for component in version['components']: - label = "v{0} - {1} - {2}" - - label = label.format( - str(version['version']).zfill(3), - version['asset']['type']['name'], - component['name'] - ) - - try: - # TODO This is proper way to get filepath!!! - # THIS WON'T WORK RIGHT NOW - location = component[ - 'component_locations' - ][0]['location'] - file_path = location.get_filesystem_path(component) - # if component.isSequence(): - # if component.getMembers(): - # frame = int( - # component.getMembers()[0].getName() - # ) - # file_path = file_path % frame - except Exception: - # This works but is NOT proper way - file_path = component[ - 'component_locations' - ][0]['resource_identifier'] - - dirpath = os.path.dirname(file_path) - if os.path.isdir(dirpath): - event["data"]["items"].append( - {"label": label, "value": file_path} - ) - - # Raise error if any component is playable - if len(event["data"]["items"]) == 0: - raise ValueError( - 'There are no Asset Versions with accessible path.' - ) - - except Exception as e: + if len(versions) < 1: return { 'success': False, - 'message': str(e) + 'message': 'There are no Asset Versions to open.' } - return { - "items": [ - { - "label": "Items to view", - "type": "enumerator", - "name": "path", - "data": sorted( - event["data"]['items'], - key=itemgetter("label"), - reverse=True + items = [] + base_label = "v{0} - {1} - {2}" + default_component = self.config_data.get( + 'default_component', None + ) + last_available = None + select_value = None + for version in versions: + for component in version['components']: + label = base_label.format( + str(version['version']).zfill(3), + version['asset']['type']['name'], + component['name'] + ) + + try: + location = component[ + 'component_locations' + ][0]['location'] + file_path = location.get_filesystem_path(component) + except Exception: + file_path = component[ + 'component_locations' + ][0]['resource_identifier'] + + if os.path.isdir(os.path.dirname(file_path)): + last_available = file_path + if component['name'] == default_component: + select_value = file_path + items.append( + {'label': label, 'value': file_path} ) - } - ] + + if len(items) == 0: + return { + 'success': False, + 'message': ( + 'There are no Asset Versions with accessible path.' 
+ ) + } + + item = { + 'label': 'Items to view', + 'type': 'enumerator', + 'name': 'path', + 'data': sorted( + items, + key=itemgetter('label'), + reverse=True + ) } + if select_value is not None: + item['value'] = select_value + else: + item['value'] = last_available + + return {'items': [item]} + + def launch(self, session, entities, event): + """Callback method for DJVView action.""" + + # Launching application + if "values" not in event["data"]: + return + filename = event['data']['values']['path'] + + fps = entities[0].get('custom_attributes', {}).get('fps', None) + + cmd = [] + # DJV path + cmd.append(os.path.normpath(self.djv_path)) + # DJV Options Start ############################################## + # '''layer name''' + # cmd.append('-file_layer (value)') + # ''' Proxy scale: 1/2, 1/4, 1/8''' + # cmd.append('-file_proxy 1/2') + # ''' Cache: True, False.''' + # cmd.append('-file_cache True') + # ''' Start in full screen ''' + # cmd.append('-window_fullscreen') + # ''' Toolbar controls: False, True.''' + # cmd.append("-window_toolbar False") + # ''' Window controls: False, True.''' + # cmd.append("-window_playbar False") + # ''' Grid overlay: None, 1x1, 10x10, 100x100.''' + # cmd.append("-view_grid None") + # ''' Heads up display: True, False.''' + # cmd.append("-view_hud True") + ''' Playback: Stop, Forward, Reverse.''' + cmd.append("-playback Forward") + # ''' Frame.''' + # cmd.append("-playback_frame (value)") + if fps is not None: + cmd.append("-playback_speed {}".format(int(fps))) + # ''' Timer: Sleep, Timeout. Value: Sleep.''' + # cmd.append("-playback_timer (value)") + # ''' Timer resolution (seconds): 0.001.''' + # cmd.append("-playback_timer_resolution (value)") + ''' Time units: Timecode, Frames.''' + cmd.append("-time_units Frames") + # DJV Options End ################################################ + + # PATH TO COMPONENT + cmd.append(os.path.normpath(filename)) + + try: + # Run DJV with these commands + subprocess.Popen(' '.join(cmd)) + except FileNotFoundError: + return { + 'success': False, + 'message': 'File "{}" was not found.'.format( + os.path.basename(filename) + ) + } + + return True def register(session): @@ -403,8 +240,7 @@ def register(session): if not isinstance(session, ftrack_api.session.Session): return - action = DJVViewAction(session) - action.register() + DJVViewAction(session).register() def main(arguments=None): diff --git a/pype/ftrack/actions/action_job_killer.py b/pype/ftrack/actions/action_job_killer.py index 008b36b1c1..25c0c6a489 100644 --- a/pype/ftrack/actions/action_job_killer.py +++ b/pype/ftrack/actions/action_job_killer.py @@ -14,7 +14,7 @@ class JobKiller(BaseAction): #: Action label. label = 'Job Killer' #: Action description. 
- description = 'Killing all running jobs younger than day' + description = 'Killing selected running jobs' #: roles that are allowed to register this action role_list = ['Pypeclub', 'Administrator'] icon = ( @@ -34,29 +34,42 @@ class JobKiller(BaseAction): jobs = session.query( 'select id, status from Job' ' where status in ("queued", "running")' - ) + ).all() items = [] import json + item_splitter = {'type': 'label', 'value': '---'} for job in jobs: data = json.loads(job['data']) user = job['user']['username'] created = job['created_at'].strftime('%d.%m.%Y %H:%M:%S') - label = '{}/ {}/ {}'.format( + label = '{} - {} - {}'.format( data['description'], created, user ) + item_label = { + 'type': 'label', + 'value': label + } item = { - 'label': label, 'name': job['id'], 'type': 'boolean', 'value': False } + if len(items) > 0: + items.append(item_splitter) + items.append(item_label) items.append(item) - return { - 'items': items, - 'title': title - } + if len(items) == 0: + return { + 'success': False, + 'message': 'Didn\'t found any running jobs' + } + else: + return { + 'items': items, + 'title': title + } def launch(self, session, entities, event): """ GET JOB """ diff --git a/pype/ftrack/actions/action_multiple_notes.py b/pype/ftrack/actions/action_multiple_notes.py new file mode 100644 index 0000000000..c61f5b1e9c --- /dev/null +++ b/pype/ftrack/actions/action_multiple_notes.py @@ -0,0 +1,162 @@ +import os +import sys +import argparse +import logging +import json +import ftrack_api + +from pype.ftrack import BaseAction + + +class MultipleNotes(BaseAction): + '''Edit meta data action.''' + + #: Action identifier. + identifier = 'multiple.notes' + #: Action label. + label = 'Multiple Notes' + #: Action description. + description = 'Add same note to multiple Asset Versions' + icon = ( + 'https://cdn2.iconfinder.com/data/icons/' + 'mixed-rounded-flat-icon/512/note_1-512.png' + ) + + def discover(self, session, entities, event): + ''' Validation ''' + valid = True + for entity in entities: + if entity.entity_type.lower() != 'assetversion': + valid = False + break + return valid + + def interface(self, session, entities, event): + if not event['data'].get('values', {}): + note_label = { + 'type': 'label', + 'value': '# Enter note: #' + } + + note_value = { + 'name': 'note', + 'type': 'textarea' + } + + category_label = { + 'type': 'label', + 'value': '## Category: ##' + } + + category_data = [] + category_data.append({ + 'label': '- None -', + 'value': 'none' + }) + all_categories = session.query('NoteCategory').all() + for cat in all_categories: + category_data.append({ + 'label': cat['name'], + 'value': cat['id'] + }) + category_value = { + 'type': 'enumerator', + 'name': 'category', + 'data': category_data, + 'value': 'none' + } + + splitter = { + 'type': 'label', + 'value': '{}'.format(200*"-") + } + + items = [] + items.append(note_label) + items.append(note_value) + items.append(splitter) + items.append(category_label) + items.append(category_value) + return items + + def launch(self, session, entities, event): + if 'values' not in event['data']: + return + + values = event['data']['values'] + if len(values) <= 0 or 'note' not in values: + return False + # Get Note text + note_value = values['note'] + if note_value.lower().strip() == '': + return False + # Get User + user = session.query( + 'User where username is "{}"'.format(session.api_user) + ).one() + # Base note data + note_data = { + 'content': note_value, + 'author': user + } + # Get category + category_value = 
values['category'] + if category_value != 'none': + category = session.query( + 'NoteCategory where id is "{}"'.format(category_value) + ).one() + note_data['category'] = category + # Create notes for entities + for entity in entities: + new_note = session.create('Note', note_data) + entity['notes'].append(new_note) + session.commit() + return True + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + + if not isinstance(session, ftrack_api.session.Session): + return + + MultipleNotes(session).register() + + +def main(arguments=None): + '''Set up logging and register action.''' + if arguments is None: + arguments = [] + + parser = argparse.ArgumentParser() + # Allow setting of logging level from arguments. + loggingLevels = {} + for level in ( + logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, + logging.ERROR, logging.CRITICAL + ): + loggingLevels[logging.getLevelName(level).lower()] = level + + parser.add_argument( + '-v', '--verbosity', + help='Set the logging output verbosity.', + choices=loggingLevels.keys(), + default='info' + ) + namespace = parser.parse_args(arguments) + + # Set up basic logging + logging.basicConfig(level=loggingLevels[namespace.verbosity]) + + session = ftrack_api.Session() + register(session) + + # Wait for events + logging.info( + 'Registered actions and listening for events. Use Ctrl-C to abort.' + ) + session.event_hub.wait() + + +if __name__ == '__main__': + raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/actions/action_sync_to_avalon_local.py b/pype/ftrack/actions/action_sync_to_avalon_local.py index d3fb140cc4..1056b5ee55 100644 --- a/pype/ftrack/actions/action_sync_to_avalon_local.py +++ b/pype/ftrack/actions/action_sync_to_avalon_local.py @@ -82,15 +82,11 @@ class SyncToAvalon(BaseAction): 'user': user, 'status': 'running', 'data': json.dumps({ - 'description': 'Synch Ftrack to Avalon.' + 'description': 'Sync Ftrack to Avalon.' }) }) - + session.commit() try: - self.log.info( - "Action <" + self.__class__.__name__ + "> is running" - ) - self.importable = [] # get from top entity in hierarchy all parent entities @@ -137,26 +133,11 @@ class SyncToAvalon(BaseAction): ) if 'errors' in result and len(result['errors']) > 0: - items = [] - for error in result['errors']: - for key, message in error.items(): - name = key.lower().replace(' ', '') - info = { - 'label': key, - 'type': 'textarea', - 'name': name, - 'value': message - } - items.append(info) - self.log.error( - '{}: {}'.format(key, message) - ) - title = 'Hey You! Few Errors were raised! (*look below*)' - job['status'] = 'failed' session.commit() - self.show_interface(event, items, title) + ftracklib.show_errors(self, event, result['errors']) + return { 'success': False, 'message': "Sync to avalon FAILED" @@ -167,7 +148,6 @@ class SyncToAvalon(BaseAction): avalon_project = result['project'] job['status'] = 'done' - self.log.info('Synchronization to Avalon was successfull!') except ValueError as ve: job['status'] = 'failed' diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py index a3ad4d34cf..22358cd775 100644 --- a/pype/ftrack/events/action_sync_to_avalon.py +++ b/pype/ftrack/events/action_sync_to_avalon.py @@ -98,15 +98,11 @@ class Sync_To_Avalon(BaseAction): 'user': user, 'status': 'running', 'data': json.dumps({ - 'description': 'Synch Ftrack to Avalon.' + 'description': 'Sync Ftrack to Avalon.' 
}) }) - + session.commit() try: - self.log.info( - "Action <" + self.__class__.__name__ + "> is running" - ) - self.importable = [] # get from top entity in hierarchy all parent entities @@ -153,26 +149,11 @@ class Sync_To_Avalon(BaseAction): ) if 'errors' in result and len(result['errors']) > 0: - items = [] - for error in result['errors']: - for key, message in error.items(): - name = key.lower().replace(' ', '') - info = { - 'label': key, - 'type': 'textarea', - 'name': name, - 'value': message - } - items.append(info) - self.log.error( - '{}: {}'.format(key, message) - ) - title = 'Hey You! Few Errors were raised! (*look below*)' - job['status'] = 'failed' session.commit() - self.show_interface(event, items, title) + lib.show_errors(self, event, result['errors']) + return { 'success': False, 'message': "Sync to avalon FAILED" @@ -184,7 +165,6 @@ class Sync_To_Avalon(BaseAction): job['status'] = 'done' session.commit() - self.log.info('Synchronization to Avalon was successfull!') except ValueError as ve: job['status'] = 'failed' diff --git a/pype/ftrack/events/event_del_avalon_id_from_new.py b/pype/ftrack/events/event_del_avalon_id_from_new.py index c449739800..7659191637 100644 --- a/pype/ftrack/events/event_del_avalon_id_from_new.py +++ b/pype/ftrack/events/event_del_avalon_id_from_new.py @@ -13,25 +13,25 @@ class DelAvalonIdFromNew(BaseEvent): ''' priority = Sync_to_Avalon.priority - 1 - def launch(self, event): + def launch(self, session, event): created = [] entities = event['data']['entities'] for entity in entities: try: entity_id = entity['entityId'] - if entity['action'] == 'add': + if entity.get('action', None) == 'add': id_dict = entity['changes']['id'] if id_dict['new'] is not None and id_dict['old'] is None: created.append(id_dict['new']) elif ( - entity['action'] == 'update' and + entity.get('action', None) == 'update' and get_ca_mongoid() in entity['keys'] and entity_id in created ): - ftrack_entity = self.session.get( + ftrack_entity = session.get( self._get_entity_type(entity), entity_id ) @@ -44,21 +44,13 @@ class DelAvalonIdFromNew(BaseEvent): ftrack_entity['custom_attributes'][ get_ca_mongoid() ] = '' - self.session.commit() + session.commit() except Exception: + session.rollback() continue - def register(self): - '''Registers the event, subscribing the discover and launch topics.''' - self.session.event_hub.subscribe( - 'topic=ftrack.update', - self.launch, - priority=self.priority - ) - - def register(session, **kw): '''Register plugin. 
Called when used as an plugin.''' if not isinstance(session, ftrack_api.session.Session): diff --git a/pype/ftrack/events/event_next_task_update.py b/pype/ftrack/events/event_next_task_update.py index b6c82b930c..e677e53fb2 100644 --- a/pype/ftrack/events/event_next_task_update.py +++ b/pype/ftrack/events/event_next_task_update.py @@ -34,49 +34,57 @@ class NextTaskUpdate(BaseEvent): return None - def launch(self, session, entities, event): + def launch(self, session, event): '''Propagates status from version to task when changed''' # self.log.info(event) # start of event procedure ---------------------------------- for entity in event['data'].get('entities', []): + changes = entity.get('changes', None) + if changes is None: + continue + statusid_changes = changes.get('statusid', {}) + if ( + entity['entityType'] != 'task' or + 'statusid' not in entity['keys'] or + statusid_changes.get('new', None) is None or + statusid_changes.get('old', None) is None + ): + continue - if (entity['entityType'] == 'task' and - 'statusid' in entity['keys']): + task = session.get('Task', entity['entityId']) - task = session.get('Task', entity['entityId']) + status = session.get('Status', + entity['changes']['statusid']['new']) + state = status['state']['name'] - status = session.get('Status', - entity['changes']['statusid']['new']) - state = status['state']['name'] + next_task = self.get_next_task(task, session) - next_task = self.get_next_task(task, session) + # Setting next task to Ready, if on NOT READY + if next_task and state == 'Done': + if next_task['status']['name'].lower() == 'not ready': - # Setting next task to Ready, if on NOT READY - if next_task and state == 'Done': - if next_task['status']['name'].lower() == 'not ready': + # Get path to task + path = task['name'] + for p in task['ancestors']: + path = p['name'] + '/' + path - # Get path to task - path = task['name'] - for p in task['ancestors']: - path = p['name'] + '/' + path + # Setting next task status + try: + query = 'Status where name is "{}"'.format('Ready') + status_to_set = session.query(query).one() + next_task['status'] = status_to_set + session.commit() + self.log.info(( + '>>> [ {} ] updated to [ Ready ]' + ).format(path)) + except Exception as e: + self.log.warning(( + '!!! [ {} ] status couldnt be set: [ {} ]' + ).format(path, e)) + session.rollback() - # Setting next task status - try: - query = 'Status where name is "{}"'.format('Ready') - status_to_set = session.query(query).one() - next_task['status'] = status_to_set - except Exception as e: - self.log.warning(( - '!!! [ {} ] status couldnt be set: [ {} ]' - ).format(path, e)) - else: - self.log.info(( - '>>> [ {} ] updated to [ Ready ]' - ).format(path)) - - session.commit() def register(session, **kw): '''Register plugin. 
Called when used as an plugin.''' diff --git a/pype/ftrack/events/event_radio_buttons.py b/pype/ftrack/events/event_radio_buttons.py index 7af720d95d..f96d90307d 100644 --- a/pype/ftrack/events/event_radio_buttons.py +++ b/pype/ftrack/events/event_radio_buttons.py @@ -2,9 +2,12 @@ import ftrack_api from pype.ftrack import BaseEvent +ignore_me = True + + class Radio_buttons(BaseEvent): - def launch(self, session, entities, event): + def launch(self, session, event): '''Provides a readio button behaviour to any bolean attribute in radio_button group.''' diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 1699ea5d3c..1deaa3d17e 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -4,7 +4,12 @@ from pype.ftrack import BaseEvent, lib class Sync_to_Avalon(BaseEvent): - def launch(self, session, entities, event): + ignore_entityType = [ + 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer', + 'socialfeed', 'socialnotification', 'timelog' + ] + + def launch(self, session, event): ca_mongoid = lib.get_ca_mongoid() # If mongo_id textfield has changed: RETURN! # - infinite loop @@ -13,6 +18,7 @@ class Sync_to_Avalon(BaseEvent): if ca_mongoid in ent['keys']: return + entities = self._get_entities(session, event, self.ignore_entityType) ft_project = None # get project for entity in entities: @@ -83,23 +89,9 @@ class Sync_to_Avalon(BaseEvent): custom_attributes=custom_attributes ) if 'errors' in result and len(result['errors']) > 0: - items = [] - for error in result['errors']: - for key, message in error.items(): - name = key.lower().replace(' ', '') - info = { - 'label': key, - 'type': 'textarea', - 'name': name, - 'value': message - } - items.append(info) - self.log.error( - '{}: {}'.format(key, message) - ) session.commit() - title = 'Hey You! You raised few Errors! (*look below*)' - self.show_interface(event, items, title) + lib.show_errors(self, event, result['errors']) + return if avalon_project is None: @@ -108,56 +100,21 @@ class Sync_to_Avalon(BaseEvent): except Exception as e: message = str(e) + title = 'Hey You! Unknown Error has been raised! (*look below*)' ftrack_message = ( 'SyncToAvalon event ended with unexpected error' - ' please check log file for more information.' + ' please check log file or contact Administrator' + ' for more information.' ) - items = [{ - 'label': 'Fatal Error', - 'type': 'textarea', - 'name': 'error', - 'value': ftrack_message - }] - title = 'Hey You! Unknown Error has been raised! (*look below*)' + items = [ + {'type': 'label', 'value':'# Fatal Error'}, + {'type': 'label', 'value': '

{}

'.format(ftrack_message)} + ] self.show_interface(event, items, title) - self.log.error(message) + self.log.error('Fatal error during sync: {}'.format(message)) return - def _launch(self, event): - self.session.reset() - - args = self._translate_event( - self.session, event - ) - - self.launch( - self.session, *args - ) - return - - def _translate_event(self, session, event): - exceptions = [ - 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer', - 'socialfeed', 'timelog' - ] - _selection = event['data'].get('entities', []) - - _entities = list() - for entity in _selection: - if entity['entityType'] in exceptions: - continue - _entities.append( - ( - session.get( - self._get_entity_type(entity), - entity.get('entityId') - ) - ) - ) - - return [_entities, event] - def register(session, **kw): '''Register plugin. Called when used as an plugin.''' diff --git a/pype/ftrack/events/event_test.py b/pype/ftrack/events/event_test.py index ecefc628f3..46e16cbb95 100644 --- a/pype/ftrack/events/event_test.py +++ b/pype/ftrack/events/event_test.py @@ -13,7 +13,7 @@ class Test_Event(BaseEvent): priority = 10000 - def launch(self, session, entities, event): + def launch(self, session, event): '''just a testing event''' diff --git a/pype/ftrack/events/event_thumbnail_updates.py b/pype/ftrack/events/event_thumbnail_updates.py index 62a194d167..50089e26b8 100644 --- a/pype/ftrack/events/event_thumbnail_updates.py +++ b/pype/ftrack/events/event_thumbnail_updates.py @@ -4,7 +4,7 @@ from pype.ftrack import BaseEvent class ThumbnailEvents(BaseEvent): - def launch(self, session, entities, event): + def launch(self, session, event): '''just a testing event''' # self.log.info(event) @@ -23,8 +23,12 @@ class ThumbnailEvents(BaseEvent): parent['name'], task['name'])) # Update task thumbnail from published version - if (entity['entityType'] == 'assetversion' and - entity['action'] == 'encoded'): + # if (entity['entityType'] == 'assetversion' and + # entity['action'] == 'encoded'): + if ( + entity['entityType'] == 'assetversion' + and 'thumbid' in entity['keys'] + ): version = session.get('AssetVersion', entity['entityId']) thumbnail = version.get('thumbnail') @@ -40,6 +44,7 @@ class ThumbnailEvents(BaseEvent): pass + def register(session, **kw): '''Register plugin. Called when used as an plugin.''' if not isinstance(session, ftrack_api.session.Session): diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index c5c1d9b664..d1393e622e 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -4,7 +4,7 @@ from pype.ftrack import BaseEvent class VersionToTaskStatus(BaseEvent): - def launch(self, session, entities, event): + def launch(self, session, event): '''Propagates status from version to task when changed''' session.commit() @@ -13,7 +13,7 @@ class VersionToTaskStatus(BaseEvent): # Filter non-assetversions if ( entity['entityType'] == 'assetversion' and - 'statusid' in entity['keys'] + 'statusid' in entity.get('keys', []) ): version = session.get('AssetVersion', entity['entityId']) @@ -45,10 +45,9 @@ class VersionToTaskStatus(BaseEvent): task_status = session.query(query).one() except Exception: self.log.info( - 'During update {}: Status {} was not found'.format( - entity['name'], status_to_set - ) - ) + '!!! 
status was not found in Ftrack [ {} ]'.format( + status_to_set + )) continue # Proceed if the task status was set diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py index 831d33c4d7..14dd3d11f7 100644 --- a/pype/ftrack/ftrack_server/ftrack_server.py +++ b/pype/ftrack/ftrack_server/ftrack_server.py @@ -118,15 +118,18 @@ class FtrackServer(): if len(functions) < 1: raise Exception + function_counter = 0 for function in functions: try: function['register'](self.session) + if function_counter%7 == 0: + time.sleep(0.1) + function_counter += 1 except Exception as e: msg = '"{}" - register was not successful ({})'.format( function['name'], str(e) ) log.warning(msg) - time.sleep(0.05) def run_server(self): self.session = ftrack_api.Session(auto_connect_event_hub=True,) diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 851b6f3ed6..030b0b5b6c 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -541,3 +541,26 @@ def get_config_data(): log.warning("{} - {}".format(msg, str(e))) return data + +def show_errors(obj, event, errors): + title = 'Hey You! You raised few Errors! (*look below*)' + items = [] + splitter = {'type': 'label', 'value': '---'} + for error in errors: + for key, message in error.items(): + error_title = { + 'type': 'label', + 'value': '# {}'.format(key) + } + error_message = { + 'type': 'label', + 'value': '

{}

'.format(message) + } + if len(items) > 0: + items.append(splitter) + items.append(error_title) + items.append(error_message) + obj.log.error( + '{}: {}'.format(key, message) + ) + obj.show_interface(event, items, title) diff --git a/pype/ftrack/lib/ftrack_action_handler.py b/pype/ftrack/lib/ftrack_action_handler.py index 2249611a4b..c6d6181c1f 100644 --- a/pype/ftrack/lib/ftrack_action_handler.py +++ b/pype/ftrack/lib/ftrack_action_handler.py @@ -84,14 +84,20 @@ class BaseAction(BaseHandler): def _handle_result(self, session, result, entities, event): '''Validate the returned result from the action callback''' if isinstance(result, bool): - result = { - 'success': result, - 'message': ( - '{0} launched successfully.'.format( - self.label + if result is True: + result = { + 'success': result, + 'message': ( + '{0} launched successfully.'.format(self.label) ) - ) - } + } + else: + result = { + 'success': result, + 'message': ( + '{0} launch failed.'.format(self.label) + ) + } elif isinstance(result, dict): if 'items' in result: diff --git a/pype/ftrack/lib/ftrack_app_handler.py b/pype/ftrack/lib/ftrack_app_handler.py index 6d07a39f81..7498db5b62 100644 --- a/pype/ftrack/lib/ftrack_app_handler.py +++ b/pype/ftrack/lib/ftrack_app_handler.py @@ -148,6 +148,25 @@ class AppAction(BaseHandler): entity = entities[0] project_name = entity['project']['full_name'] + # Validate Clockify settings if Clockify is required + clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None) + if clockify_timer is not None: + from pype.clockify import ClockifyAPI + clockapi = ClockifyAPI() + if clockapi.verify_api() is False: + title = 'Launch message' + header = '# You Can\'t launch **any Application**' + message = ( + '

You don\'t have a Clockify API key' + ' set in Clockify settings

' + ) + items = [ + {'type': 'label', 'value': header}, + {'type': 'label', 'value': message} + ] + self.show_interface(event, items, title) + return False + database = pypelib.get_avalon_database() # Get current environments @@ -188,6 +207,7 @@ class AppAction(BaseHandler): "code": entity['project']['name']}, "task": entity['name'], "asset": entity['parent']['name'], + "app": application["application_dir"], "hierarchy": hierarchy} try: anatomy_filled = anatomy.format(data) @@ -306,6 +326,31 @@ class AppAction(BaseHandler): self.log.info('Starting timer for task: ' + task['name']) user.start_timer(task, force=True) + # RUN TIMER IN Clockify + if clockify_timer is not None: + task_type = task['type']['name'] + project_name = task['project']['full_name'] + + def get_parents(entity): + output = [] + if entity.entity_type.lower() == 'project': + return output + output.extend(get_parents(entity['parent'])) + output.append(entity['name']) + + return output + + desc_items = get_parents(task['parent']) + desc_items.append(task['name']) + description = '/'.join(desc_items) + + project_id = clockapi.get_project_id(project_name) + tag_ids = [] + tag_ids.append(clockapi.get_tag_id(task_type)) + clockapi.start_time_entry( + description, project_id, tag_ids=tag_ids + ) + # Change status of task to In progress config = get_config_data() diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py index 6d56fcb010..3e08eb122d 100644 --- a/pype/ftrack/lib/ftrack_base_handler.py +++ b/pype/ftrack/lib/ftrack_base_handler.py @@ -5,8 +5,10 @@ from pype.vendor import ftrack_api class MissingPermision(Exception): - def __init__(self): - super().__init__('Missing permission') + def __init__(self, message=None): + if message is None: + message = 'Ftrack' + super().__init__(message) class BaseHandler(object): @@ -64,10 +66,14 @@ class BaseHandler(object): self.log.info(( '{} "{}" - Registered successfully ({:.4f}sec)' ).format(self.type, label, run_time)) - except MissingPermision: + except MissingPermision as MPE: self.log.info(( - '!{} "{}" - You\'re missing required permissions' - ).format(self.type, label)) + '!{} "{}" - You\'re missing required {} permissions' + ).format(self.type, label, str(MPE))) + except AssertionError as ae: + self.log.info(( + '!{} "{}" - {}' + ).format(self.type, label, str(ae))) except NotImplementedError: self.log.error(( '{} "{}" - Register method is not implemented' @@ -97,9 +103,12 @@ class BaseHandler(object): self.log.info(('{} "{}": Finished').format(self.type, label)) return result except Exception as e: - self.log.error('{} "{}": Failed ({})'.format( - self.type, label, str(e)) - ) + msg = '{} "{}": Failed ({})'.format(self.type, label, str(e)) + self.log.error(msg) + return { + 'success': False, + 'message': msg + } return wrapper_launch @property @@ -165,22 +174,31 @@ class BaseHandler(object): '''Return *event* translated structure to be used with the API.''' _entities = event['data'].get('entities_object', None) - if _entities is None: - selection = event['data'].get('selection', []) - _entities = [] - for entity in selection: - _entities.append( - self.session.get( - self._get_entity_type(entity), - entity.get('entityId') - ) - ) + if ( + _entities is None or + _entities[0].get('link', None) == ftrack_api.symbol.NOT_SET + ): + _entities = self._get_entities(event) return [ _entities, event ] + def _get_entities(self, event): + self.session._local_cache.clear() + selection = event['data'].get('selection', []) + _entities = [] + for entity in 
selection: + _entities.append( + self.session.get( + self._get_entity_type(entity), + entity.get('entityId') + ) + ) + event['data']['entities_object'] = _entities + return _entities + def _get_entity_type(self, entity): '''Return translated entity type tht can be used with API.''' # Get entity type and make sure it is lower cased. Most places except @@ -248,7 +266,10 @@ class BaseHandler(object): def _interface(self, *args): interface = self.interface(*args) if interface: - if 'items' in interface: + if ( + 'items' in interface or + ('success' in interface and 'message' in interface) + ): return interface return { @@ -273,23 +294,31 @@ class BaseHandler(object): def _handle_result(self, session, result, entities, event): '''Validate the returned result from the action callback''' if isinstance(result, bool): - result = { - 'success': result, - 'message': ( - '{0} launched successfully.'.format( - self.label + if result is True: + result = { + 'success': result, + 'message': ( + '{0} launched successfully.'.format(self.label) ) - ) - } + } + else: + result = { + 'success': result, + 'message': ( + '{0} launch failed.'.format(self.label) + ) + } elif isinstance(result, dict): - for key in ('success', 'message'): - if key in result: - continue + items = 'items' in result + if items is False: + for key in ('success', 'message'): + if key in result: + continue - raise KeyError( - 'Missing required key: {0}.'.format(key) - ) + raise KeyError( + 'Missing required key: {0}.'.format(key) + ) else: self.log.error( diff --git a/pype/ftrack/lib/ftrack_event_handler.py b/pype/ftrack/lib/ftrack_event_handler.py index 2cbc3782b8..c6c91e7428 100644 --- a/pype/ftrack/lib/ftrack_event_handler.py +++ b/pype/ftrack/lib/ftrack_event_handler.py @@ -1,3 +1,4 @@ +import functools from .ftrack_base_handler import BaseHandler @@ -18,6 +19,18 @@ class BaseEvent(BaseHandler): '''Expects a ftrack_api.Session instance''' super().__init__(session) + # Decorator + def launch_log(self, func): + @functools.wraps(func) + def wrapper_launch(*args, **kwargs): + try: + func(*args, **kwargs) + except Exception as e: + self.log.info('{} Failed ({})'.format( + self.__class__.__name__, str(e)) + ) + return wrapper_launch + def register(self): '''Registers the event, subscribing the discover and launch topics.''' self.session.event_hub.subscribe( @@ -27,23 +40,31 @@ class BaseEvent(BaseHandler): ) def _launch(self, event): - args = self._translate_event( - self.session, event - ) + self.session.rollback() + self.session._local_cache.clear() self.launch( - self.session, *args + self.session, event ) return def _translate_event(self, session, event): '''Return *event* translated structure to be used with the API.''' - _selection = event['data'].get('entities', []) + return [ + self._get_entities(session, event), + event + ] + def _get_entities( + self, session, event, ignore=['socialfeed', 'socialnotification'] + ): + _selection = event['data'].get('entities', []) _entities = list() + if isinstance(ignore, str): + ignore = list(ignore) for entity in _selection: - if entity['entityType'] in ['socialfeed']: + if entity['entityType'] in ignore: continue _entities.append( ( @@ -53,8 +74,4 @@ class BaseEvent(BaseHandler): ) ) ) - - return [ - _entities, - event - ] + return _entities diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index adc5f6b14a..f08e52857e 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -2,7 +2,7 @@ import sys from collections import OrderedDict from pprint import pprint from avalon.vendor.Qt import QtGui 
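The `BaseEvent` changes above move `launch()` to a plain `(session, event)` signature and resolve entities lazily through `_get_entities`, filtering out noisy entity types and caching the result back onto the event. Below is a minimal, runnable sketch of that pattern; `DummySession` is a hypothetical stand-in for a real `ftrack_api.Session`, the entity type is hard-coded to `'Task'` instead of going through `_get_entity_type`, and a bare string `ignore` value is wrapped in a one-element list (note that `list('socialfeed')` would split the string into single characters).

```python
# Sketch of the lazy entity-resolution pattern from BaseEvent above.
# DummySession is a hypothetical stand-in so the example runs anywhere.


class DummySession(object):
    def get(self, entity_type, entity_id):
        # A real ftrack session would query the server; we just echo.
        return {'type': entity_type, 'id': entity_id}


def get_entities(session, event, ignore=('socialfeed', 'socialnotification')):
    if isinstance(ignore, str):
        ignore = [ignore]  # one-element list, not list('...') char-split
    entities = []
    for entity in event['data'].get('entities', []):
        if entity['entityType'] in ignore:
            continue  # skip event noise that should never be synced
        entities.append(session.get('Task', entity['entityId']))
    # Cache on the event so later handlers can reuse the resolved objects.
    event['data']['entities_object'] = entities
    return entities


event = {'data': {'entities': [
    {'entityType': 'task', 'entityId': '123'},
    {'entityType': 'socialfeed', 'entityId': '456'},
]}}
print(get_entities(DummySession(), event))  # the socialfeed entry is skipped
```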
-from avalon import api, io +from avalon import api, io, lib import avalon.nuke import pype.api as pype import nuke @@ -37,10 +37,12 @@ def writes_version_sync(): for each in nuke.allNodes(): if each.Class() == 'Write': avalon_knob_data = get_avalon_knob_data(each) - if avalon_knob_data['families'] not in ["render"]: - log.info(avalon_knob_data['families']) - continue + try: + if avalon_knob_data['families'] not in ["render"]: + log.info(avalon_knob_data['families']) + continue + node_file = each['file'].value() log.info("node_file: {}".format(node_file)) @@ -90,6 +92,7 @@ def create_write_node(name, data): ) nuke_dataflow_writes = get_dataflow(**data) nuke_colorspace_writes = get_colorspace(**data) + application = lib.get_application(os.environ["AVALON_APP_NAME"]) try: anatomy_filled = format_anatomy({ "subset": data["avalon"]["subset"], @@ -99,6 +102,7 @@ def create_write_node(name, data): "project": {"name": pype.get_project_name(), "code": pype.get_project_code()}, "representation": nuke_dataflow_writes.file_type, + "app": application["application_dir"], }) except Exception as e: log.error("problem with resolving anatomy tepmlate: {}".format(e)) diff --git a/pype/plugins/global/load/open_djv.py b/pype/plugins/global/load/open_djv.py index 29f8e8ba08..bd49d86d5f 100644 --- a/pype/plugins/global/load/open_djv.py +++ b/pype/plugins/global/load/open_djv.py @@ -81,7 +81,7 @@ class OpenInDJV(api.Loader): '''layer name''' # cmd.append('-file_layer (value)') ''' Proxy scale: 1/2, 1/4, 1/8''' - cmd.append('-file_proxy 1/2') + # cmd.append('-file_proxy 1/2') ''' Cache: True, False.''' cmd.append('-file_cache True') ''' Start in full screen ''' diff --git a/pype/plugins/global/publish/collect_assumed_destination.py b/pype/plugins/global/publish/collect_assumed_destination.py index 058af12340..16a299d524 100644 --- a/pype/plugins/global/publish/collect_assumed_destination.py +++ b/pype/plugins/global/publish/collect_assumed_destination.py @@ -1,5 +1,5 @@ -import pyblish.api import os +import pyblish.api from avalon import io, api @@ -8,7 +8,7 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin): """Generate the assumed destination path where the file will be stored""" label = "Collect Assumed Destination" - order = pyblish.api.CollectorOrder + 0.499 + order = pyblish.api.CollectorOrder + 0.498 exclude_families = ["clip"] def process(self, instance): @@ -76,6 +76,9 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin): Returns: file path (str) """ + if [ef for ef in self.exclude_families + if instance.data["family"] in ef]: + return # get all the stuff from the database subset_name = instance.data["subset"] @@ -87,7 +90,7 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin): projection={"config": True, "data": True}) template = project["config"]["template"]["publish"] - # anatomy = instance.context.data['anatomy'] + anatomy = instance.context.data['anatomy'] asset = io.find_one({"type": "asset", "name": asset_name, @@ -129,5 +132,10 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin): "hierarchy": hierarchy, "representation": "TEMP"} - instance.data["assumedTemplateData"] = template_data instance.data["template"] = template + instance.data["assumedTemplateData"] = template_data + + # We take the parent folder of representation 'filepath' + instance.data["assumedDestination"] = os.path.dirname( + (anatomy.format(template_data)).publish.path + ) diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index 
a99e6bc787..7720c9d56d 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -16,9 +16,10 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): """ label = "Extract Jpeg EXR" + hosts = ["shell"] order = pyblish.api.ExtractorOrder families = ["imagesequence", "render", "write", "source"] - host = ["shell"] + def process(self, instance): start = instance.data.get("startFrame") diff --git a/pype/plugins/global/publish/extract_quicktime.py b/pype/plugins/global/publish/extract_quicktime.py index a226bf7e2a..621078e3c0 100644 --- a/pype/plugins/global/publish/extract_quicktime.py +++ b/pype/plugins/global/publish/extract_quicktime.py @@ -18,7 +18,7 @@ class ExtractQuicktimeEXR(pyblish.api.InstancePlugin): label = "Extract Quicktime EXR" order = pyblish.api.ExtractorOrder families = ["imagesequence", "render", "write", "source"] - host = ["shell"] + hosts = ["shell"] def process(self, instance): fps = instance.data.get("fps") diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index be7fc3bcf3..00096a95ee 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -5,6 +5,7 @@ import shutil import errno import pyblish.api from avalon import api, io +from avalon.vendor import filelink log = logging.getLogger(__name__) @@ -91,6 +92,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.debug("Establishing staging directory @ %s" % stagingdir) + # Ensure at least one file is set up for transfer in staging dir. + files = instance.data.get("files", []) + assert files, "Instance has no files to transfer" + assert isinstance(files, (list, tuple)), ( + "Instance 'files' must be a list, got: {0}".format(files) + ) + project = io.find_one({"type": "project"}) asset = io.find_one({"type": "asset", @@ -170,6 +178,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Each should be a single representation (as such, a single extension) representations = [] destination_list = [] + if 'transfers' not in instance.data: + instance.data['transfers'] = [] for files in instance.data["files"]: @@ -271,12 +281,22 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance: the instance to integrate """ - transfers = instance.data["transfers"] + transfers = instance.data.get("transfers", list()) for src, dest in transfers: self.log.info("Copying file .. {} -> {}".format(src, dest)) self.copy_file(src, dest) + # Produce hardlinked copies + # Note: hardlink can only be produced between two files on the same + # server/disk and editing one of the two will edit both files at once. + # As such it is recommended to only make hardlinks between static files + # to ensure publishes remain safe and non-edited. + hardlinks = instance.data.get("hardlinks", list()) + for src, dest in hardlinks: + self.log.info("Hardlinking file .. 
{} -> {}".format(src, dest)) + self.hardlink_file(src, dest) + def copy_file(self, src, dst): """ Copy given source to destination @@ -299,6 +319,20 @@ class IntegrateAsset(pyblish.api.InstancePlugin): shutil.copy(src, dst) + def hardlink_file(self, src, dst): + + dirname = os.path.dirname(dst) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + raise + + filelink.create(src, dst, filelink.HARDLINK) + def get_subset(self, asset, instance): subset = io.find_one({"type": "subset", @@ -362,7 +396,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): families.append(instance_family) families += current_families - self.log.debug("Registered roor: {}".format(api.registered_root())) + self.log.debug("Registered root: {}".format(api.registered_root())) # create relative source path for DB try: source = instance.data['source'] @@ -382,7 +416,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "fps": context.data.get("fps")} # Include optional data if present in - optionals = ["startFrame", "endFrame", "step", "handles"] + optionals = [ + "startFrame", "endFrame", "step", "handles", "sourceHashes" + ] for key in optionals: if key in instance.data: version_data[key] = instance.data[key] diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index ae11d33348..8e7e2a59c4 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -168,6 +168,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin): representations = [] destination_list = [] + if 'transfers' not in instance.data: + instance.data['transfers'] = [] + for files in instance.data["files"]: # Collection # _______ @@ -240,7 +243,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): instance.data["transfers"].append([src, dst]) - template_data["frame"] = "#####" + template_data["frame"] = "#" * anatomy.render.padding anatomy_filled = anatomy.format(template_data) path_to_save = anatomy_filled.render.path template = anatomy.render.fullpath diff --git a/pype/plugins/launcher/actions/ClockifyStart.py b/pype/plugins/launcher/actions/ClockifyStart.py new file mode 100644 index 0000000000..78a8b4e1b6 --- /dev/null +++ b/pype/plugins/launcher/actions/ClockifyStart.py @@ -0,0 +1,49 @@ +from avalon import api, io +from pype.api import Logger +try: + from pype.clockify import ClockifyAPI +except Exception: + pass + +log = Logger.getLogger(__name__, "clockify_start") + + +class ClockifyStart(api.Action): + + name = "clockify_start_timer" + label = "Clockify - Start Timer" + icon = "clockify_icon" + order = 500 + + exec("try: clockapi = ClockifyAPI()\nexcept: clockapi = None") + + def is_compatible(self, session): + """Return whether the action is compatible with the session""" + if self.clockapi is None: + return False + if "AVALON_TASK" in session: + return True + return False + + def process(self, session, **kwargs): + project_name = session['AVALON_PROJECT'] + asset_name = session['AVALON_ASSET'] + task_name = session['AVALON_TASK'] + + description = asset_name + asset = io.find_one({ + 'type': 'asset', + 'name': asset_name + }) + if asset is not None: + desc_items = asset.get('data', {}).get('parents', []) + desc_items.append(asset_name) + desc_items.append(task_name) + description = '/'.join(desc_items) + + project_id = self.clockapi.get_project_id(project_name) + tag_ids = [] + 
tag_ids.append(self.clockapi.get_tag_id(task_name)) + self.clockapi.start_time_entry( + description, project_id, tag_ids=tag_ids + ) diff --git a/pype/plugins/launcher/actions/ClockifySync.py b/pype/plugins/launcher/actions/ClockifySync.py new file mode 100644 index 0000000000..c50fbc4b25 --- /dev/null +++ b/pype/plugins/launcher/actions/ClockifySync.py @@ -0,0 +1,65 @@ +from avalon import api, io +try: + from pype.clockify import ClockifyAPI +except Exception: + pass +from pype.api import Logger +log = Logger.getLogger(__name__, "clockify_sync") + + +class ClockifySync(api.Action): + + name = "sync_to_clockify" + label = "Sync to Clockify" + icon = "clockify_white_icon" + order = 500 + exec( + "try:\n\tclockapi = ClockifyAPI()" + "\n\thave_permissions = clockapi.validate_workspace_perm()" + "\nexcept:\n\tclockapi = None" + ) + + def is_compatible(self, session): + """Return whether the action is compatible with the session""" + if self.clockapi is None: + return False + return self.have_permissions + + def process(self, session, **kwargs): + project_name = session.get('AVALON_PROJECT', None) + + projects_to_sync = [] + if project_name.strip() == '' or project_name is None: + for project in io.projects(): + projects_to_sync.append(project) + else: + project = io.find_one({'type': 'project'}) + projects_to_sync.append(project) + + projects_info = {} + for project in projects_to_sync: + task_types = [task['name'] for task in project['config']['tasks']] + projects_info[project['name']] = task_types + + clockify_projects = self.clockapi.get_projects() + for project_name, task_types in projects_info.items(): + if project_name not in clockify_projects: + response = self.clockapi.add_project(project_name) + if 'id' not in response: + self.log.error('Project {} can\'t be created'.format( + project_name + )) + continue + project_id = response['id'] + else: + project_id = clockify_projects[project_name] + + clockify_workspace_tags = self.clockapi.get_tags() + for task_type in task_types: + if task_type not in clockify_workspace_tags: + response = self.clockapi.add_tag(task_type) + if 'id' not in response: + self.log.error('Task {} can\'t be created'.format( + task_type + )) + continue diff --git a/pype/plugins/maya/create/create_look.py b/pype/plugins/maya/create/create_look.py index 32cda3a28e..299fbafe02 100644 --- a/pype/plugins/maya/create/create_look.py +++ b/pype/plugins/maya/create/create_look.py @@ -15,3 +15,6 @@ class CreateLook(avalon.maya.Creator): super(CreateLook, self).__init__(*args, **kwargs) self.data["renderlayer"] = lib.get_current_renderlayer() + + # Whether to automatically convert the textures to .tx upon publish. 
+ self.data["maketx"] = True diff --git a/pype/plugins/maya/load/load_alembic.py b/pype/plugins/maya/load/load_alembic.py index 9e08702521..9fd4aa2108 100644 --- a/pype/plugins/maya/load/load_alembic.py +++ b/pype/plugins/maya/load/load_alembic.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class AbcLoader(pype.maya.plugin.ReferenceLoader): @@ -16,6 +18,12 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "animation" + + groupName = "{}:{}".format(namespace, name) cmds.loadPlugin("AbcImport.mll", quiet=True) nodes = cmds.file(self.fname, namespace=namespace, @@ -25,6 +33,23 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader): reference=True, returnNewNodes=True) + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + self[:] = nodes return nodes diff --git a/pype/plugins/maya/load/load_ass.py b/pype/plugins/maya/load/load_ass.py index 13ad85473c..c268ce70c5 100644 --- a/pype/plugins/maya/load/load_ass.py +++ b/pype/plugins/maya/load/load_ass.py @@ -2,6 +2,7 @@ from avalon import api import pype.maya.plugin import os import pymel.core as pm +import json class AssProxyLoader(pype.maya.plugin.ReferenceLoader): @@ -21,6 +22,11 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): from avalon import maya import pymel.core as pm + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "ass" + with maya.maintained_selection(): groupName = "{}:{}".format(namespace, name) @@ -34,7 +40,8 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): groupReference=True, groupName=groupName) - cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True) + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) # Set attributes proxyShape = pm.ls(nodes, type="mesh")[0] @@ -43,6 +50,19 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): proxyShape.dso.set(path) proxyShape.aiOverrideShaders.set(0) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) self[:] = nodes @@ -132,7 +152,6 @@ class AssStandinLoader(api.Loader): import mtoa.ui.arnoldmenu import pymel.core as pm - asset = context['asset']['name'] namespace = namespace or lib.unique_namespace( asset + "_", @@ -146,6 +165,20 @@ class AssStandinLoader(api.Loader): label = "{}:{}".format(namespace, name) root = pm.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('ass') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) + # Create transform with shape transform_name = label + 
"_ASS" # transform = pm.createNode("transform", name=transform_name, diff --git a/pype/plugins/maya/load/load_camera.py b/pype/plugins/maya/load/load_camera.py index eb75c3a63d..989e80e979 100644 --- a/pype/plugins/maya/load/load_camera.py +++ b/pype/plugins/maya/load/load_camera.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class CameraLoader(pype.maya.plugin.ReferenceLoader): @@ -16,7 +18,13 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds # Get family type from the context + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "camera" + cmds.loadPlugin("AbcImport.mll", quiet=True) + groupName = "{}:{}".format(namespace, name) nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, @@ -27,6 +35,20 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader): cameras = cmds.ls(nodes, type="camera") + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + # Check the Maya version, lockTransform has been introduced since # Maya 2016.5 Ext 2 version = int(cmds.about(version=True)) diff --git a/pype/plugins/maya/load/load_fbx.py b/pype/plugins/maya/load/load_fbx.py index 2ee3e5fdbd..b580257334 100644 --- a/pype/plugins/maya/load/load_fbx.py +++ b/pype/plugins/maya/load/load_fbx.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class FBXLoader(pype.maya.plugin.ReferenceLoader): @@ -17,6 +19,11 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds from avalon import maya + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "fbx" + # Ensure FBX plug-in is loaded cmds.loadPlugin("fbxmaya", quiet=True) @@ -28,6 +35,21 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader): groupReference=True, groupName="{}:{}".format(namespace, name)) + groupName = "{}:{}".format(namespace, name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + self[:] = nodes return nodes diff --git a/pype/plugins/maya/load/load_mayaascii.py b/pype/plugins/maya/load/load_mayaascii.py index 6f4c6a63a0..549d1dff4c 100644 --- a/pype/plugins/maya/load/load_mayaascii.py +++ b/pype/plugins/maya/load/load_mayaascii.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import json +import os class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader): @@ -19,6 +21,11 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds from avalon import maya + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "model" + with maya.maintained_selection(): nodes = cmds.file(self.fname, namespace=namespace, @@ -28,6 +35,20 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader): groupName="{}:{}".format(namespace, name)) self[:] = nodes + groupName = "{}:{}".format(namespace, name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + 
with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) return nodes diff --git a/pype/plugins/maya/load/load_model.py b/pype/plugins/maya/load/load_model.py index f29af65b72..16f3556de7 100644 --- a/pype/plugins/maya/load/load_model.py +++ b/pype/plugins/maya/load/load_model.py @@ -1,5 +1,7 @@ from avalon import api import pype.maya.plugin +import json +import os class ModelLoader(pype.maya.plugin.ReferenceLoader): @@ -19,13 +21,36 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds from avalon import maya + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "model" + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + with maya.maintained_selection(): + + groupName = "{}:{}".format(namespace, name) nodes = cmds.file(self.fname, namespace=namespace, reference=True, returnNewNodes=True, groupReference=True, - groupName="{}:{}".format(namespace, name)) + groupName=groupName) + + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) self[:] = nodes @@ -64,6 +89,19 @@ class GpuCacheLoader(api.Loader): # Root group label = "{}:{}".format(namespace, name) root = cmds.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('model') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) # Create transform with shape transform_name = label + "_GPU" @@ -125,6 +163,7 @@ class GpuCacheLoader(api.Loader): except RuntimeError: pass + class AbcModelLoader(pype.maya.plugin.ReferenceLoader): """Specific loader of Alembic for the studio.animation family""" @@ -141,15 +180,36 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds + groupName = "{}:{}".format(namespace, name) cmds.loadPlugin("AbcImport.mll", quiet=True) nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, groupReference=True, - groupName="{}:{}".format(namespace, name), + groupName=groupName, reference=True, returnNewNodes=True) + namespace = cmds.referenceQuery(nodes[0], namespace=True) + groupName = "{}:{}".format(namespace, name) + + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('model') + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + self[:] = nodes return nodes diff --git a/pype/plugins/maya/load/load_rig.py b/pype/plugins/maya/load/load_rig.py index aa40ca3cc2..1dcff45bb9 100644 --- a/pype/plugins/maya/load/load_rig.py +++ b/pype/plugins/maya/load/load_rig.py @@ -2,6 +2,8 @@ from maya import cmds import pype.maya.plugin from avalon 
import api, maya +import os +import json class RigLoader(pype.maya.plugin.ReferenceLoader): @@ -21,12 +23,35 @@ class RigLoader(pype.maya.plugin.ReferenceLoader): def process_reference(self, context, name, namespace, data): + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "rig" + + groupName = "{}:{}".format(namespace, name) nodes = cmds.file(self.fname, namespace=namespace, reference=True, returnNewNodes=True, groupReference=True, - groupName="{}:{}".format(namespace, name)) + groupName=groupName) + + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) # Store for post-process self[:] = nodes diff --git a/pype/plugins/maya/load/load_vdb_to_redshift.py b/pype/plugins/maya/load/load_vdb_to_redshift.py index 8ff8bc0326..169c3bf34a 100644 --- a/pype/plugins/maya/load/load_vdb_to_redshift.py +++ b/pype/plugins/maya/load/load_vdb_to_redshift.py @@ -1,4 +1,6 @@ from avalon import api +import os +import json class LoadVDBtoRedShift(api.Loader): @@ -17,6 +19,11 @@ class LoadVDBtoRedShift(api.Loader): import avalon.maya.lib as lib from avalon.maya.pipeline import containerise + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "vdbcache" + # Check if the plugin for redshift is available on the pc try: cmds.loadPlugin("redshift4maya", quiet=True) @@ -48,6 +55,19 @@ class LoadVDBtoRedShift(api.Loader): # Root group label = "{}:{}".format(namespace, name) root = cmds.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) # Create VR volume_node = cmds.createNode("RedshiftVolumeShape", diff --git a/pype/plugins/maya/load/load_vdb_to_vray.py b/pype/plugins/maya/load/load_vdb_to_vray.py index ac20b0eb43..58d6d1b56e 100644 --- a/pype/plugins/maya/load/load_vdb_to_vray.py +++ b/pype/plugins/maya/load/load_vdb_to_vray.py @@ -1,4 +1,6 @@ from avalon import api +import json +import os class LoadVDBtoVRay(api.Loader): @@ -16,6 +18,11 @@ class LoadVDBtoVRay(api.Loader): import avalon.maya.lib as lib from avalon.maya.pipeline import containerise + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "vdbcache" + # Check if viewport drawing engine is Open GL Core (compat) render_engine = None compatible = "OpenGLCoreProfileCompat" @@ -40,6 +47,19 @@ class LoadVDBtoVRay(api.Loader): # Root group label = "{}:{}".format(namespace, name) root = cmds.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) # Create VR grid_node = cmds.createNode("VRayVolumeGrid", diff --git 
a/pype/plugins/maya/load/load_vrayproxy.py b/pype/plugins/maya/load/load_vrayproxy.py index 9396e124ce..a3a114440a 100644 --- a/pype/plugins/maya/load/load_vrayproxy.py +++ b/pype/plugins/maya/load/load_vrayproxy.py @@ -1,6 +1,7 @@ from avalon.maya import lib from avalon import api - +import json +import os import maya.cmds as cmds @@ -20,6 +21,19 @@ class VRayProxyLoader(api.Loader): from avalon.maya.pipeline import containerise from pype.maya.lib import namespaced + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "vrayproxy" + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + asset_name = context['asset']["name"] namespace = namespace or lib.unique_namespace( asset_name + "_", @@ -40,6 +54,12 @@ class VRayProxyLoader(api.Loader): if not nodes: return + c = colors.get(family) + if c is not None: + cmds.setAttr("{0}_{1}.useOutlinerColor".format(name, "GRP"), 1) + cmds.setAttr("{0}_{1}.outlinerColor".format(name, "GRP"), + c[0], c[1], c[2]) + return containerise( name=name, namespace=namespace, @@ -101,7 +121,7 @@ class VRayProxyLoader(api.Loader): # Create nodes vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name)) mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name)) - vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True, + vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True, name="{}_VRMM".format(name)) vray_mat_sg = cmds.sets(name="{}_VRSG".format(name), empty=True, diff --git a/pype/plugins/maya/load/load_yeti_cache.py b/pype/plugins/maya/load/load_yeti_cache.py index 2160924047..b19bed1393 100644 --- a/pype/plugins/maya/load/load_yeti_cache.py +++ b/pype/plugins/maya/load/load_yeti_cache.py @@ -23,6 +23,11 @@ class YetiCacheLoader(api.Loader): def load(self, context, name=None, namespace=None, data=None): + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "yeticache" + # Build namespace asset = context["asset"] if namespace is None: @@ -49,6 +54,19 @@ class YetiCacheLoader(api.Loader): group_name = "{}:{}".format(namespace, name) group_node = cmds.group(nodes, name=group_name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(group_name + ".useOutlinerColor", 1) + cmds.setAttr(group_name + ".outlinerColor", + c[0], c[1], c[2]) nodes.append(group_node) diff --git a/pype/plugins/maya/load/load_yeti_rig.py b/pype/plugins/maya/load/load_yeti_rig.py index 096b936b41..c821c6ca02 100644 --- a/pype/plugins/maya/load/load_yeti_rig.py +++ b/pype/plugins/maya/load/load_yeti_rig.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class YetiRigLoader(pype.maya.plugin.ReferenceLoader): @@ -24,6 +26,20 @@ class YetiRigLoader(pype.maya.plugin.ReferenceLoader): groupReference=True, groupName="{}:{}".format(namespace, name)) + groupName = "{}:{}".format(namespace, name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('yetiRig') + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], 
c[1], c[2]) self[:] = nodes self.log.info("Yeti Rig Connection Manager will be available soon") diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py index 9c7b17acaa..dfefa15fe5 100644 --- a/pype/plugins/maya/publish/collect_look.py +++ b/pype/plugins/maya/publish/collect_look.py @@ -47,6 +47,8 @@ def get_look_attrs(node): for attr in attrs: if attr in SHAPE_ATTRS: result.append(attr) + elif attr.startswith('ai'): + result.append(attr) return result @@ -218,6 +220,7 @@ class CollectLook(pyblish.api.InstancePlugin): # make ftrack publishable instance.data["families"] = ['ftrack'] + instance.data['maketx'] = True def collect(self, instance): @@ -386,6 +389,8 @@ class CollectLook(pyblish.api.InstancePlugin): # Collect changes to "custom" attributes node_attrs = get_look_attrs(node) + self.log.info(node_attrs) + # Only include if there are any properties we care about if not node_attrs: continue diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index a30b1fe7d5..f6fdda8593 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -2,16 +2,97 @@ import os import json import tempfile import contextlib +import subprocess from collections import OrderedDict from maya import cmds import pyblish.api import avalon.maya +from avalon import io import pype.api import pype.maya.lib as lib +# Modes for transfer +COPY = 1 +HARDLINK = 2 + + +def source_hash(filepath, *args): + """Generate simple identifier for a source file. + This is used to identify whether a source file has previously been + processe into the pipeline, e.g. a texture. + The hash is based on source filepath, modification time and file size. + This is only used to identify whether a specific source file was already + published before from the same location with the same modification date. + We opt to do it this way as opposed to Avalanch C4 hash as this is much + faster and predictable enough for all our production use cases. + Args: + filepath (str): The source file path. + You can specify additional arguments in the function + to allow for specific 'processing' values to be included. + """ + # We replace dots with comma because . cannot be a key in a pymongo dict. + file_name = os.path.basename(filepath) + time = str(os.path.getmtime(filepath)) + size = str(os.path.getsize(filepath)) + return "|".join([ + file_name, + time, + size + ] + list(args)).replace(".", ",") + + +def find_paths_by_hash(texture_hash): + # Find the texture hash key in the dictionary and all paths that + # originate from it. + key = "data.sourceHashes.{0}".format(texture_hash) + return io.distinct(key, {"type": "version"}) + + +def maketx(source, destination, *args): + """Make .tx using maketx with some default settings. + The settings are based on default as used in Arnold's + txManager in the scene. + This function requires the `maketx` executable to be + on the `PATH`. + Args: + source (str): Path to source file. + destination (str): Writing destination path. 
+ """ + + cmd = [ + "maketx", + "-v", # verbose + "-u", # update mode + # unpremultiply before conversion (recommended when alpha present) + "--unpremult", + # use oiio-optimized settings for tile-size, planarconfig, metadata + "--oiio" + ] + cmd.extend(args) + cmd.extend([ + "-o", destination, + source + ]) + + CREATE_NO_WINDOW = 0x08000000 + try: + out = subprocess.check_output( + cmd, + stderr=subprocess.STDOUT, + creationflags=CREATE_NO_WINDOW + ) + except subprocess.CalledProcessError as exc: + print exc + print out + import traceback + traceback.print_exc() + raise + + return out + @contextlib.contextmanager def no_workspace_dir(): @@ -79,12 +160,53 @@ class ExtractLook(pype.api.Extractor): relationships = lookdata["relationships"] sets = relationships.keys() + # Extract the textures to transfer, possibly convert with maketx and + # remap the node paths to the destination path. Note that a source + # might be included more than once amongst the resources as they could + # be the input file to multiple nodes. resources = instance.data["resources"] + do_maketx = instance.data.get("maketx", False) + # Collect all unique files used in the resources + files = set() + for resource in resources: + files.update(os.path.normpath(f) for f in resource["files"]) + + # Process the resource files + transfers = list() + hardlinks = list() + hashes = dict() + for filepath in files: + source, mode, hash = self._process_texture( + filepath, do_maketx, staging=dir_path + ) + destination = self.resource_destination( + instance, source, do_maketx + ) + if mode == COPY: + transfers.append((source, destination)) + elif mode == HARDLINK: + hardlinks.append((source, destination)) + + # Store the hashes from hash to destination to include in the + # database + hashes[hash] = destination + + # Remap the resources to the destination path (change node attributes) + destinations = dict() remap = OrderedDict() # needs to be ordered, see color space values for resource in resources: + source = os.path.normpath(resource["source"]) + if source not in destinations: + # Cache destination as source resource might be included + # multiple times + destinations[source] = self.resource_destination( + instance, source, do_maketx + ) + + # Remap file node filename to destination attr = resource['attribute'] - remap[attr] = resource['destination'] + remap[attr] = destinations[source] # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to @@ -107,15 +229,17 @@ class ExtractLook(pype.api.Extractor): with lib.attribute_values(remap): with avalon.maya.maintained_selection(): cmds.select(sets, noExpand=True) - cmds.file(maya_path, - force=True, - typ="mayaAscii", - exportSelected=True, - preserveReferences=False, - channels=True, - constraints=True, - expressions=True, - constructionHistory=True) + cmds.file( + maya_path, + force=True, + typ="mayaAscii", + exportSelected=True, + preserveReferences=False, + channels=True, + constraints=True, + expressions=True, + constructionHistory=True + ) # Write the JSON data self.log.info("Extract json..") @@ -127,9 +251,90 @@ class ExtractLook(pype.api.Extractor): if "files" not in instance.data: instance.data["files"] = list() + if "hardlinks" not in instance.data: + instance.data["hardlinks"] = list() + if "transfers" not in instance.data: + instance.data["transfers"] = list() instance.data["files"].append(maya_fname) instance.data["files"].append(json_fname) - self.log.info("Extracted instance '%s' to: %s" % 
(instance.name, - maya_path)) + # Set up the resources transfers/links for the integrator + instance.data["transfers"].extend(transfers) + instance.data["hardlinks"].extend(hardlinks) + + # Source hash for the textures + instance.data["sourceHashes"] = hashes + + self.log.info("Extracted instance '%s' to: %s" % ( + instance.name, maya_path) + ) + + def resource_destination(self, instance, filepath, do_maketx): + + # Compute destination location + basename, ext = os.path.splitext(os.path.basename(filepath)) + + # If maketx then the texture will always end with .tx + if do_maketx: + ext = ".tx" + + return os.path.join( + instance.data["assumedDestination"], + "resources", + basename + ext + ) + + def _process_texture(self, filepath, do_maketx, staging): + """Process a single texture file on disk for publishing. + This will: + 1. Check whether it's already published, if so it will do hardlink + 2. If not published and maketx is enabled, generate a new .tx file. + 3. Compute the destination path for the source file. + Args: + filepath (str): The source file path to process. + do_maketx (bool): Whether to produce a .tx file + Returns: + """ + + fname, ext = os.path.splitext(os.path.basename(filepath)) + + args = [] + if do_maketx: + args.append("maketx") + texture_hash = source_hash(filepath, *args) + + # If source has been published before with the same settings, + # then don't reprocess but hardlink from the original + existing = find_paths_by_hash(texture_hash) + if existing: + self.log.info("Found hash in database, preparing hardlink..") + source = next((p for p in existing if os.path.exists(p)), None) + if filepath: + return source, HARDLINK, texture_hash + else: + self.log.warning( + "Paths not found on disk, " + "skipping hardlink: %s" % (existing,) + ) + + if do_maketx and ext != ".tx": + # Produce .tx file in staging if source file is not .tx + converted = os.path.join( + staging, + "resources", + fname + ".tx" + ) + + # Ensure folder exists + if not os.path.exists(os.path.dirname(converted)): + os.makedirs(os.path.dirname(converted)) + + self.log.info("Generating .tx file for %s .." 
% filepath) + maketx(filepath, converted, + # Include `source-hash` as string metadata + "-sattrib", "sourceHash", texture_hash) + + return converted, COPY, texture_hash + + return filepath, COPY, texture_hash diff --git a/pype/plugins/maya/publish/increment_current_file_deadline.py b/pype/plugins/maya/publish/increment_current_file_deadline.py index 527f3d781d..6f644adacb 100644 --- a/pype/plugins/maya/publish/increment_current_file_deadline.py +++ b/pype/plugins/maya/publish/increment_current_file_deadline.py @@ -31,10 +31,11 @@ class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): current_filepath = context.data["currentFile"] new_filepath = version_up(current_filepath) - # Ensure the suffix is .ma because we're saving to `mayaAscii` type - if not new_filepath.endswith(".ma"): - self.log.warning("Refactoring scene to .ma extension") - new_filepath = os.path.splitext(new_filepath)[0] + ".ma" + # # Ensure the suffix is .ma because we're saving to `mayaAscii` type + if new_filepath.endswith(".ma"): + fileType = "mayaAscii" + elif new_filepath.endswith(".mb"): + fileType = "mayaBinary" cmds.file(rename=new_filepath) - cmds.file(save=True, force=True, type="mayaAscii") + cmds.file(save=True, force=True, type=fileType) diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py index 56e4b1ea32..0a97a9b98f 100644 --- a/pype/plugins/maya/publish/submit_maya_deadline.py +++ b/pype/plugins/maya/publish/submit_maya_deadline.py @@ -238,6 +238,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # todo: This is a temporary fix for yeti variables "PEREGRINEL_LICENSE", + "SOLIDANGLE_LICENSE", "ARNOLD_LICENSE" "MAYA_MODULE_PATH", "TOOL_ENV" diff --git a/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py new file mode 100644 index 0000000000..3aae97b8fd --- /dev/null +++ b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py @@ -0,0 +1,277 @@ +from maya import cmds + +import pyblish.api +import pype.api +import pype.maya.action +import math +import maya.api.OpenMaya as om +from pymel.core import polyUVSet + + +class GetOverlappingUVs(object): + + def _createBoundingCircle(self, meshfn): + """ Represent a face by center and radius + + :param meshfn: MFnMesh class + :type meshfn: :class:`maya.api.OpenMaya.MFnMesh` + :returns: (center, radius) + :rtype: tuple + """ + center = [] + radius = [] + for i in xrange(meshfn.numPolygons): # noqa: F821 + # get uvs from face + uarray = [] + varray = [] + for j in range(len(meshfn.getPolygonVertices(i))): + uv = meshfn.getPolygonUV(i, j) + uarray.append(uv[0]) + varray.append(uv[1]) + + # loop through all vertices to construct edges/rays + cu = 0.0 + cv = 0.0 + for j in range(len(uarray)): + cu += uarray[j] + cv += varray[j] + + cu /= len(uarray) + cv /= len(varray) + rsqr = 0.0 + for j in range(len(varray)): + du = uarray[j] - cu + dv = varray[j] - cv + dsqr = du * du + dv * dv + rsqr = dsqr if dsqr > rsqr else rsqr + + center.append(cu) + center.append(cv) + radius.append(math.sqrt(rsqr)) + + return center, radius + + def _createRayGivenFace(self, meshfn, faceId): + """ Represent a face by a series of edges(rays), i.e. + + :param meshfn: MFnMesh class + :type meshfn: :class:`maya.api.OpenMaya.MFnMesh` + :param faceId: face id + :type faceId: int + :returns: False if no valid uv's. + ""(True, orig, vec)"" or ""(False, None, None)"" + :rtype: tuple + + .. code-block:: python + + orig = [orig1u, orig1v, orig2u, orig2v, ... 
diff --git a/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py
new file mode 100644
index 0000000000..3aae97b8fd
--- /dev/null
+++ b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py
@@ -0,0 +1,277 @@
+from maya import cmds
+
+import pyblish.api
+import pype.api
+import pype.maya.action
+import math
+import maya.api.OpenMaya as om
+from pymel.core import polyUVSet
+
+
+class GetOverlappingUVs(object):
+
+    def _createBoundingCircle(self, meshfn):
+        """ Represent a face by center and radius
+
+        :param meshfn: MFnMesh class
+        :type meshfn: :class:`maya.api.OpenMaya.MFnMesh`
+        :returns: (center, radius)
+        :rtype: tuple
+        """
+        center = []
+        radius = []
+        for i in xrange(meshfn.numPolygons):  # noqa: F821
+            # get uvs from face
+            uarray = []
+            varray = []
+            for j in range(len(meshfn.getPolygonVertices(i))):
+                uv = meshfn.getPolygonUV(i, j)
+                uarray.append(uv[0])
+                varray.append(uv[1])
+
+            # average the uvs to get the face centroid in UV space
+            cu = 0.0
+            cv = 0.0
+            for j in range(len(uarray)):
+                cu += uarray[j]
+                cv += varray[j]
+
+            cu /= len(uarray)
+            cv /= len(varray)
+            rsqr = 0.0
+            for j in range(len(varray)):
+                du = uarray[j] - cu
+                dv = varray[j] - cv
+                dsqr = du * du + dv * dv
+                rsqr = dsqr if dsqr > rsqr else rsqr
+
+            center.append(cu)
+            center.append(cv)
+            radius.append(math.sqrt(rsqr))
+
+        return center, radius
+
+    def _createRayGivenFace(self, meshfn, faceId):
+        """ Represent a face by a series of edges (rays), i.e.:
+
+        .. code-block:: python
+
+            orig = [orig1u, orig1v, orig2u, orig2v, ... ]
+            vec = [vec1u, vec1v, vec2u, vec2v, ... ]
+
+        :param meshfn: MFnMesh class
+        :type meshfn: :class:`maya.api.OpenMaya.MFnMesh`
+        :param faceId: face id
+        :type faceId: int
+        :returns: ``(True, orig, vec)``, or ``(False, None, None)``
+            if the face has no valid uvs
+        :rtype: tuple
+        """
+        orig = []
+        vec = []
+        # get uvs
+        uarray = []
+        varray = []
+        for i in range(len(meshfn.getPolygonVertices(faceId))):
+            uv = meshfn.getPolygonUV(faceId, i)
+            uarray.append(uv[0])
+            varray.append(uv[1])
+
+        if len(uarray) == 0 or len(varray) == 0:
+            return (False, None, None)
+
+        # loop through all vertices to construct edges/rays
+        u = uarray[-1]
+        v = varray[-1]
+        for i in xrange(len(uarray)):  # noqa: F821
+            orig.append(uarray[i])
+            orig.append(varray[i])
+            vec.append(u - uarray[i])
+            vec.append(v - varray[i])
+            u = uarray[i]
+            v = varray[i]
+
+        return (True, orig, vec)
+
+    def _checkCrossingEdges(self,
+                            face1Orig,
+                            face1Vec,
+                            face2Orig,
+                            face2Vec):
+        """ Check if there are crossing edges between two faces.
+        Return True if there are crossing edges and False otherwise.
+
+        A face is represented by a series of edges (rays), i.e.:
+
+        .. code-block:: python
+
+            faceOrig[] = [orig1u, orig1v, orig2u, orig2v, ... ]
+            faceVec[] = [vec1u, vec1v, vec2u, vec2v, ... ]
+
+        :param face1Orig: origin of face 1
+        :type face1Orig: tuple
+        :param face1Vec: face 1 edges
+        :type face1Vec: list
+        :param face2Orig: origin of face 2
+        :type face2Orig: tuple
+        :param face2Vec: face 2 edges
+        :type face2Vec: list
+        """
+        face1Size = len(face1Orig)
+        face2Size = len(face2Orig)
+        for i in xrange(0, face1Size, 2):  # noqa: F821
+            o1x = face1Orig[i]
+            o1y = face1Orig[i+1]
+            v1x = face1Vec[i]
+            v1y = face1Vec[i+1]
+            # Given ray1(O1, V1), its normal is (V1.y, -V1.x)
+            n1x = v1y
+            n1y = -v1x
+            for j in xrange(0, face2Size, 2):  # noqa: F821
+                o2x = face2Orig[j]
+                o2y = face2Orig[j+1]
+                v2x = face2Vec[j]
+                v2y = face2Vec[j+1]
+                n2x = v2y
+                n2y = -v2x
+
+                # Find t for ray2
+                # t = [(o1x-o2x)n1x + (o1y-o2y)n1y] /
+                #     (v2x * n1x + v2y * n1y)
+                denum = v2x * n1x + v2y * n1y
+                # Edges are parallel if denum is close to 0.
+                if math.fabs(denum) < 0.000001:
+                    continue
+                t2 = ((o1x-o2x) * n1x + (o1y-o2y) * n1y) / denum
+                if (t2 < 0.00001 or t2 > 0.99999):
+                    continue
+
+                # Find t for ray1
+                # t = [(o2x-o1x)n2x
+                #      + (o2y-o1y)n2y] / (v1x * n2x + v1y * n2y)
+                denum = v1x * n2x + v1y * n2y
+                # Edges are parallel if denum is close to 0.
+                if math.fabs(denum) < 0.000001:
+                    continue
+                t1 = ((o2x-o1x) * n2x + (o2y-o1y) * n2y) / denum
+
+                # Edges intersect
+                if (t1 > 0.00001 and t1 < 0.99999):
+                    return True
+
+        return False
+
+    def _getOverlapUVFaces(self, meshName):
+        """ Return overlapping faces
+
+        :param meshName: name of mesh
+        :type meshName: str
+        :returns: list of overlapping faces
+        :rtype: list
+        """
+        faces = []
+        # find polygon mesh node
+        selList = om.MSelectionList()
+        selList.add(meshName)
+        mesh = selList.getDependNode(0)
+        if mesh.apiType() == om.MFn.kTransform:
+            dagPath = selList.getDagPath(0)
+            dagFn = om.MFnDagNode(dagPath)
+            child = dagFn.child(0)
+            if child.apiType() != om.MFn.kMesh:
+                raise Exception("Can't find polygon mesh")
+            mesh = child
+        meshfn = om.MFnMesh(mesh)
+
+        center, radius = self._createBoundingCircle(meshfn)
+        for i in xrange(meshfn.numPolygons):  # noqa: F821
+            rayb1, face1Orig, face1Vec = self._createRayGivenFace(
+                meshfn, i)
+            if not rayb1:
+                continue
+            cui = center[2*i]
+            cvi = center[2*i+1]
+            ri = radius[i]
+            # Exclude the degenerate face
+            # if(area(face1Orig) < 0.000001): continue
+            # Loop through face j where j != i
+            for j in range(i+1, meshfn.numPolygons):
+                cuj = center[2*j]
+                cvj = center[2*j+1]
+                rj = radius[j]
+                du = cuj - cui
+                dv = cvj - cvi
+                dsqr = du * du + dv * dv
+                # Quick rejection if bounding circles don't overlap
+                if (dsqr >= (ri + rj) * (ri + rj)):
+                    continue
+
+                rayb2, face2Orig, face2Vec = self._createRayGivenFace(
+                    meshfn, j)
+                if not rayb2:
+                    continue
+                # Exclude the degenerate face
+                # if(area(face2Orig) < 0.000001): continue
+                if self._checkCrossingEdges(face1Orig,
+                                            face1Vec,
+                                            face2Orig,
+                                            face2Vec):
+                    face1 = '%s.f[%d]' % (meshfn.name(), i)
+                    face2 = '%s.f[%d]' % (meshfn.name(), j)
+                    if face1 not in faces:
+                        faces.append(face1)
+                    if face2 not in faces:
+                        faces.append(face2)
+        return faces
+
+
+class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin):
+    """ Validate the current mesh for overlapping UVs.
+
+    It validates whether the current UVs are overlapping or not.
+    The check is optional, so the publisher can toggle it off and
+    treat overlaps as a warning only.
+    """
+
+    order = pype.api.ValidateMeshOrder
+    hosts = ['maya']
+    families = ['model']
+    category = 'geometry'
+    label = 'Mesh Has Overlapping UVs'
+    actions = [pype.maya.action.SelectInvalidAction]
+    optional = True
+
+    @classmethod
+    def _has_overlapping_uvs(cls, node):
+        """ Check if mesh has overlapping UVs.
+
+        :param node: node to check
+        :type node: str
+        :returns: True if it has overlapping UVs, False otherwise
+        :rtype: bool
+        """
+        ovl = GetOverlappingUVs()
+
+        # Check every UV set, switching the current one as we go
+        for uv in polyUVSet(node, q=1, auv=1):
+            polyUVSet(node, cuv=1, uvSet=uv)
+            if ovl._getOverlapUVFaces(str(node)):
+                return True
+        return False
+
+    @classmethod
+    def get_invalid(cls, instance):
+        invalid = []
+
+        for node in cmds.ls(instance, type='mesh'):
+            if cls._has_overlapping_uvs(node):
+                invalid.append(node)
+
+        return invalid
+
+    def process(self, instance):
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError("Meshes found with overlapping "
+                               "UVs: {0}".format(invalid))
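
The crossing test in `_checkCrossingEdges` treats each edge as a ray with origin O and direction V, takes the normal N = (V.y, -V.x) of the other edge, and solves for the parametric t of the intersection; two edges properly cross only if both t values fall strictly inside (0, 1). The standalone sketch below reproduces that math for two single segments (hypothetical `edges_cross` helper, pure Python, no Maya required), so it can be verified in isolation.

```python
import math

def edges_cross(o1, v1, o2, v2):
    """Return True if segments o1->o1+v1 and o2->o2+v2 properly intersect."""
    n1 = (v1[1], -v1[0])  # normal of edge 1
    n2 = (v2[1], -v2[0])  # normal of edge 2

    # Parametric t of the crossing point along edge 2
    denom = v2[0] * n1[0] + v2[1] * n1[1]
    if math.fabs(denom) < 1e-6:  # parallel edges never cross
        return False
    t2 = ((o1[0] - o2[0]) * n1[0] + (o1[1] - o2[1]) * n1[1]) / denom
    if t2 < 1e-5 or t2 > 1 - 1e-5:
        return False

    # Parametric t along edge 1
    denom = v1[0] * n2[0] + v1[1] * n2[1]
    if math.fabs(denom) < 1e-6:
        return False
    t1 = ((o2[0] - o1[0]) * n2[0] + (o2[1] - o1[1]) * n2[1]) / denom
    return 1e-5 < t1 < 1 - 1e-5

# The two diagonals of the unit square cross; parallel edges do not.
assert edges_cross((0, 0), (1, 1), (1, 0), (-1, 1))
assert not edges_cross((0, 0), (1, 0), (0, 1), (1, 0))
```

The bounding-circle pass in `_getOverlapUVFaces` exists purely to skip this per-edge test for face pairs whose UV extents cannot possibly touch, which keeps the check roughly linear for well-spread UV layouts.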
diff --git a/pype/plugins/maya/publish/validate_resources.py b/pype/plugins/maya/publish/validate_resources.py
index bc10d3003c..47a94e7529 100644
--- a/pype/plugins/maya/publish/validate_resources.py
+++ b/pype/plugins/maya/publish/validate_resources.py
@@ -1,8 +1,9 @@
+import os
+from collections import defaultdict
+
 import pyblish.api
 import pype.api
 
-import os
-
 
 class ValidateResources(pyblish.api.InstancePlugin):
     """Validates mapped resources.
@@ -12,18 +13,45 @@ class ValidateResources(pyblish.api.InstancePlugin):
     media.
 
     This validates:
-        - The resources are existing files.
-        - The resources have correctly collected the data.
+        - The resources have unique filenames (without extension)
 
     """
 
     order = pype.api.ValidateContentsOrder
-    label = "Resources"
+    label = "Resources Unique"
 
     def process(self, instance):
-        for resource in instance.data.get('resources', []):
-            # Required data
-            assert "source" in resource, "No source found"
-            assert "files" in resource, "No files from source"
-            assert all(os.path.exists(f) for f in resource['files'])
+        resources = instance.data.get("resources", [])
+        if not resources:
+            self.log.debug("No resources to validate..")
+            return
+
+        basenames = defaultdict(set)
+
+        for resource in resources:
+            files = resource.get("files", [])
+            for filename in files:
+
+                # Use normalized paths in comparison and ignore case
+                # sensitivity
+                filename = os.path.normpath(filename).lower()
+
+                basename = os.path.splitext(os.path.basename(filename))[0]
+                basenames[basename].add(filename)
+
+        invalid_resources = list()
+        for basename, sources in basenames.items():
+            if len(sources) > 1:
+                invalid_resources.extend(sources)
+
+                self.log.error(
+                    "Non-unique resource name: {0} "
+                    "(sources: {1})".format(
+                        basename,
+                        list(sources)
+                    )
+                )
+
+        if invalid_resources:
+            raise RuntimeError("Invalid resources in instance.")
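
The validator boils down to grouping normalized file paths by their extension-less basename and flagging any basename claimed by more than one distinct source. A standalone sketch of that grouping, with a hypothetical `find_name_collisions` helper:

```python
import os
from collections import defaultdict

def find_name_collisions(paths):
    """Map each colliding basename to the set of sources that claim it."""
    basenames = defaultdict(set)
    for path in paths:
        # Normalize and lowercase so case/separator variants compare equal
        normalized = os.path.normpath(path).lower()
        basename = os.path.splitext(os.path.basename(normalized))[0]
        basenames[basename].add(normalized)
    return {name: srcs for name, srcs in basenames.items() if len(srcs) > 1}

# "wood.tx" and "wood.png" collide once their extensions are stripped.
print(find_name_collisions(["/tex/wood.tx", "/other/wood.png", "/tex/rock.tx"]))
```

Dropping the extension before comparing is what matters here: generated `.tx` files land next to their sources under the same basename, so two different textures that differ only by extension would silently overwrite each other.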
- - """ - - order = pype.api.ValidateContentsOrder - label = "Transfers" - - def process(self, instance): - - transfers = instance.data.get("transfers", []) - if not transfers: - return - - # Collect all destination with its sources - collected = defaultdict(set) - for source, destination in transfers: - - # Use normalized paths in comparison and ignore case sensitivity - source = os.path.normpath(source).lower() - destination = os.path.normpath(destination).lower() - - collected[destination].add(source) - - invalid_destinations = list() - for destination, sources in collected.items(): - if len(sources) > 1: - invalid_destinations.append(destination) - - self.log.error("Non-unique file transfer for resources: " - "{0} (sources: {1})".format(destination, - list(sources))) - - if invalid_destinations: - raise RuntimeError("Invalid transfers in queue.") diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index c82d697541..45cd6e616e 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -101,7 +101,7 @@ class LoadSequence(api.Loader): if namespace is None: namespace = context['asset']['name'] - file = self.fname + file = self.fname.replace("\\", "/") log.info("file: {}\n".format(self.fname)) read_name = "Read_" + context["representation"]["context"]["subset"] @@ -112,7 +112,7 @@ class LoadSequence(api.Loader): r = nuke.createNode( "Read", "name {}".format(read_name)) - r["file"].setValue(self.fname) + r["file"].setValue(file) # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) diff --git a/pype/templates.py b/pype/templates.py index 58ae54f466..071426859a 100644 --- a/pype/templates.py +++ b/pype/templates.py @@ -1,18 +1,20 @@ import os import re -from avalon import io -from avalon import api as avalon +import sys +from avalon import io, api as avalon, lib as avalonlib from . 
diff --git a/pype/templates.py b/pype/templates.py
index 58ae54f466..071426859a 100644
--- a/pype/templates.py
+++ b/pype/templates.py
@@ -1,18 +1,20 @@
 import os
 import re
-from avalon import io
-from avalon import api as avalon
+import sys
+from avalon import io, api as avalon, lib as avalonlib
 from . import lib
 # from pypeapp.api import (Templates, Logger, format)
 from pypeapp import Logger
 
 log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
 
-SESSION = None
+
+self = sys.modules[__name__]
+self.SESSION = None
 
 
 def set_session():
     lib.set_io_database()
-    SESSION = avalon.session
+    self.SESSION = avalon.session
 
 
 def load_data_from_templates():
@@ -104,9 +106,9 @@ def set_project_code(code):
         os.environ[KEY]: project code
         avalon.session[KEY]: project code
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    SESSION["AVALON_PROJECTCODE"] = code
+    self.SESSION["AVALON_PROJECTCODE"] = code
     os.environ["AVALON_PROJECTCODE"] = code
 
 
@@ -118,9 +120,9 @@ def get_project_name():
         string: project name
 
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    project_name = SESSION.get("AVALON_PROJECT", None) \
+    project_name = self.SESSION.get("AVALON_PROJECT", None) \
         or os.getenv("AVALON_PROJECT", None)
     assert project_name, log.error("missing `AVALON_PROJECT`"
                                    "in avalon session "
@@ -138,9 +140,9 @@ def get_asset():
     Raises:
         log: error
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    asset = SESSION.get("AVALON_ASSET", None) \
+    asset = self.SESSION.get("AVALON_ASSET", None) \
         or os.getenv("AVALON_ASSET", None)
     log.info("asset: {}".format(asset))
     assert asset, log.error("missing `AVALON_ASSET`"
@@ -159,9 +161,9 @@ def get_task():
     Raises:
         log: error
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    task = SESSION.get("AVALON_TASK", None) \
+    task = self.SESSION.get("AVALON_TASK", None) \
        or os.getenv("AVALON_TASK", None)
     assert task, log.error("missing `AVALON_TASK`"
                            "in avalon session "
@@ -196,9 +198,9 @@ def set_hierarchy(hierarchy):
     Args:
         hierarchy (string): hierarchy path ("silo/folder/seq")
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    SESSION["AVALON_HIERARCHY"] = hierarchy
+    self.SESSION["AVALON_HIERARCHY"] = hierarchy
     os.environ["AVALON_HIERARCHY"] = hierarchy
 
 
@@ -219,13 +221,14 @@ def get_context_data(project=None,
         dict: contextual data
 
     """
-
+    application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])
     data = {
         "task": task or get_task(),
         "asset": asset or get_asset(),
         "project": {"name": project or get_project_name(),
                     "code": get_project_code()},
         "hierarchy": hierarchy or get_hierarchy(),
+        "app": application["application_dir"]
     }
     return data
 
@@ -248,10 +251,10 @@ def set_avalon_workdir(project=None,
         avalon.session[AVALON_WORKDIR]: workdir path
 
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    awd = SESSION.get("AVALON_WORKDIR", None) \
-        or os.getenv("AVALON_WORKDIR", None)
+
+    awd = self.SESSION.get("AVALON_WORKDIR", None) \
+        or os.getenv("AVALON_WORKDIR", None)
 
     data = get_context_data(project, hierarchy, asset, task)
 
     if (not awd) or ("{" not in awd):
@@ -259,7 +262,7 @@ def set_avalon_workdir(project=None,
 
     awd_filled = os.path.normpath(format(awd, data))
 
-    SESSION["AVALON_WORKDIR"] = awd_filled
+    self.SESSION["AVALON_WORKDIR"] = awd_filled
     os.environ["AVALON_WORKDIR"] = awd_filled
     log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
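
The `self = sys.modules[__name__]` idiom in the templates change exists because a plain `SESSION = avalon.session` inside `set_session()` only binds a function-local name, leaving the module-level `SESSION` untouched; assigning through the module object mutates the shared global that every caller reads. A minimal self-contained demonstration of that scoping difference:

```python
import sys

# Alias the module object so functions can rebind its attributes
self = sys.modules[__name__]
self.SESSION = None

def set_session_local_bug():
    SESSION = {"AVALON_PROJECT": "demo"}  # local variable; global unchanged

def set_session_module_attr():
    self.SESSION = {"AVALON_PROJECT": "demo"}  # rebinds the module global

set_session_local_bug()
assert SESSION is None  # the buggy version had no visible effect

set_session_module_attr()
assert SESSION == {"AVALON_PROJECT": "demo"}  # now visible everywhere
```

The alternative would be a `global SESSION` declaration in every setter; routing all access through the module attribute keeps the read and write sides symmetric, at the cost of the slightly surprising `self` name at module scope.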