Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-02 00:44:52 +01:00)

Merge branch 'develop' into feature/PYPE-202_preactions
commit a9ec09de43
121 changed files with 5324 additions and 1059 deletions

@@ -1,9 +1,12 @@
The base studio *config* for [Avalon](https://getavalon.github.io/)

<br>

Currently this config depends on our customised Avalon installation, so it won't work with vanilla avalon core. We're working on open sourcing all of the necessary code, though. You can still get inspiration from it, or take our individual validators and scripts, which should work just fine in other pipelines.

_This configuration acts as a starting point for all pype club clients with an Avalon deployment._

### Code convention

Below are some of the standard practices applied to this repository.

9  pype/clockify/__init__.py  Normal file
@@ -0,0 +1,9 @@
from .clockify_api import ClockifyAPI
from .widget_settings import ClockifySettings
from .clockify import ClockifyModule

__all__ = [
    'ClockifyAPI',
    'ClockifySettings',
    'ClockifyModule'
]
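
The package's `__init__.py` simply re-exports the three public classes, so the rest of this commit can import them in one hop. A minimal usage sketch (illustrative, mirroring the imports used by the action files below):

    from pype.clockify import ClockifyAPI, ClockifySettings, ClockifyModule

    clockapi = ClockifyAPI()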

97  pype/clockify/clockify.py  Normal file
@@ -0,0 +1,97 @@
import threading
from app import style
from app.vendor.Qt import QtWidgets
from pype.clockify import ClockifySettings, ClockifyAPI


class ClockifyModule:

    def __init__(self, main_parent=None, parent=None):
        self.main_parent = main_parent
        self.parent = parent
        self.clockapi = ClockifyAPI()
        self.widget_settings = ClockifySettings(main_parent, self)
        self.widget_settings_required = None

        self.thread_timer_check = None
        # Bools
        self.bool_thread_check_running = False
        self.bool_api_key_set = False
        self.bool_workspace_set = False
        self.bool_timer_run = False

    def start_up(self):
        self.clockapi.set_master(self)
        self.bool_api_key_set = self.clockapi.set_api()
        if self.bool_api_key_set is False:
            self.show_settings()
            return

        self.bool_workspace_set = self.clockapi.workspace_id is not None
        if self.bool_workspace_set is False:
            return

        self.start_timer_check()

        self.set_menu_visibility()

    def start_timer_check(self):
        self.bool_thread_check_running = True
        if self.thread_timer_check is None:
            self.thread_timer_check = threading.Thread(
                target=self.check_running
            )
            self.thread_timer_check.daemon = True
            self.thread_timer_check.start()

    def stop_timer_check(self):
        # Clear the flag so check_running() can leave its loop before join()
        self.bool_thread_check_running = False
        if self.thread_timer_check is not None:
            self.thread_timer_check.join()
            self.thread_timer_check = None

    def check_running(self):
        import time
        while self.bool_thread_check_running is True:
            if self.clockapi.get_in_progress() is not None:
                self.bool_timer_run = True
            else:
                self.bool_timer_run = False
            self.set_menu_visibility()
            time.sleep(5)

    def stop_timer(self):
        self.clockapi.finish_time_entry()
        self.bool_timer_run = False

    # Definition of Tray menu
    def tray_menu(self, parent):
        # Menu for Tray App
        self.menu = QtWidgets.QMenu('Clockify', parent)
        self.menu.setProperty('submenu', 'on')
        self.menu.setStyleSheet(style.load_stylesheet())

        # Actions
        self.aShowSettings = QtWidgets.QAction(
            "Settings", self.menu
        )
        self.aStopTimer = QtWidgets.QAction(
            "Stop timer", self.menu
        )

        self.menu.addAction(self.aShowSettings)
        self.menu.addAction(self.aStopTimer)

        self.aShowSettings.triggered.connect(self.show_settings)
        self.aStopTimer.triggered.connect(self.stop_timer)

        self.set_menu_visibility()

        return self.menu

    def show_settings(self):
        self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
        self.widget_settings.show()

    def set_menu_visibility(self):
        self.aStopTimer.setVisible(self.bool_timer_run)
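
The start/stop pair above coordinates a daemon polling thread through the shared `bool_thread_check_running` flag; the flag has to be cleared before `join()`, or the loop would never exit. A minimal standalone sketch of the same pattern (the `Poller` class is hypothetical, not part of the commit):

    import threading
    import time

    class Poller:
        def __init__(self):
            self.running = False
            self._thread = None

        def start(self):
            self.running = True
            if self._thread is None:
                # A daemon thread dies together with the main process.
                self._thread = threading.Thread(target=self._loop)
                self._thread.daemon = True
                self._thread.start()

        def stop(self):
            # Clear the flag first; join() would block forever otherwise.
            self.running = False
            if self._thread is not None:
                self._thread.join()
                self._thread = None

        def _loop(self):
            while self.running:
                time.sleep(0.1)  # poll whatever state is being watched

    p = Poller()
    p.start()
    time.sleep(0.5)
    p.stop()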

434  pype/clockify/clockify_api.py  Normal file
@@ -0,0 +1,434 @@
import os
import requests
import json
import datetime
import appdirs


class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(
                Singleton, cls
            ).__call__(*args, **kwargs)
        return cls._instances[cls]


class ClockifyAPI(metaclass=Singleton):
    endpoint = "https://api.clockify.me/api/"
    headers = {"X-Api-Key": None}
    app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
    file_name = 'clockify.json'
    fpath = os.path.join(app_dir, file_name)
    master_parent = None
    workspace_id = None

    def set_master(self, master_parent):
        self.master_parent = master_parent

    def verify_api(self):
        for key, value in self.headers.items():
            if value is None or value.strip() == '':
                return False
        return True

    def set_api(self, api_key=None):
        if api_key is None:
            api_key = self.get_api_key()

        if api_key is not None and self.validate_api_key(api_key) is True:
            self.headers["X-Api-Key"] = api_key
            self.set_workspace()
            return True
        return False

    def validate_api_key(self, api_key):
        test_headers = {'X-Api-Key': api_key}
        action_url = 'workspaces/'
        response = requests.get(
            self.endpoint + action_url,
            headers=test_headers
        )
        if response.status_code != 200:
            return False
        return True

    def validate_workspace_perm(self):
        test_project = '__test__'
        action_url = 'workspaces/{}/projects/'.format(self.workspace_id)
        body = {
            "name": test_project, "clientId": "", "isPublic": "false",
            "estimate": {"type": "AUTO"},
            "color": "#f44336", "billable": "true"
        }
        response = requests.post(
            self.endpoint + action_url,
            headers=self.headers, json=body
        )
        if response.status_code == 201:
            self.delete_project(self.get_project_id(test_project))
            return True
        else:
            projects = self.get_projects()
            if test_project in projects:
                try:
                    self.delete_project(self.get_project_id(test_project))
                    return True
                except json.decoder.JSONDecodeError:
                    return False
            return False

    def set_workspace(self, name=None):
        if name is None:
            name = os.environ.get('CLOCKIFY_WORKSPACE', None)
        self.workspace = name
        self.workspace_id = None
        if self.workspace is None:
            return
        try:
            result = self.validate_workspace()
        except Exception:
            result = False
        if result is not False:
            self.workspace_id = result
            if self.master_parent is not None:
                self.master_parent.start_timer_check()
            return True
        return False

    def validate_workspace(self, name=None):
        if name is None:
            name = self.workspace
        all_workspaces = self.get_workspaces()
        if name in all_workspaces:
            return all_workspaces[name]
        return False

    def get_api_key(self):
        api_key = None
        try:
            file = open(self.fpath, 'r')
            api_key = json.load(file).get('api_key', None)
            if api_key == '':
                api_key = None
        except Exception:
            file = open(self.fpath, 'w')
            file.close()
        return api_key

    def save_api_key(self, api_key):
        data = {'api_key': api_key}
        file = open(self.fpath, 'w')
        file.write(json.dumps(data))
        file.close()

    def get_workspaces(self):
        action_url = 'workspaces/'
        response = requests.get(
            self.endpoint + action_url,
            headers=self.headers
        )
        return {
            workspace["name"]: workspace["id"] for workspace in response.json()
        }

    def get_projects(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/'.format(workspace_id)
        response = requests.get(
            self.endpoint + action_url,
            headers=self.headers
        )

        return {
            project["name"]: project["id"] for project in response.json()
        }

    def get_tags(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/tags/'.format(workspace_id)
        response = requests.get(
            self.endpoint + action_url,
            headers=self.headers
        )

        return {
            tag["name"]: tag["id"] for tag in response.json()
        }

    def get_tasks(self, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/{}/tasks/'.format(
            workspace_id, project_id
        )
        response = requests.get(
            self.endpoint + action_url,
            headers=self.headers
        )

        return {
            task["name"]: task["id"] for task in response.json()
        }

    def get_workspace_id(self, workspace_name):
        all_workspaces = self.get_workspaces()
        if workspace_name not in all_workspaces:
            return None
        return all_workspaces[workspace_name]

    def get_project_id(self, project_name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        all_projects = self.get_projects(workspace_id)
        if project_name not in all_projects:
            return None
        return all_projects[project_name]

    def get_tag_id(self, tag_name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        all_tags = self.get_tags(workspace_id)
        if tag_name not in all_tags:
            return None
        return all_tags[tag_name]

    def get_task_id(
        self, task_name, project_id, workspace_id=None
    ):
        if workspace_id is None:
            workspace_id = self.workspace_id
        all_tasks = self.get_tasks(
            project_id, workspace_id
        )
        if task_name not in all_tasks:
            return None
        return all_tasks[task_name]

    def get_current_time(self):
        return str(datetime.datetime.utcnow().isoformat()) + 'Z'

    def start_time_entry(
        self, description, project_id, task_id=None, tag_ids=[],
        workspace_id=None, billable=True
    ):
        # Workspace
        if workspace_id is None:
            workspace_id = self.workspace_id

        # Check if a time entry with the same values is already running
        current = self.get_in_progress(workspace_id)
        if current is not None:
            if (
                current.get("description", None) == description and
                current.get("projectId", None) == project_id and
                current.get("taskId", None) == task_id
            ):
                self.bool_timer_run = True
                return self.bool_timer_run
            self.finish_time_entry(workspace_id)

        # Convert billable to strings
        if billable:
            billable = 'true'
        else:
            billable = 'false'
        # Rest API Action
        action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
        start = self.get_current_time()
        body = {
            "start": start,
            "billable": billable,
            "description": description,
            "projectId": project_id,
            "taskId": task_id,
            "tagIds": tag_ids
        }
        response = requests.post(
            self.endpoint + action_url,
            headers=self.headers,
            json=body
        )

        success = False
        if response.status_code < 300:
            success = True
        return success

    def get_in_progress(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/timeEntries/inProgress'.format(
            workspace_id
        )
        response = requests.get(
            self.endpoint + action_url,
            headers=self.headers
        )
        try:
            output = response.json()
        except json.decoder.JSONDecodeError:
            output = None
        return output

    def finish_time_entry(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        current = self.get_in_progress(workspace_id)
        if current is None:
            # Nothing is running, so there is nothing to finish
            return
        current_id = current["id"]
        action_url = 'workspaces/{}/timeEntries/{}'.format(
            workspace_id, current_id
        )
        body = {
            "start": current["timeInterval"]["start"],
            "billable": current["billable"],
            "description": current["description"],
            "projectId": current["projectId"],
            "taskId": current["taskId"],
            "tagIds": current["tagIds"],
            "end": self.get_current_time()
        }
        response = requests.put(
            self.endpoint + action_url,
            headers=self.headers,
            json=body
        )
        return response.json()

    def get_time_entries(
        self, workspace_id=None, quantity=10
    ):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
        response = requests.get(
            self.endpoint + action_url,
            headers=self.headers
        )
        return response.json()[:quantity]

    def remove_time_entry(self, tid, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/timeEntries/{}'.format(
            workspace_id, tid
        )
        response = requests.delete(
            self.endpoint + action_url,
            headers=self.headers
        )
        return response.json()

    def add_project(self, name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/'.format(workspace_id)
        body = {
            "name": name,
            "clientId": "",
            "isPublic": "false",
            "estimate": {
                # "estimate": "3600",
                "type": "AUTO"
            },
            "color": "#f44336",
            "billable": "true"
        }
        response = requests.post(
            self.endpoint + action_url,
            headers=self.headers,
            json=body
        )
        return response.json()

    def add_workspace(self, name):
        action_url = 'workspaces/'
        body = {"name": name}
        response = requests.post(
            self.endpoint + action_url,
            headers=self.headers,
            json=body
        )
        return response.json()

    def add_task(
        self, name, project_id, workspace_id=None
    ):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/{}/tasks/'.format(
            workspace_id, project_id
        )
        body = {
            "name": name,
            "projectId": project_id
        }
        response = requests.post(
            self.endpoint + action_url,
            headers=self.headers,
            json=body
        )
        return response.json()

    def add_tag(self, name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/tags'.format(workspace_id)
        body = {
            "name": name
        }
        response = requests.post(
            self.endpoint + action_url,
            headers=self.headers,
            json=body
        )
        return response.json()

    def delete_project(
        self, project_id, workspace_id=None
    ):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = '/workspaces/{}/projects/{}'.format(
            workspace_id, project_id
        )
        response = requests.delete(
            self.endpoint + action_url,
            headers=self.headers,
        )
        return response.json()

    def convert_input(
        self, entity_id, entity_name, mode='Workspace', project_id=None
    ):
        if entity_id is None:
            error = False
            error_msg = 'Missing information "{}"'
            if mode.lower() == 'workspace':
                if entity_id is None and entity_name is None:
                    if self.workspace_id is not None:
                        entity_id = self.workspace_id
                    else:
                        error = True
                else:
                    entity_id = self.get_workspace_id(entity_name)
            else:
                if entity_id is None and entity_name is None:
                    error = True
                elif mode.lower() == 'project':
                    entity_id = self.get_project_id(entity_name)
                elif mode.lower() == 'task':
                    entity_id = self.get_task_id(
                        task_name=entity_name, project_id=project_id
                    )
                else:
                    raise TypeError('Unknown type')
            # Raise error
            if error:
                raise ValueError(error_msg.format(mode))

        return entity_id
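
The `Singleton` metaclass above caches one instance per class, which is why the tray module, the settings dialog, and the ftrack actions can each call `ClockifyAPI()` and still share a single set of headers and workspace id. A self-contained sketch of the behaviour (the `Config` class is hypothetical):

    class Singleton(type):
        _instances = {}

        def __call__(cls, *args, **kwargs):
            # First call constructs the instance; later calls return the cached one.
            if cls not in cls._instances:
                cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
            return cls._instances[cls]

    class Config(metaclass=Singleton):
        def __init__(self):
            self.value = 0

    a = Config()
    b = Config()
    a.value = 42
    assert a is b and b.value == 42  # both names point to the same instance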

155  pype/clockify/widget_settings.py  Normal file
@@ -0,0 +1,155 @@
import os
from app.vendor.Qt import QtCore, QtGui, QtWidgets
from app import style


class ClockifySettings(QtWidgets.QWidget):

    SIZE_W = 300
    SIZE_H = 130

    loginSignal = QtCore.Signal(object, object, object)

    def __init__(self, main_parent=None, parent=None, optional=True):

        super(ClockifySettings, self).__init__()

        self.parent = parent
        self.main_parent = main_parent
        self.clockapi = parent.clockapi
        self.optional = optional
        self.validated = False

        # Icon
        if hasattr(parent, 'icon'):
            self.setWindowIcon(self.parent.icon)
        elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
            self.setWindowIcon(self.parent.parent.icon)
        else:
            pype_setup = os.getenv('PYPE_SETUP_ROOT')
            items = [pype_setup, "app", "resources", "icon.png"]
            fname = os.path.sep.join(items)
            icon = QtGui.QIcon(fname)
            self.setWindowIcon(icon)

        self.setWindowFlags(
            QtCore.Qt.WindowCloseButtonHint |
            QtCore.Qt.WindowMinimizeButtonHint
        )

        self._translate = QtCore.QCoreApplication.translate

        # Font
        self.font = QtGui.QFont()
        self.font.setFamily("DejaVu Sans Condensed")
        self.font.setPointSize(9)
        self.font.setBold(True)
        self.font.setWeight(50)
        self.font.setKerning(True)

        # Size setting
        self.resize(self.SIZE_W, self.SIZE_H)
        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
        self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100))
        self.setStyleSheet(style.load_stylesheet())

        self.setLayout(self._main())
        self.setWindowTitle('Clockify settings')

    def _main(self):
        self.main = QtWidgets.QVBoxLayout()
        self.main.setObjectName("main")

        self.form = QtWidgets.QFormLayout()
        self.form.setContentsMargins(10, 15, 10, 5)
        self.form.setObjectName("form")

        self.label_api_key = QtWidgets.QLabel("Clockify API key:")
        self.label_api_key.setFont(self.font)
        self.label_api_key.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
        self.label_api_key.setTextFormat(QtCore.Qt.RichText)
        self.label_api_key.setObjectName("label_api_key")

        self.input_api_key = QtWidgets.QLineEdit()
        self.input_api_key.setEnabled(True)
        self.input_api_key.setFrame(True)
        self.input_api_key.setObjectName("input_api_key")
        self.input_api_key.setPlaceholderText(
            self._translate("main", "e.g. XX1XxXX2x3x4xXxx")
        )

        self.error_label = QtWidgets.QLabel("")
        self.error_label.setFont(self.font)
        self.error_label.setTextFormat(QtCore.Qt.RichText)
        self.error_label.setObjectName("error_label")
        self.error_label.setWordWrap(True)
        self.error_label.hide()

        self.form.addRow(self.label_api_key, self.input_api_key)
        self.form.addRow(self.error_label)

        self.btn_group = QtWidgets.QHBoxLayout()
        self.btn_group.addStretch(1)
        self.btn_group.setObjectName("btn_group")

        self.btn_ok = QtWidgets.QPushButton("Ok")
        self.btn_ok.setToolTip(
            'Sets the Clockify API key so timers can be started/stopped'
        )
        self.btn_ok.clicked.connect(self.click_ok)

        self.btn_cancel = QtWidgets.QPushButton("Cancel")
        cancel_tooltip = 'Application won\'t start'
        if self.optional:
            cancel_tooltip = 'Close this window'
        self.btn_cancel.setToolTip(cancel_tooltip)
        self.btn_cancel.clicked.connect(self._close_widget)

        self.btn_group.addWidget(self.btn_ok)
        self.btn_group.addWidget(self.btn_cancel)

        self.main.addLayout(self.form)
        self.main.addLayout(self.btn_group)

        return self.main

    def setError(self, msg):
        self.error_label.setText(msg)
        self.error_label.show()

    def invalid_input(self, entity):
        entity.setStyleSheet("border: 1px solid red;")

    def click_ok(self):
        api_key = self.input_api_key.text().strip()
        if self.optional is True and api_key == '':
            self.clockapi.save_api_key(None)
            self.clockapi.set_api(api_key)
            self.validated = False
            self._close_widget()
            return

        validation = self.clockapi.validate_api_key(api_key)

        if validation:
            self.clockapi.save_api_key(api_key)
            self.clockapi.set_api(api_key)
            self.validated = True
            self._close_widget()
        else:
            self.invalid_input(self.input_api_key)
            self.validated = False
            self.setError(
                "Invalid API key entered"
            )

    def closeEvent(self, event):
        if self.optional is True:
            event.ignore()
            self._close_widget()
        else:
            self.validated = False

    def _close_widget(self):
        if self.optional is True:
            self.hide()
        else:
            self.close()

@@ -1 +1,2 @@
 from .lib import *
+from .ftrack_server import *

@@ -71,9 +71,12 @@ def register(session):
         apps.append(app)

     apps = sorted(apps, key=lambda x: x['name'])
+    app_counter = 0
     for app in apps:
         try:
             registerApp(app, session)
-            time.sleep(0.05)
+            if app_counter % 5 == 0:
+                time.sleep(0.1)
+            app_counter += 1
         except Exception as e:
             log.warning("'{0}' - not proper App ({1})".format(app['name'], e))

@@ -87,8 +87,7 @@ def register(session, **kw):
     if not isinstance(session, ftrack_api.session.Session):
         return

-    action_handler = AssetDelete(session)
-    action_handler.register()
+    AssetDelete(session).register()


 def main(arguments=None):

105  pype/ftrack/actions/action_clockify_start.py  Normal file
@@ -0,0 +1,105 @@
import sys
import argparse
import logging

import ftrack_api
from pype.ftrack import BaseAction
from pype.clockify import ClockifyAPI


class StartClockify(BaseAction):
    '''Starts timer on Clockify.'''

    #: Action identifier.
    identifier = 'clockify.start.timer'
    #: Action label.
    label = 'Start timer'
    #: Action description.
    description = 'Starts timer on Clockify'
    #: Action icon.
    icon = 'https://clockify.me/assets/images/clockify-logo.png'
    #: Clockify API.
    clockapi = ClockifyAPI()

    def discover(self, session, entities, event):
        if len(entities) != 1:
            return False
        if entities[0].entity_type.lower() != 'task':
            return False
        if self.clockapi.workspace_id is None:
            return False
        return True

    def launch(self, session, entities, event):
        task = entities[0]
        task_name = task['type']['name']
        project_name = task['project']['full_name']

        def get_parents(entity):
            output = []
            if entity.entity_type.lower() == 'project':
                return output
            output.extend(get_parents(entity['parent']))
            output.append(entity['name'])

            return output

        desc_items = get_parents(task['parent'])
        desc_items.append(task['name'])
        description = '/'.join(desc_items)
        project_id = self.clockapi.get_project_id(project_name)
        tag_ids = []
        tag_ids.append(self.clockapi.get_tag_id(task_name))
        self.clockapi.start_time_entry(
            description, project_id, tag_ids=tag_ids
        )

        return True


def register(session, **kw):
    '''Register plugin. Called when used as a plugin.'''

    if not isinstance(session, ftrack_api.session.Session):
        return

    StartClockify(session).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()
    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
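
The nested `get_parents` helper above recursively climbs to the project and flattens the entity names into a slash-separated description for the time entry. A toy illustration with plain dictionaries standing in for ftrack entities (hypothetical data):

    # Hypothetical stand-ins for ftrack entities: each node knows its parent.
    shot = {'name': 'sh010', 'type': 'shot',
            'parent': {'name': 'sq01', 'type': 'sequence',
                       'parent': {'name': 'MyProject', 'type': 'project',
                                  'parent': None}}}

    def get_parents(entity):
        output = []
        if entity['type'] == 'project':
            return output
        output.extend(get_parents(entity['parent']))
        output.append(entity['name'])
        return output

    print('/'.join(get_parents(shot) + ['animation']))  # sq01/sh010/animation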

148  pype/ftrack/actions/action_clockify_sync.py  Normal file
@@ -0,0 +1,148 @@
import sys
import argparse
import logging
import json
import ftrack_api
from pype.ftrack import BaseAction, MissingPermision
from pype.clockify import ClockifyAPI


class SyncClocify(BaseAction):
    '''Synchronise project names and task types.'''

    #: Action identifier.
    identifier = 'clockify.sync'
    #: Action label.
    label = 'Sync To Clockify'
    #: Action description.
    description = 'Synchronise data to Clockify workspace'
    #: Priority.
    priority = 100
    #: Roles that are allowed to register this action.
    role_list = ['Pypeclub', 'Administrator']
    #: Icon.
    icon = 'https://clockify.me/assets/images/clockify-logo-white.svg'
    #: Clockify API.
    clockapi = ClockifyAPI()

    def register(self):
        if self.clockapi.workspace_id is None:
            raise ValueError('Clockify Workspace or API key are not set!')

        if self.clockapi.validate_workspace_perm() is False:
            raise MissingPermision('Clockify')
        super().register()

    def discover(self, session, entities, event):
        ''' Validation '''
        return True

    def launch(self, session, entities, event):
        # JOB SETTINGS
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        job = session.create('Job', {
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Sync Ftrack to Clockify'
            })
        })
        session.commit()
        try:
            entity = entities[0]

            if entity.entity_type.lower() == 'project':
                project = entity
            else:
                project = entity['project']
            project_name = project['full_name']

            task_types = []
            for task_type in project['project_schema']['_task_type_schema'][
                'types'
            ]:
                task_types.append(task_type['name'])

            clockify_projects = self.clockapi.get_projects()

            if project_name not in clockify_projects:
                response = self.clockapi.add_project(project_name)
                if 'id' not in response:
                    self.log.error('Project {} can\'t be created'.format(
                        project_name
                    ))
                    return {
                        'success': False,
                        'message': 'Can\'t create project, unexpected error'
                    }
                project_id = response['id']
            else:
                project_id = clockify_projects[project_name]

            clockify_workspace_tags = self.clockapi.get_tags()
            for task_type in task_types:
                if task_type not in clockify_workspace_tags:
                    response = self.clockapi.add_tag(task_type)
                    if 'id' not in response:
                        self.log.error('Task {} can\'t be created'.format(
                            task_type
                        ))
                        continue
        except Exception:
            job['status'] = 'failed'
            session.commit()
            return False

        job['status'] = 'done'
        session.commit()
        return True


def register(session, **kw):
    '''Register plugin. Called when used as a plugin.'''

    if not isinstance(session, ftrack_api.session.Session):
        return

    SyncClocify(session).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()
    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
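
`SyncClocify.launch` wraps its work in a ftrack Job so users can watch progress in the web UI: the job is created as "running" and flipped to "failed" or "done" afterwards. The skeleton of that pattern, assuming an existing `session` and `user` as above and a hypothetical `do_sync()`:

    job = session.create('Job', {
        'user': user,
        'status': 'running',
        'data': json.dumps({'description': 'Sync Ftrack to Clockify'})
    })
    session.commit()
    try:
        do_sync()  # hypothetical: the project/tag sync from launch()
    except Exception:
        job['status'] = 'failed'
        session.commit()
    else:
        job['status'] = 'done'
        session.commit()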

@@ -1,6 +1,3 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 Milan Kolar
-
 import sys
 import argparse
 import logging

@@ -45,9 +42,7 @@ class ComponentOpen(BaseAction):
         # Get component filepath
         # TODO with locations it will be different???
         fpath = entity['component_locations'][0]['resource_identifier']
-        items = fpath.split(os.sep)
-        items.pop(-1)
-        fpath = os.sep.join(items)
+        fpath = os.path.normpath(os.path.dirname(fpath))

         if os.path.isdir(fpath):
             if 'win' in sys.platform:  # windows

@@ -80,8 +75,7 @@ def register(session, **kw):
     if not isinstance(session, ftrack_api.session.Session):
         return

-    action_handler = ComponentOpen(session)
-    action_handler.register()
+    ComponentOpen(session).register()


 def main(arguments=None):

@@ -1,11 +1,10 @@
 # :coding: utf-8
 # :copyright: Copyright (c) 2017 ftrack
 import os
 import sys
 import argparse
 import json
 import ftrack_api
-import arrow
 import logging
 from pype.ftrack import BaseAction, get_ca_mongoid

 """

@@ -112,6 +111,13 @@ class CustomAttributes(BaseAction):
     label = 'Create/Update Avalon Attributes'
     #: Action description.
     description = 'Creates Avalon/Mongo ID for double check'
+    #: roles that are allowed to register this action
+    role_list = ['Pypeclub', 'Administrator']
+    icon = (
+        'https://cdn4.iconfinder.com/data/icons/'
+        'ios-web-user-interface-multi-circle-flat-vol-4/512/'
+        'Bullet_list_menu_lines_points_items_options-512.png'
+    )

     def __init__(self, session):
         super().__init__(session)

@@ -136,14 +142,7 @@ class CustomAttributes(BaseAction):
         Validation
         - action is only for Administrators
         '''
-        success = False
-        userId = event['source']['user']['id']
-        user = session.query('User where id is ' + userId).one()
-        for role in user['user_security_roles']:
-            if role['security_role']['name'] == 'Administrator':
-                success = True
-
-        return success
+        return True

     def launch(self, session, entities, event):
         # JOB SETTINGS

@@ -584,14 +583,11 @@ def register(session, **kw):
     if not isinstance(session, ftrack_api.session.Session):
         return

-    action_handler = CustomAttributes(session)
-    action_handler.register()
+    CustomAttributes(session).register()


 def main(arguments=None):
     '''Set up logging and register action.'''
-    import logging
-
     if arguments is None:
         arguments = []

@@ -29,10 +29,6 @@ class CreateFolders(BaseAction):
     def discover(self, session, entities, event):
         ''' Validation '''
-
-        # if (len(entities) == 0 or entities[0].entity_type not in
-        #         ['Episode', 'Sequence', 'Shot', 'Folder', 'Asset Build']):
-        #     return False

         return True

     def getShotAsset(self, entity):

@@ -141,8 +137,7 @@ def register(session, **kw):
     if not isinstance(session, ftrack_api.session.Session):
         return

-    action_handler = CreateFolders(session)
-    action_handler.register()
+    CreateFolders(session).register()


 def main(arguments=None):

@@ -1,115 +1,282 @@
 import sys
 import logging
 import random
 import string
+from bson.objectid import ObjectId
 import argparse
 import ftrack_api
 from pype.ftrack import BaseAction
 from avalon.tools.libraryloader.io_nonsingleton import DbConnector


-class DeleteEntity(BaseAction):
+class DeleteAsset(BaseAction):
     '''Edit meta data action.'''

     #: Action identifier.
-    identifier = 'delete.entity'
+    identifier = 'delete.asset'
     #: Action label.
-    label = 'Delete entity'
+    label = 'Delete Asset/Subsets'
     #: Action description.
-    description = 'Removes assets from Ftrack and Avalon db with all childs'
-    icon = "https://www.iconsdb.com/icons/preview/white/full-trash-xxl.png"
+    description = 'Removes from Avalon with all childs and asset from Ftrack'
+    icon = (
+        'https://cdn4.iconfinder.com/data/icons/'
+        'ios-web-user-interface-multi-circle-flat-vol-5/512/'
+        'Delete_dustbin_empty_recycle_recycling_remove_trash-512.png'
+    )
+    #: roles that are allowed to register this action
+    role_list = ['Pypeclub', 'Administrator']
     #: Db
     db = DbConnector()

     value = None

     def discover(self, session, entities, event):
         ''' Validation '''
-        selection = event["data"].get("selection", None)
-        if selection is None or len(selection) > 1:
+        if len(entities) != 1:
             return False

         valid = ["task"]
-        entityType = selection[0].get("entityType", "")
+        entityType = event["data"]["selection"][0].get("entityType", "")
         if entityType.lower() not in valid:
             return False

-        discover = False
-        roleList = ['Pypeclub', 'Administrator']
-        userId = event['source']['user']['id']
-        user = session.query('User where id is ' + userId).one()
-
-        for role in user['user_security_roles']:
-            if role['security_role']['name'] in roleList:
-                discover = True
-                break
-
-        return discover
+        return True

+    def _launch(self, event):
+        self.reset_session()
+        try:
+            self.db.install()
+            args = self._translate_event(
+                self.session, event
+            )
+
+            interface = self._interface(
+                self.session, *args
+            )
+
+            confirmation = self.confirm_delete(
+                True, *args
+            )
+
+            if interface:
+                return interface
+
+            if confirmation:
+                return confirmation
+
+            response = self.launch(
+                self.session, *args
+            )
+        finally:
+            self.db.uninstall()
+
+        return self._handle_result(
+            self.session, response, *args
+        )

     def interface(self, session, entities, event):
         if not event['data'].get('values', {}):
             items = []
-            entity = entities[0]
-            title = 'Going to delete "{}"'.format(entity['name'])
-
-            self.attempt = 1
-            item = {
-                'label': 'Enter "DELETE" to confirm',
-                'name': 'key',
-                'type': 'text',
-                'value': ''
-            }
-            items.append(item)
+            entity = entities[0]
+            title = 'Choose items to delete from "{}"'.format(entity['name'])
+            project = entity['project']
+
+            self.db.Session['AVALON_PROJECT'] = project["full_name"]
+
+            av_entity = self.db.find_one({
+                'type': 'asset',
+                'name': entity['name']
+            })
+
+            asset_label = {
+                'type': 'label',
+                'value': '## Delete whole asset: ##'
+            }
+            splitter = {
+                'type': 'label',
+                'value': '{}'.format(200 * "-")
+            }
+            subset_label = {
+                'type': 'label',
+                'value': '## Subsets: ##'
+            }
+            if av_entity is not None:
+                asset_item = {
+                    'label': av_entity['name'],
+                    'name': 'whole_asset',
+                    'type': 'boolean',
+                    'value': False
+                }
+                items.append(asset_label)
+                items.append(asset_item)
+                items.append(splitter)
+
+                all_subsets = self.db.find({
+                    'type': 'subset',
+                    'parent': av_entity['_id']
+                })
+
+                subset_items = []
+                for subset in all_subsets:
+                    item = {
+                        'label': subset['name'],
+                        'name': str(subset['_id']),
+                        'type': 'boolean',
+                        'value': False
+                    }
+                    subset_items.append(item)
+                if len(subset_items) > 0:
+                    items.append(subset_label)
+                    items.extend(subset_items)
+            else:
+                return {
+                    'success': False,
+                    'message': 'Didn\'t find asset in avalon'
+                }

             return {
                 'items': items,
                 'title': title
             }

+    def confirm_delete(self, first_attempt, entities, event):
+        if first_attempt is True:
+            if 'values' not in event['data']:
+                return
+
+            values = event['data']['values']
+
+            if len(values) <= 0:
+                return
+            if 'whole_asset' not in values:
+                return
+        else:
+            values = self.values
+
+        title = 'Confirmation of deleting {}'
+        if values['whole_asset'] is True:
+            title = title.format(
+                'whole asset {}'.format(
+                    entities[0]['name']
+                )
+            )
+        else:
+            subsets = []
+            for key, value in values.items():
+                if value is True:
+                    subsets.append(key)
+            len_subsets = len(subsets)
+            if len_subsets == 0:
+                return {
+                    'success': True,
+                    'message': 'Nothing was selected to delete'
+                }
+            elif len_subsets == 1:
+                title = title.format(
+                    '{} subset'.format(len_subsets)
+                )
+            else:
+                title = title.format(
+                    '{} subsets'.format(len_subsets)
+                )
+
+        self.values = values
+        items = []
+
+        delete_label = {
+            'type': 'label',
+            'value': '# Please enter "DELETE" to confirm #'
+        }
+
+        delete_item = {
+            'name': 'delete_key',
+            'type': 'text',
+            'value': '',
+            'empty_text': 'Type Delete here...'
+        }
+        items.append(delete_label)
+        items.append(delete_item)
+
+        return {
+            'items': items,
+            'title': title
+        }

     def launch(self, session, entities, event):
         if 'values' not in event['data']:
             return

         values = event['data']['values']
         if len(values) <= 0:
-            return {
-                'success': True,
-                'message': 'No Assets to delete!'
-            }
-        elif values.get('key', '').lower() != 'delete':
             return
+        if 'delete_key' not in values:
+            return
+
+        if values['delete_key'].lower() != 'delete':
+            if values['delete_key'].lower() == '':
+                return {
+                    'success': False,
+                    'message': 'Deleting cancelled'
+                }
+            if self.attempt < 3:
+                self.attempt += 1
+                return_dict = self.confirm_delete(False, entities, event)
+                return_dict['title'] = '{} ({} attempt)'.format(
+                    return_dict['title'], self.attempt
+                )
+                return return_dict
             return {
                 'success': False,
-                'message': 'Entered key does not match'
+                'message': 'You didn\'t enter "DELETE" properly 3 times!'
             }

         entity = entities[0]
         project = entity['project']

-        self.db.install()
         self.db.Session['AVALON_PROJECT'] = project["full_name"]

-        av_entity = self.db.find_one({
-            'type': 'asset',
-            'name': entity['name']
-        })
-
-        if av_entity is not None:
-            all_ids = []
-            all_ids.append(av_entity['_id'])
-            all_ids.extend(self.find_child(av_entity))
-
-            if len(all_ids) == 0:
-                self.db.uninstall()
-                return {
-                    'success': True,
-                    'message': 'None of assets'
-                }
-            or_subquery = []
-            for id in all_ids:
-                or_subquery.append({'_id': id})
-            delete_query = {'$or': or_subquery}
-            self.db.delete_many(delete_query)
-
-        session.delete(entity)
-        session.commit()
-        self.db.uninstall()
+        all_ids = []
+        if self.values.get('whole_asset', False) is True:
+            av_entity = self.db.find_one({
+                'type': 'asset',
+                'name': entity['name']
+            })
+
+            if av_entity is not None:
+                all_ids.append(av_entity['_id'])
+                all_ids.extend(self.find_child(av_entity))
+
+            session.delete(entity)
+            session.commit()
+        else:
+            subset_names = []
+            for key, value in self.values.items():
+                if key == 'delete_key' or value is False:
+                    continue
+
+                entity_id = ObjectId(key)
+                av_entity = self.db.find_one({'_id': entity_id})
+                if av_entity is None:
+                    continue
+                # Collect the name only after the None check above
+                subset_names.append(av_entity['name'])
+                all_ids.append(entity_id)
+                all_ids.extend(self.find_child(av_entity))
+
+            for ft_asset in entity['assets']:
+                if ft_asset['name'] in subset_names:
+                    session.delete(ft_asset)
+                    session.commit()
+
+        if len(all_ids) == 0:
+            return {
+                'success': True,
+                'message': 'No entities to delete in avalon'
+            }
+
+        or_subquery = []
+        for id in all_ids:
+            or_subquery.append({'_id': id})
+        delete_query = {'$or': or_subquery}
+        self.db.delete_many(delete_query)

         return {
             'success': True,

@@ -148,8 +315,7 @@ def register(session, **kw):
     if not isinstance(session, ftrack_api.session.Session):
         return

-    action_handler = DeleteEntity(session)
-    action_handler.register()
+    DeleteAsset(session).register()


 def main(arguments=None):
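
`interface` and `confirm_delete` above return ftrack form definitions as plain lists of dicts; each dict is one widget, and the entered values come back in `event['data']['values']` keyed by `name`. A minimal sketch of that shape (values are illustrative):

    items = [
        {'type': 'label', 'value': '## Delete whole asset: ##'},
        {'label': 'characterA', 'name': 'whole_asset',
         'type': 'boolean', 'value': False},
        {'label': 'Enter "DELETE" to confirm', 'name': 'delete_key',
         'type': 'text', 'value': ''},
    ]
    # Returned as {'items': items, 'title': ...}; the ftrack web UI renders
    # the form and posts the values back into the next call of the action.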

@@ -15,31 +15,27 @@ class AssetsRemover(BaseAction):
     label = 'Delete Assets by Name'
     #: Action description.
     description = 'Removes assets from Ftrack and Avalon db with all childs'
+    #: roles that are allowed to register this action
+    role_list = ['Pypeclub', 'Administrator']
+    icon = (
+        'https://cdn4.iconfinder.com/data/icons/'
+        'ios-web-user-interface-multi-circle-flat-vol-5/512/'
+        'Clipboard_copy_delete_minus_paste_remove-512.png'
+    )
     #: Db
     db = DbConnector()

     def discover(self, session, entities, event):
         ''' Validation '''
-        selection = event["data"].get("selection", None)
-        if selection is None:
+        if len(entities) != 1:
             return False

         valid = ["show", "task"]
-        entityType = selection[0].get("entityType", "")
+        entityType = event["data"]["selection"][0].get("entityType", "")
         if entityType.lower() not in valid:
             return False

-        discover = False
-        roleList = ['Pypeclub', 'Administrator']
-        userId = event['source']['user']['id']
-        user = session.query('User where id is ' + userId).one()
-
-        for role in user['user_security_roles']:
-            if role['security_role']['name'] in roleList:
-                discover = True
-                break
-
-        return discover
+        return True

     def interface(self, session, entities, event):
         if not event['data'].get('values', {}):

@@ -145,8 +141,7 @@ def register(session, **kw):
     if not isinstance(session, ftrack_api.session.Session):
         return

-    action_handler = AssetsRemover(session)
-    action_handler.register()
+    AssetsRemover(session).register()


 def main(arguments=None):

@@ -51,8 +51,7 @@ def register(session, **kw):
     if not isinstance(session, ftrack_api.session.Session):
         return

-    action_handler = VersionsCleanup(session)
-    action_handler.register()
+    VersionsCleanup(session).register()


 def main(arguments=None):

@@ -1,393 +1,245 @@
-import os
-import sys
-import re
 import json
 import logging
 import subprocess
+import sys
+import os
+import re
+from operator import itemgetter
 import ftrack_api
-from pype.ftrack import BaseHandler
+from pype.ftrack import BaseAction
 from app.api import Logger
+from pype import pypelib


-class DJVViewAction(BaseHandler):
+log = Logger.getLogger(__name__)
+
+
+class DJVViewAction(BaseAction):
     """Launch DJVView action."""
     identifier = "djvview-launch-action"
-    # label = "DJV View"
-    # icon = "http://a.fsdn.com/allura/p/djv/icon"
+    label = "DJV View"
+    description = "DJV View Launcher"
+    icon = "http://a.fsdn.com/allura/p/djv/icon"
+    type = 'Application'

     def __init__(self, session):
         '''Expects a ftrack_api.Session instance'''
         super().__init__(session)
+        self.djv_path = None
+        self.config_data = None

-        if self.identifier is None:
-            raise ValueError(
-                'Action missing identifier.'
-            )
+        self.load_config_data()
+        self.set_djv_path()

-    def is_valid_selection(self, event):
-        selection = event["data"].get("selection", [])
-
-        if not selection:
-            return
-
-        entityType = selection[0]["entityType"]
-
-        if entityType not in ["assetversion", "task"]:
-            return False
-
-        return True
-
-    def discover(self, event):
-        """Return available actions based on *event*. """
-
-        if not self.is_valid_selection(event):
-            return
-
-        items = []
-        applications = self.get_applications()
-        applications = sorted(
-            applications, key=lambda application: application["label"]
-        )
-
-        for application in applications:
-            self.djv_path = application.get("path", None)
-            applicationIdentifier = application["identifier"]
-            label = application["label"]
-            items.append({
-                "actionIdentifier": self.identifier,
-                "label": label,
-                "variant": application.get("variant", None),
-                "description": application.get("description", None),
-                "icon": application.get("icon", "default"),
-                "applicationIdentifier": applicationIdentifier
-            })
-
-        return {
-            "items": items
-        }
+        if self.djv_path is None:
+            return
+
+        self.allowed_types = self.config_data.get(
+            'file_ext', ["img", "mov", "exr"]
+        )

     def register(self):
-        '''Registers the action, subscribing the discover and launch topics.'''
-        self.session.event_hub.subscribe(
-            'topic=ftrack.action.discover and source.user.username={0}'.format(
-                self.session.api_user
-            ), self.discover
-        )
-        launch_subscription = (
-            'topic=ftrack.action.launch'
-            ' and data.actionIdentifier={0}'
-            ' and source.user.username={1}'
-        )
-        self.session.event_hub.subscribe(
-            launch_subscription.format(
-                self.identifier,
-                self.session.api_user
-            ),
-            self.launch
-        )
+        assert (self.djv_path is not None), (
+            'DJV View is not installed'
+            ' or paths in presets are not set correctly'
+        )
+        super().register()

-    def get_applications(self):
-        applications = []
-
-        label = "DJVView {version}"
-        versionExpression = re.compile(r"(?P<version>\d+.\d+.\d+)")
-        applicationIdentifier = "djvview"
-        description = "DJV View Launcher"
-        icon = "http://a.fsdn.com/allura/p/djv/icon"
-        expression = []
-        if sys.platform == "win32":
-            expression = ["C:\\", "Program Files", "djv-\d.+",
-                          "bin", "djv_view.exe"]
-
-        elif sys.platform == "darwin":
-            expression = ["Application", "DJV.app", "Contents", "MacOS", "DJV"]
-        # Linuxs
-        else:
-            expression = ["usr", "local", "djv", "djv_view"]
-
-        pieces = expression[:]
-        start = pieces.pop(0)
-
-        if sys.platform == 'win32':
-            # On Windows C: means current directory so convert roots that look
-            # like drive letters to the C:\ format.
-            if start and start[-1] == ':':
-                start += '\\'
-
-        if not os.path.exists(start):
-            raise ValueError(
-                'First part "{0}" of expression "{1}" must match exactly to an'
-                ' existing entry on the filesystem.'
-                .format(start, expression)
-            )
-
-        expressions = list(map(re.compile, pieces))
-        expressionsCount = len(expression)-1
-
-        for location, folders, files in os.walk(
-            start, topdown=True, followlinks=True
-        ):
-            level = location.rstrip(os.path.sep).count(os.path.sep)
-            expression = expressions[level]
-
-            if level < (expressionsCount - 1):
-                # If not yet at final piece then just prune directories.
-                folders[:] = [folder for folder in folders
-                              if expression.match(folder)]
-            else:
-                # Match executable. Note that on OSX executable might equate to
-                # a folder (.app).
-                for entry in folders + files:
-                    match = expression.match(entry)
-                    if match:
-                        # Extract version from full matching path.
-                        path = os.path.join(start, location, entry)
-                        versionMatch = versionExpression.search(path)
-                        if versionMatch:
-                            version = versionMatch.group('version')
-
-                            applications.append({
-                                'identifier': applicationIdentifier.format(
-                                    version=version
-                                ),
-                                'path': path,
-                                'version': version,
-                                'label': label.format(version=version),
-                                'icon': icon,
-                                # 'variant': variant.format(version=version),
-                                'description': description
-                            })
-                        else:
-                            self.logger.debug(
-                                'Discovered application executable, but it '
-                                'does not appear to o contain required version'
-                                ' information: {0}'.format(path)
-                            )
-
-            # Don't descend any further as out of patterns to match.
-            del folders[:]
-
-        return applications
-
-    def translate_event(self, session, event):
-        '''Return *event* translated structure to be used with the API.'''
-
-        selection = event['data'].get('selection', [])
-
-        entities = list()
-        for entity in selection:
-            entities.append(
-                (session.get(
-                    self.get_entity_type(entity), entity.get('entityId')
-                ))
-            )
-
-        return entities
-
-    def get_entity_type(self, entity):
-        entity_type = entity.get('entityType').replace('_', '').lower()
-
-        for schema in self.session.schemas:
-            alias_for = schema.get('alias_for')
-
-            if (
-                alias_for and isinstance(alias_for, str) and
-                alias_for.lower() == entity_type
-            ):
-                return schema['id']
-
-        for schema in self.session.schemas:
-            if schema['id'].lower() == entity_type:
-                return schema['id']
-
-        raise ValueError(
-            'Unable to translate entity type: {0}.'.format(entity_type)
-        )
+    def discover(self, session, entities, event):
+        """Return available actions based on *event*. """
+        selection = event["data"].get("selection", [])
+        if len(selection) != 1:
+            return False
+
+        entityType = selection[0].get("entityType", None)
+        if entityType in ["assetversion", "task"]:
+            return True
+        return False

     def launch(self, event):
         """Callback method for DJVView action."""
         session = self.session
         entities = self.translate_event(session, event)

         # Launching application
         if "values" in event["data"]:
             filename = event['data']['values']['path']
             file_type = filename.split(".")[-1]

             # TODO Is this proper way?
             try:
                 fps = int(entities[0]['custom_attributes']['fps'])
             except Exception:
                 fps = 24

             # TODO issequence is probably already built-in validation in ftrack
             isseq = re.findall('%[0-9]*d', filename)
             if len(isseq) > 0:
                 if len(isseq) == 1:
                     frames = []
                     padding = re.findall('%[0-9]*d', filename).pop()
                     index = filename.find(padding)

                     full_file = filename[0:index-1]
                     file = full_file.split(os.sep)[-1]
                     folder = os.path.dirname(full_file)

                     for fname in os.listdir(path=folder):
                         if fname.endswith(file_type) and file in fname:
                             frames.append(int(fname.split(".")[-2]))

                     if len(frames) > 0:
                         start = min(frames)
                         end = max(frames)

                         range = (padding % start) + '-' + (padding % end)
                         filename = re.sub('%[0-9]*d', range, filename)
                 else:
                     msg = (
                         'DJV View - Filename has more than one'
                         ' sequence identifier.'
                     )
                     return {
                         'success': False,
                         'message': (msg)
                     }

             cmd = []
             # DJV path
             cmd.append(os.path.normpath(self.djv_path))
             # DJV Options Start ##############################################
             '''layer name'''
             # cmd.append('-file_layer (value)')
             ''' Proxy scale: 1/2, 1/4, 1/8'''
             cmd.append('-file_proxy 1/2')
             ''' Cache: True, False.'''
             cmd.append('-file_cache True')
             ''' Start in full screen '''
             # cmd.append('-window_fullscreen')
             ''' Toolbar controls: False, True.'''
             # cmd.append("-window_toolbar False")
             ''' Window controls: False, True.'''
             # cmd.append("-window_playbar False")
             ''' Grid overlay: None, 1x1, 10x10, 100x100.'''
             # cmd.append("-view_grid None")
             ''' Heads up display: True, False.'''
             # cmd.append("-view_hud True")
             ''' Playback: Stop, Forward, Reverse.'''
             cmd.append("-playback Forward")
             ''' Frame.'''
             # cmd.append("-playback_frame (value)")
             cmd.append("-playback_speed " + str(fps))
             ''' Timer: Sleep, Timeout. Value: Sleep.'''
             # cmd.append("-playback_timer (value)")
             ''' Timer resolution (seconds): 0.001.'''
             # cmd.append("-playback_timer_resolution (value)")
             ''' Time units: Timecode, Frames.'''
             cmd.append("-time_units Frames")
             # DJV Options End ################################################

             # PATH TO COMPONENT
             cmd.append(os.path.normpath(filename))

             # Run DJV with these commands
             subprocess.Popen(' '.join(cmd))

             return {
                 'success': True,
                 'message': 'DJV View started.'
             }

-        if 'items' not in event["data"]:
-            event["data"]['items'] = []
-
-        try:
-            for entity in entities:
-                versions = []
-                allowed_types = ["img", "mov", "exr"]
-
-                if entity.entity_type.lower() == "assetversion":
-                    if entity['components'][0]['file_type'] in allowed_types:
-                        versions.append(entity)
-
-                elif entity.entity_type.lower() == "task":
-                    # AssetVersions are obtainable only from shot!
-                    shotentity = entity['parent']
-
-                    for asset in shotentity['assets']:
-                        for version in asset['versions']:
-                            # Get only AssetVersion of selected task
-                            if version['task']['id'] != entity['id']:
-                                continue
-                            # Get only components with allowed type
-                            filetype = version['components'][0]['file_type']
-                            if filetype in allowed_types:
-                                versions.append(version)
-
-                # Raise error if no components were found
-                if len(versions) < 1:
-                    raise ValueError('There are no Asset Versions to open.')
-
-                for version in versions:
-                    for component in version['components']:
-                        label = "v{0} - {1} - {2}"
-
-                        label = label.format(
-                            str(version['version']).zfill(3),
-                            version['asset']['type']['name'],
-                            component['name']
-                        )
-
-                        try:
-                            # TODO This is proper way to get filepath!!!
-                            # THIS WON'T WORK RIGHT NOW
-                            location = component[
-                                'component_locations'
-                            ][0]['location']
-                            file_path = location.get_filesystem_path(component)
-                            # if component.isSequence():
-                            #     if component.getMembers():
-                            #         frame = int(
-                            #             component.getMembers()[0].getName()
-                            #         )
-                            #         file_path = file_path % frame
-                        except Exception:
-                            # This works but is NOT proper way
-                            file_path = component[
-                                'component_locations'
-                            ][0]['resource_identifier']
-
-                        event["data"]["items"].append(
-                            {"label": label, "value": file_path}
-                        )
+    def load_config_data(self):
+        path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json']
+        filepath = os.path.sep.join(path_items)
+
+        data = dict()
+        try:
+            with open(filepath) as data_file:
+                data = json.load(data_file)
+        except Exception as e:
+            log.warning(
+                'Failed to load data from DJV presets file ({})'.format(e)
+            )
+
+        self.config_data = data
+
+    def set_djv_path(self):
+        for path in self.config_data.get("djv_paths", []):
+            if os.path.exists(path):
+                self.djv_path = path
+                break
+
+    def interface(self, session, entities, event):
+        if event['data'].get('values', {}):
+            return
+
+        entity = entities[0]
+        versions = []
+
+        entity_type = entity.entity_type.lower()
+        if entity_type == "assetversion":
+            if (
+                entity[
+                    'components'
+                ][0]['file_type'][1:] in self.allowed_types
+            ):
+                versions.append(entity)
+        else:
+            master_entity = entity
+            if entity_type == "task":
+                master_entity = entity['parent']
+
+            for asset in master_entity['assets']:
+                for version in asset['versions']:
|
||||
# Get only AssetVersion of selected task
|
||||
if (
|
||||
entity_type == "task" and
|
||||
version['task']['id'] != entity['id']
|
||||
):
|
||||
continue
|
||||
# Get only components with allowed type
|
||||
filetype = version['components'][0]['file_type']
|
||||
if filetype[1:] in self.allowed_types:
|
||||
versions.append(version)
|
||||
|
||||
if len(versions) < 1:
|
||||
return {
|
||||
'success': False,
|
||||
'message': str(e)
|
||||
'message': 'There are no Asset Versions to open.'
|
||||
}
|
||||
|
||||
return {
|
||||
"items": [
|
||||
{
|
||||
"label": "Items to view",
|
||||
"type": "enumerator",
|
||||
"name": "path",
|
||||
"data": sorted(
|
||||
event["data"]['items'],
|
||||
key=itemgetter("label"),
|
||||
reverse=True
|
||||
)
|
||||
}
|
||||
]
|
||||
}
|
||||
items = []
|
||||
base_label = "v{0} - {1} - {2}"
|
||||
default_component = self.config_data.get(
|
||||
'default_component', None
|
||||
)
|
||||
last_available = None
|
||||
select_value = None
|
||||
for version in versions:
|
||||
for component in version['components']:
|
||||
label = base_label.format(
|
||||
str(version['version']).zfill(3),
|
||||
version['asset']['type']['name'],
|
||||
component['name']
|
||||
)
|
||||
|
||||
try:
|
||||
location = component[
|
||||
'component_locations'
|
||||
][0]['location']
|
||||
file_path = location.get_filesystem_path(component)
|
||||
except Exception:
|
||||
file_path = component[
|
||||
'component_locations'
|
||||
][0]['resource_identifier']
|
||||
|
||||
if os.path.isdir(os.path.dirname(file_path)):
|
||||
last_available = file_path
|
||||
if component['name'] == default_component:
|
||||
select_value = file_path
|
||||
items.append(
|
||||
{'label': label, 'value': file_path}
|
||||
)
|
||||
|
||||
if len(items) == 0:
|
||||
return {
|
||||
'success': False,
|
||||
'message': (
|
||||
'There are no Asset Versions with accessible path.'
|
||||
)
|
||||
}
|
||||
|
||||
item = {
|
||||
'label': 'Items to view',
|
||||
'type': 'enumerator',
|
||||
'name': 'path',
|
||||
'data': sorted(
|
||||
items,
|
||||
key=itemgetter('label'),
|
||||
reverse=True
|
||||
)
|
||||
}
|
||||
if select_value is not None:
|
||||
item['value'] = select_value
|
||||
else:
|
||||
item['value'] = last_available
|
||||
|
||||
return {'items': [item]}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
"""Callback method for DJVView action."""
|
||||
|
||||
# Launching application
|
||||
if "values" not in event["data"]:
|
||||
return
|
||||
filename = event['data']['values']['path']
|
||||
|
||||
fps = entities[0].get('custom_attributes', {}).get('fps', None)
|
||||
|
||||
cmd = []
|
||||
# DJV path
|
||||
cmd.append(os.path.normpath(self.djv_path))
|
||||
# DJV Options Start ##############################################
|
||||
# '''layer name'''
|
||||
# cmd.append('-file_layer (value)')
|
||||
# ''' Proxy scale: 1/2, 1/4, 1/8'''
|
||||
# cmd.append('-file_proxy 1/2')
|
||||
# ''' Cache: True, False.'''
|
||||
# cmd.append('-file_cache True')
|
||||
# ''' Start in full screen '''
|
||||
# cmd.append('-window_fullscreen')
|
||||
# ''' Toolbar controls: False, True.'''
|
||||
# cmd.append("-window_toolbar False")
|
||||
# ''' Window controls: False, True.'''
|
||||
# cmd.append("-window_playbar False")
|
||||
# ''' Grid overlay: None, 1x1, 10x10, 100x100.'''
|
||||
# cmd.append("-view_grid None")
|
||||
# ''' Heads up display: True, False.'''
|
||||
# cmd.append("-view_hud True")
|
||||
''' Playback: Stop, Forward, Reverse.'''
|
||||
cmd.append("-playback Forward")
|
||||
# ''' Frame.'''
|
||||
# cmd.append("-playback_frame (value)")
|
||||
if fps is not None:
|
||||
cmd.append("-playback_speed {}".format(int(fps)))
|
||||
# ''' Timer: Sleep, Timeout. Value: Sleep.'''
|
||||
# cmd.append("-playback_timer (value)")
|
||||
# ''' Timer resolution (seconds): 0.001.'''
|
||||
# cmd.append("-playback_timer_resolution (value)")
|
||||
''' Time units: Timecode, Frames.'''
|
||||
cmd.append("-time_units Frames")
|
||||
# DJV Options End ################################################
|
||||
|
||||
# PATH TO COMPONENT
|
||||
cmd.append(os.path.normpath(filename))
|
||||
|
||||
try:
|
||||
# Run DJV with these commands
|
||||
subprocess.Popen(' '.join(cmd))
|
||||
except FileNotFoundError:
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'File "{}" was not found.'.format(
|
||||
os.path.basename(filename)
|
||||
)
|
||||
}
|
||||
|
||||
return True
|
||||
|
||||
def register(session):
|
||||
"""Register hooks."""
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
action = DJVViewAction(session)
|
||||
action.register()
|
||||
DJVViewAction(session).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
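The frame-range substitution in the first launch() above is compact but easy to misread, so here is a minimal, self-contained sketch of the same idea; the file names and the folder listing are illustrative, not taken from the repository:

    import os
    import re

    def expand_sequence(filename, folder_listing):
        """Replace a single %0Nd padding token with a 'start-end' range.

        `folder_listing` stands in for os.listdir() so the sketch stays
        self-contained; pass real directory contents in practice.
        """
        paddings = re.findall('%[0-9]*d', filename)
        if len(paddings) != 1:
            raise ValueError('Expected exactly one sequence identifier.')
        padding = paddings[0]
        # Everything before the padding token identifies the sequence.
        prefix = os.path.basename(filename[:filename.find(padding) - 1])
        frames = [
            int(name.split('.')[-2])
            for name in folder_listing
            if prefix in name
        ]
        frame_range = (padding % min(frames)) + '-' + (padding % max(frames))
        return re.sub('%[0-9]*d', frame_range, filename)

    # expand_sequence('shot.%04d.exr', ['shot.0001.exr', 'shot.0012.exr'])
    # -> 'shot.0001-0012.exr'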
@@ -16,7 +16,13 @@ class JobKiller(BaseAction):
    #: Action label.
    label = 'Job Killer'
    #: Action description.
    description = 'Killing all running jobs younger than day'
    description = 'Killing selected running jobs'
    #: roles that are allowed to register this action
    role_list = ['Pypeclub', 'Administrator']
    icon = (
        'https://cdn2.iconfinder.com/data/icons/new-year-resolutions/64/'
        'resolutions-23-512.png'
    )

    def discover(self, session, entities, event):
        ''' Validation '''
@@ -30,29 +36,42 @@ class JobKiller(BaseAction):
        jobs = session.query(
            'select id, status from Job'
            ' where status in ("queued", "running")'
        )
        ).all()

        items = []
        import json
        item_splitter = {'type': 'label', 'value': '---'}
        for job in jobs:
            data = json.loads(job['data'])
            user = job['user']['username']
            created = job['created_at'].strftime('%d.%m.%Y %H:%M:%S')
            label = '{}/ {}/ {}'.format(
            label = '{} - {} - {}'.format(
                data['description'], created, user
            )
            item_label = {
                'type': 'label',
                'value': label
            }
            item = {
                'label': label,
                'name': job['id'],
                'type': 'boolean',
                'value': False
            }
            if len(items) > 0:
                items.append(item_splitter)
            items.append(item_label)
            items.append(item)

        return {
            'items': items,
            'title': title
        }
        if len(items) == 0:
            return {
                'success': False,
                'message': 'Didn\'t find any running jobs'
            }
        else:
            return {
                'items': items,
                'title': title
            }

    def launch(self, session, entities, event):
        """ GET JOB """
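For reference, the interface this hunk builds is a flat widget list for ftrack's action dialog; with two running jobs the payload would look roughly like this (ids, timestamps and descriptions are illustrative):

    items = [
        {'type': 'label', 'value': 'Render publish - 01.01.2019 12:00:00 - jdoe'},
        {'label': '...', 'name': '<job-id-1>', 'type': 'boolean', 'value': False},
        {'type': 'label', 'value': '---'},   # item_splitter between jobs
        {'type': 'label', 'value': 'Sync Ftrack to Avalon. - 01.01.2019 13:30:00 - jdoe'},
        {'label': '...', 'name': '<job-id-2>', 'type': 'boolean', 'value': False},
    ]
    # returned to ftrack as {'items': items, 'title': title}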
@@ -104,8 +123,7 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = JobKiller(session)
    action_handler.register()
    JobKiller(session).register()


def main(arguments=None):
@@ -80,8 +80,7 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = SetVersion(session)
    action_handler.register()
    SetVersion(session).register()


def main(arguments=None):
@@ -54,6 +54,8 @@ class SyncToAvalon(BaseAction):
        'https://cdn1.iconfinder.com/data/icons/hawcons/32/'
        '699650-icon-92-inbox-download-512.png'
    )
    #: roles that are allowed to register this action
    role_list = ['Pypeclub']
    #: Action priority
    priority = 200

@@ -63,22 +65,11 @@ class SyncToAvalon(BaseAction):

    def discover(self, session, entities, event):
        ''' Validation '''
        role_check = False
        discover = False
        role_list = ['Pypeclub']
        user_id = event['source']['user']['id']
        user = session.query('User where id is ' + user_id).one()
        for entity in entities:
            if entity.entity_type.lower() not in ['task', 'assetversion']:
                return True

        for role in user['user_security_roles']:
            if role['security_role']['name'] in role_list:
                role_check = True
        if role_check is True:
            for entity in entities:
                if entity.entity_type.lower() not in ['task', 'assetversion']:
                    discover = True
                    break

        return discover
        return False

    def launch(self, session, entities, event):
        message = ""

@@ -91,15 +82,11 @@ class SyncToAvalon(BaseAction):
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Synch Ftrack to Avalon.'
                'description': 'Sync Ftrack to Avalon.'
            })
        })

        session.commit()
        try:
            self.log.info(
                "Action <" + self.__class__.__name__ + "> is running"
            )

            self.importable = []

            # get from top entity in hierarchy all parent entities

@@ -146,26 +133,11 @@ class SyncToAvalon(BaseAction):
            )

            if 'errors' in result and len(result['errors']) > 0:
                items = []
                for error in result['errors']:
                    for key, message in error.items():
                        name = key.lower().replace(' ', '')
                        info = {
                            'label': key,
                            'type': 'textarea',
                            'name': name,
                            'value': message
                        }
                        items.append(info)
                        self.log.error(
                            '{}: {}'.format(key, message)
                        )
                title = 'Hey You! Few Errors were raised! (*look below*)'

                job['status'] = 'failed'
                session.commit()

                self.show_interface(event, items, title)
                ftracklib.show_errors(self, event, result['errors'])

                return {
                    'success': False,
                    'message': "Sync to avalon FAILED"

@@ -176,7 +148,6 @@ class SyncToAvalon(BaseAction):
            avalon_project = result['project']

            job['status'] = 'done'
            self.log.info('Synchronization to Avalon was successful!')

        except ValueError as ve:
            job['status'] = 'failed'

@@ -234,8 +205,7 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = SyncToAvalon(session)
    action_handler.register()
    SyncToAvalon(session).register()


def main(arguments=None):
@@ -1,5 +1,3 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack
import sys
import argparse
import logging

@@ -27,20 +25,17 @@ class TestAction(BaseAction):
    description = 'Test action'
    #: priority
    priority = 10000
    #: roles that are allowed to register this action
    role_list = ['Pypeclub']
    icon = (
        'https://cdn4.iconfinder.com/data/icons/hospital-19/512/'
        '8_hospital-512.png'
    )

    def discover(self, session, entities, event):
        ''' Validation '''
        discover = False
        roleList = ['Pypeclub']
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        for role in user['user_security_roles']:
            if role['security_role']['name'] in roleList:
                discover = True
                break

        return discover
        return True

    def launch(self, session, entities, event):
        self.log.info(event)

@@ -54,8 +49,7 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = TestAction(session)
    action_handler.register()
    TestAction(session).register()


def main(arguments=None):
@@ -1,6 +1,3 @@
# :coding: utf-8
# :copyright: Copyright (c) 2015 Milan Kolar

import sys
import argparse
import logging

@@ -72,8 +69,7 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = ThumbToChildren(session)
    action_handler.register()
    ThumbToChildren(session).register()


def main(arguments=None):
@@ -1,6 +1,3 @@
# :coding: utf-8
# :copyright: Copyright (c) 2015 Milan Kolar

import sys
import argparse
import logging

@@ -94,8 +91,7 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = ThumbToParent(session)
    action_handler.register()
    ThumbToParent(session).register()


def main(arguments=None):
@@ -1,54 +1,65 @@
import os
import toml

import json
import ftrack_api
import appdirs


config_path = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
config_name = 'ftrack_cred.toml'
fpath = os.path.join(config_path, config_name)
folder = os.path.dirname(fpath)

if not os.path.isdir(folder):
    os.makedirs(folder)


def _get_credentials():

    folder = os.path.dirname(fpath)
action_file_name = 'ftrack_cred.json'
event_file_name = 'ftrack_event_cred.json'
action_fpath = os.path.join(config_path, action_file_name)
event_fpath = os.path.join(config_path, event_file_name)
folders = set([os.path.dirname(action_fpath), os.path.dirname(event_fpath)])

for folder in folders:
    if not os.path.isdir(folder):
        os.makedirs(folder)


def _get_credentials(event=False):
    if event:
        fpath = event_fpath
    else:
        fpath = action_fpath

    credentials = {}
    try:
        file = open(fpath, 'r')
        credentials = json.load(file)
    except Exception:
        filecreate = open(fpath, 'w')
        filecreate.close()
        file = open(fpath, 'r')
    file = open(fpath, 'w')

    credentials = toml.load(file)
    file.close()

    return credentials


def _save_credentials(username, apiKey):
    file = open(fpath, 'w')

def _save_credentials(username, apiKey, event=False, auto_connect=None):
    data = {
        'username': username,
        'apiKey': apiKey
    }

    credentials = toml.dumps(data)
    file.write(credentials)
    if event:
        fpath = event_fpath
        if auto_connect is None:
            cred = _get_credentials(True)
            auto_connect = cred.get('auto_connect', False)
        data['auto_connect'] = auto_connect
    else:
        fpath = action_fpath

    file = open(fpath, 'w')
    file.write(json.dumps(data))
    file.close()


def _clear_credentials():
    file = open(fpath, 'w').close()
def _clear_credentials(event=False):
    if event:
        fpath = event_fpath
    else:
        fpath = action_fpath
    open(fpath, 'w').close()
    _set_env(None, None)
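A minimal round trip with the new JSON helpers above, assuming they are imported from the credentials module; the username and API key are placeholders:

    # Store event-server credentials and opt into auto-connect.
    _save_credentials('john.doe', 'xxxx-api-key', event=True, auto_connect=True)

    cred = _get_credentials(event=True)
    assert cred.get('username') == 'john.doe'
    assert cred.get('auto_connect') is True

    # Truncates the credential file and unsets the environment variables.
    _clear_credentials(event=True)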
@@ -78,6 +78,7 @@ class Sync_To_Avalon(BaseAction):
        for role in user['user_security_roles']:
            if role['security_role']['name'] in roleList:
                roleCheck = True
                break
        if roleCheck is True:
            for entity in entities:
                if entity.entity_type.lower() not in ['task', 'assetversion']:
@@ -97,15 +98,11 @@ class Sync_To_Avalon(BaseAction):
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Synch Ftrack to Avalon.'
                'description': 'Sync Ftrack to Avalon.'
            })
        })

        session.commit()
        try:
            self.log.info(
                "Action <" + self.__class__.__name__ + "> is running"
            )

            self.importable = []

            # get from top entity in hierarchy all parent entities

@@ -152,26 +149,11 @@ class Sync_To_Avalon(BaseAction):
            )

            if 'errors' in result and len(result['errors']) > 0:
                items = []
                for error in result['errors']:
                    for key, message in error.items():
                        name = key.lower().replace(' ', '')
                        info = {
                            'label': key,
                            'type': 'textarea',
                            'name': name,
                            'value': message
                        }
                        items.append(info)
                        self.log.error(
                            '{}: {}'.format(key, message)
                        )
                title = 'Hey You! Few Errors were raised! (*look below*)'

                job['status'] = 'failed'
                session.commit()

                self.show_interface(event, items, title)
                lib.show_errors(self, event, result['errors'])

                return {
                    'success': False,
                    'message': "Sync to avalon FAILED"

@@ -183,7 +165,6 @@ class Sync_To_Avalon(BaseAction):

            job['status'] = 'done'
            session.commit()
            self.log.info('Synchronization to Avalon was successful!')

        except ValueError as ve:
            job['status'] = 'failed'

@@ -239,8 +220,7 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = Sync_To_Avalon(session)
    action_handler.register()
    Sync_To_Avalon(session).register()


def main(arguments=None):
@@ -13,25 +13,25 @@ class DelAvalonIdFromNew(BaseEvent):
    '''
    priority = Sync_to_Avalon.priority - 1

    def launch(self, event):
    def launch(self, session, event):
        created = []
        entities = event['data']['entities']
        for entity in entities:
            try:
                entity_id = entity['entityId']

                if entity['action'] == 'add':
                if entity.get('action', None) == 'add':
                    id_dict = entity['changes']['id']

                    if id_dict['new'] is not None and id_dict['old'] is None:
                        created.append(id_dict['new'])

                elif (
                    entity['action'] == 'update' and
                    entity.get('action', None) == 'update' and
                    get_ca_mongoid() in entity['keys'] and
                    entity_id in created
                ):
                    ftrack_entity = self.session.get(
                    ftrack_entity = session.get(
                        self._get_entity_type(entity),
                        entity_id
                    )

@@ -44,19 +44,12 @@ class DelAvalonIdFromNew(BaseEvent):
                    ftrack_entity['custom_attributes'][
                        get_ca_mongoid()
                    ] = ''
                    self.session.commit()
                    session.commit()

            except Exception:
                session.rollback()
                continue

    def register(self):
        '''Registers the event, subscribing the discover and launch topics.'''
        self.session.event_hub.subscribe(
            'topic=ftrack.update',
            self.launch,
            priority=self.priority
        )


def register(session, **kw):
    '''Register plugin. Called when used as a plugin.'''
@@ -34,49 +34,56 @@ class NextTaskUpdate(BaseEvent):

        return None

    def launch(self, session, entities, event):
    def launch(self, session, event):
        '''Propagates status from version to task when changed'''

        # self.log.info(event)
        # start of event procedure ----------------------------------

        for entity in event['data'].get('entities', []):
            changes = entity.get('changes', None)
            if changes is None:
                continue
            statusid_changes = changes.get('statusid', {})
            if (
                entity['entityType'] != 'task' or
                'statusid' not in entity['keys'] or
                statusid_changes.get('new', None) is None or
                statusid_changes.get('old', None) is None
            ):
                continue

            if (entity['entityType'] == 'task' and
                    'statusid' in entity['keys']):
                task = session.get('Task', entity['entityId'])

            task = session.get('Task', entity['entityId'])
                status = session.get('Status',
                                     entity['changes']['statusid']['new'])
                state = status['state']['name']

            status = session.get('Status',
                                 entity['changes']['statusid']['new'])
            state = status['state']['name']
                next_task = self.get_next_task(task, session)

            next_task = self.get_next_task(task, session)
                # Setting next task to Ready, if on NOT READY
                if next_task and state == 'Done':
                    if next_task['status']['name'].lower() == 'not ready':

            # Setting next task to Ready, if on NOT READY
            if next_task and state == 'Done':
                if next_task['status']['name'].lower() == 'not ready':
                        # Get path to task
                        path = task['name']
                        for p in task['ancestors']:
                            path = p['name'] + '/' + path

                    # Get path to task
                    path = task['name']
                    for p in task['ancestors']:
                        path = p['name'] + '/' + path

                        # Setting next task status
                        try:
                            query = 'Status where name is "{}"'.format('Ready')
                            status_to_set = session.query(query).one()
                            next_task['status'] = status_to_set
                        except Exception as e:
                            self.log.warning((
                                '!!! [ {} ] status couldnt be set: [ {} ]'
                            ).format(path, e))
                        else:
                            self.log.info((
                                '>>> [ {} ] updated to [ Ready ]'
                            ).format(path))

                        session.commit()
                    # Setting next task status
                    try:
                        query = 'Status where name is "{}"'.format('Ready')
                        status_to_set = session.query(query).one()
                        next_task['status'] = status_to_set
                        session.commit()
                        self.log.info((
                            '>>> [ {} ] updated to [ Ready ]'
                        ).format(path))
                    except Exception as e:
                        self.log.warning((
                            '!!! [ {} ] status couldnt be set: [ {} ]'
                        ).format(path, e))
                        session.rollback()


def register(session, **kw):

@@ -84,5 +91,4 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    event = NextTaskUpdate(session)
    event.register()
    NextTaskUpdate(session).register()
@@ -2,9 +2,12 @@ import ftrack_api
from pype.ftrack import BaseEvent


ignore_me = True


class Radio_buttons(BaseEvent):

    def launch(self, session, entities, event):
    def launch(self, session, event):
        '''Provides a radio button behaviour to any boolean attribute in
        radio_button group.'''

@@ -31,7 +34,6 @@ class Radio_buttons(BaseEvent):

        session.commit()


def register(session):
    '''Register plugin. Called when used as a plugin.'''
    if not isinstance(session, ftrack_api.session.Session):
@@ -4,8 +4,12 @@ from pype.ftrack import BaseEvent, lib

class Sync_to_Avalon(BaseEvent):

    def launch(self, session, entities, event):
    ignore_entityType = [
        'assetversion', 'job', 'user', 'reviewsessionobject', 'timer',
        'socialfeed', 'socialnotification', 'timelog'
    ]

    def launch(self, session, event):
        ca_mongoid = lib.get_ca_mongoid()
        # If mongo_id textfield has changed: RETURN!
        # - infinite loop

@@ -14,6 +18,7 @@ class Sync_to_Avalon(BaseEvent):
            if ca_mongoid in ent['keys']:
                return

        entities = self._get_entities(session, event, self.ignore_entityType)
        ft_project = None
        # get project
        for entity in entities:

@@ -84,23 +89,9 @@ class Sync_to_Avalon(BaseEvent):
            custom_attributes=custom_attributes
        )
        if 'errors' in result and len(result['errors']) > 0:
            items = []
            for error in result['errors']:
                for key, message in error.items():
                    name = key.lower().replace(' ', '')
                    info = {
                        'label': key,
                        'type': 'textarea',
                        'name': name,
                        'value': message
                    }
                    items.append(info)
                    self.log.error(
                        '{}: {}'.format(key, message)
                    )
            session.commit()
            title = 'Hey You! You raised few Errors! (*look below*)'
            self.show_interface(event, items, title)
            lib.show_errors(self, event, result['errors'])

            return

        if avalon_project is None:

@@ -109,56 +100,21 @@ class Sync_to_Avalon(BaseEvent):

        except Exception as e:
            message = str(e)
            title = 'Hey You! Unknown Error has been raised! (*look below*)'
            ftrack_message = (
                'SyncToAvalon event ended with unexpected error'
                ' please check log file for more information.'
                ' please check log file or contact Administrator'
                ' for more information.'
            )
            items = [{
                'label': 'Fatal Error',
                'type': 'textarea',
                'name': 'error',
                'value': ftrack_message
            }]
            title = 'Hey You! Unknown Error has been raised! (*look below*)'
            items = [
                {'type': 'label', 'value': '# Fatal Error'},
                {'type': 'label', 'value': '<p>{}</p>'.format(ftrack_message)}
            ]
            self.show_interface(event, items, title)
            self.log.error(message)
            self.log.error('Fatal error during sync: {}'.format(message))

            return

    def _launch(self, event):
        self.session.reset()

        args = self._translate_event(
            self.session, event
        )

        self.launch(
            self.session, *args
        )
        return

    def _translate_event(self, session, event):
        exceptions = [
            'assetversion', 'job', 'user', 'reviewsessionobject', 'timer',
            'socialfeed', 'timelog'
        ]
        _selection = event['data'].get('entities', [])

        _entities = list()
        for entity in _selection:
            if entity['entityType'] in exceptions:
                continue
            _entities.append(
                (
                    session.get(
                        self._get_entity_type(entity),
                        entity.get('entityId')
                    )
                )
            )

        return [_entities, event]


def register(session, **kw):
    '''Register plugin. Called when used as a plugin.'''

@@ -166,5 +122,4 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    event = Sync_to_Avalon(session)
    event.register()
    Sync_to_Avalon(session).register()
@@ -13,11 +13,11 @@ class Test_Event(BaseEvent):

    priority = 10000

    def launch(self, session, entities, event):
    def launch(self, session, event):

        '''just a testing event'''

        self.log.info(event)
        # self.log.info(event)

        return True

@@ -27,5 +27,4 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    event = Test_Event(session)
    event.register()
    Test_Event(session).register()
@@ -4,7 +4,7 @@ from pype.ftrack import BaseEvent

class ThumbnailEvents(BaseEvent):

    def launch(self, session, entities, event):
    def launch(self, session, event):
        '''just a testing event'''

        # self.log.info(event)

@@ -23,8 +23,12 @@ class ThumbnailEvents(BaseEvent):
                    parent['name'], task['name']))

            # Update task thumbnail from published version
            if (entity['entityType'] == 'assetversion' and
                    entity['action'] == 'encoded'):
            # if (entity['entityType'] == 'assetversion' and
            #         entity['action'] == 'encoded'):
            if (
                entity['entityType'] == 'assetversion'
                and 'thumbid' in entity['keys']
            ):

                version = session.get('AssetVersion', entity['entityId'])
                thumbnail = version.get('thumbnail')

@@ -38,11 +42,12 @@ class ThumbnailEvents(BaseEvent):

                session.commit()

        pass


def register(session, **kw):
    '''Register plugin. Called when used as a plugin.'''
    if not isinstance(session, ftrack_api.session.Session):
        return

    event = ThumbnailEvents(session)
    event.register()
    ThumbnailEvents(session).register()
@@ -4,20 +4,25 @@ from pype.ftrack import BaseEvent

class VersionToTaskStatus(BaseEvent):

    def launch(self, session, entities, event):
    def launch(self, session, event):
        '''Propagates status from version to task when changed'''
        session.commit()

        # self.log.info(event)
        # start of event procedure ----------------------------------
        for entity in event['data'].get('entities', []):
            # Filter non-assetversions
            if (entity['entityType'] == 'assetversion' and
                    'statusid' in entity['keys']):
            if (
                entity['entityType'] == 'assetversion' and
                'statusid' in entity.get('keys', [])
            ):

                version = session.get('AssetVersion', entity['entityId'])
                version_status = session.get(
                    'Status', entity['changes']['statusid']['new']
                )
                try:
                    version_status = session.get(
                        'Status', entity['changes']['statusid']['new']
                    )
                except Exception:
                    continue
                task_status = version_status
                task = version['task']
                self.log.info('>>> version status: [ {} ]'.format(

@@ -36,10 +41,17 @@ class VersionToTaskStatus(BaseEvent):

                if status_to_set is not None:
                    query = 'Status where name is "{}"'.format(status_to_set)
                    task_status = session.query(query).one()
                    try:
                        task_status = session.query(query).one()
                    except Exception:
                        self.log.info(
                            '!!! status was not found in Ftrack [ {} ]'.format(
                                status_to_set
                            ))
                        continue

                # Proceed if the task status was set
                if task_status:
                if task_status is not None:
                    # Get path to task
                    path = task['name']
                    for p in task['ancestors']:

@@ -62,5 +74,4 @@ def register(session, **kw):
    if not isinstance(session, ftrack_api.session.Session):
        return

    event = VersionToTaskStatus(session)
    event.register()
    VersionToTaskStatus(session).register()
@@ -9,7 +9,7 @@ from app.vendor.Qt import QtCore, QtGui, QtWidgets
from pype.ftrack import credentials, login_dialog as login_dialog

from pype.vendor.pynput import mouse, keyboard
from FtrackServer import FtrackServer
from . import FtrackServer

from pype import api as pype


pype/ftrack/ftrack_server/__init__.py (Normal file, 8 lines)
@@ -0,0 +1,8 @@
from .ftrack_server import FtrackServer
from . import event_server, event_server_cli

__all__ = [
    'event_server',
    'event_server_cli',
    'FtrackServer'
]
@@ -1,6 +1,6 @@
import sys
from pype.ftrack import credentials, login_dialog as login_dialog
from FtrackServer import FtrackServer
from pype.ftrack.ftrack_server import FtrackServer
from app.vendor.Qt import QtWidgets
from pype import api

@@ -9,10 +9,12 @@ log = api.Logger.getLogger(__name__, "ftrack-event-server")

class EventServer:
    def __init__(self):
        self.login_widget = login_dialog.Login_Dialog_ui(self)
        self.login_widget = login_dialog.Login_Dialog_ui(
            parent=self, is_event=True
        )
        self.event_server = FtrackServer('event')

        cred = credentials._get_credentials()
        cred = credentials._get_credentials(True)

        if 'username' in cred and 'apiKey' in cred:
            self.login_widget.user_input.setText(cred['username'])

@@ -24,6 +26,7 @@ class EventServer:

    def loginChange(self):
        log.info("Logged successfully")

        self.login_widget.close()
        self.event_server.run_server()

pype/ftrack/ftrack_server/event_server_cli.py (Normal file, 114 lines)
@@ -0,0 +1,114 @@
import sys
from pype.ftrack import credentials
from pype.ftrack.ftrack_server import FtrackServer
from app import api

log = api.Logger.getLogger(__name__, "ftrack-event-server-cli")

possible_yes = ['y', 'yes']
possible_no = ['n', 'no']
possible_third = ['a', 'auto']
possible_exit = ['exit']


def ask_yes_no(third=False):
    msg = "Y/N:"
    if third:
        msg = "Y/N/AUTO:"
    log.info(msg)
    response = input().lower()
    if response in possible_exit:
        sys.exit()
    elif response in possible_yes:
        return True
    elif response in possible_no:
        return False
    else:
        # copy so the module-level list is not mutated by the extends below
        all_entries = list(possible_no)
        all_entries.extend(possible_yes)
        if third is True:
            if response in possible_third:
                return 'auto'
            else:
                all_entries.extend(possible_third)
        all_entries.extend(possible_exit)
        all_entries = ', '.join(all_entries)
        log.info(
            'Invalid input. Possible entries: [{}]. Try it again:'.format(
                all_entries
            )
        )
        return ask_yes_no()


def cli_login():
    enter_cred = True
    cred_data = credentials._get_credentials(True)

    user = cred_data.get('username', None)
    key = cred_data.get('apiKey', None)
    auto = cred_data.get('auto_connect', False)
    if user is None or key is None:
        log.info(
            'Credentials are not set. Do you want to enter them now? (Y/N)'
        )
        if ask_yes_no() is False:
            log.info("Exiting...")
            return
    elif credentials._check_credentials(user, key):
        if auto is False:
            log.info((
                'Do you want to log in with username {}?'
                ' Enter "auto" if you want to auto-connect next time'
                ' (Y/N/AUTO)'
            ).format(
                user
            ))
            result = ask_yes_no(True)
            if result is True:
                enter_cred = False
            elif result == 'auto':
                credentials._save_credentials(user, key, True, True)
                enter_cred = False
        else:
            enter_cred = False
    else:
        log.info(
            'Stored credentials are not valid.'
            ' Do you want to enter them now? (Y/N)'
        )
        if ask_yes_no() is False:
            log.info("Exiting...")
            return

    while enter_cred:
        log.info('Please enter Ftrack API User:')
        user = input()
        log.info('And now enter Ftrack API Key:')
        key = input()
        if credentials._check_credentials(user, key):
            log.info(
                'Credentials are valid.'
                ' Do you want to auto-connect next time? (Y/N)'
            )
            credentials._save_credentials(user, key, True, ask_yes_no())
            enter_cred = False
            break
        else:
            log.info(
                'Entered credentials are not valid.'
                ' Do you want to try it again? (Y/N)'
            )
            if ask_yes_no() is False:
                log.info('Exiting...')
                return

    server = FtrackServer('event')
    server.run_server()


def main():
    cli_login()


if __name__ == '__main__':
    main()

pype/ftrack/ftrack_server/ftrack_server.py (Normal file, 160 lines)
@@ -0,0 +1,160 @@
import os
import sys
import types
import importlib
import ftrack_api
import time
import logging
from app.api import Logger

log = Logger.getLogger(__name__)

"""
# Required - Needed for connection to Ftrack
FTRACK_SERVER    # Ftrack server e.g. "https://myFtrack.ftrackapp.com"
FTRACK_API_KEY   # Ftrack user's API key "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
FTRACK_API_USER  # Ftrack username e.g. "user.name"

# Required - Paths to folders with actions/events
FTRACK_ACTIONS_PATH  # Paths to folders where actions are located
    - EXAMPLE: "M:/FtrackApi/../actions/"
FTRACK_EVENTS_PATH   # Paths to folders where events are located
    - EXAMPLE: "M:/FtrackApi/../events/"

# Required - Needed for import of included modules
PYTHONPATH  # Path to ftrack_api and paths to all modules used in actions
    - path to ftrack_action_handler, etc.
"""


class FtrackServer():
    def __init__(self, type='action'):
        """
        - 'type' is by default set to 'action' - runs the action server
        - enter 'event' for the event server

        EXAMPLE FOR EVENT SERVER:
            ...
            server = FtrackServer('event')
            server.run_server()
            ...
        """
        # set Ftrack logging to Warning only - OPTIONAL
        ftrack_log = logging.getLogger("ftrack_api")
        ftrack_log.setLevel(logging.WARNING)

        self.type = type
        self.actionsAvailable = True
        self.eventsAvailable = True
        # Separate all paths
        if "FTRACK_ACTIONS_PATH" in os.environ:
            all_action_paths = os.environ["FTRACK_ACTIONS_PATH"]
            self.actionsPaths = all_action_paths.split(os.pathsep)
        else:
            self.actionsAvailable = False

        if "FTRACK_EVENTS_PATH" in os.environ:
            all_event_paths = os.environ["FTRACK_EVENTS_PATH"]
            self.eventsPaths = all_event_paths.split(os.pathsep)
        else:
            self.eventsAvailable = False

    def stop_session(self):
        if self.session.event_hub.connected is True:
            self.session.event_hub.disconnect()
        self.session.close()
        self.session = None

    def set_files(self, paths):
        # Iterate all paths
        functions = []
        for path in paths:
            # add path to PYTHON PATH
            if path not in sys.path:
                sys.path.append(path)

            # Get all modules with functions
            for file in os.listdir(path):
                # Get only .py files with action functions
                try:
                    if '.pyc' in file or '.py' not in file:
                        continue

                    ignore = 'ignore_me'
                    mod = importlib.import_module(os.path.splitext(file)[0])
                    importlib.reload(mod)
                    mod_functions = dict(
                        [
                            (name, function)
                            for name, function in mod.__dict__.items()
                            if isinstance(function, types.FunctionType) or
                            name == ignore
                        ]
                    )
                    # Don't care about ignore_me files
                    if (
                        ignore in mod_functions and
                        mod_functions[ignore] is True
                    ):
                        continue
                    # separate files by register function
                    if 'register' not in mod_functions:
                        msg = (
                            '"{0}" - Missing register method'
                        ).format(file, self.type)
                        log.warning(msg)
                        continue

                    functions.append({
                        'name': file,
                        'register': mod_functions['register']
                    })
                except Exception as e:
                    msg = 'Loading of file "{}" failed ({})'.format(
                        file, str(e)
                    )
                    log.warning(msg)

        if len(functions) < 1:
            raise Exception

        function_counter = 0
        for function in functions:
            try:
                function['register'](self.session)
                if function_counter % 7 == 0:
                    time.sleep(0.1)
                function_counter += 1
            except Exception as e:
                msg = '"{}" - register was not successful ({})'.format(
                    function['name'], str(e)
                )
                log.warning(msg)

    def run_server(self):
        self.session = ftrack_api.Session(auto_connect_event_hub=True,)

        if self.type.lower() == 'event':
            if self.eventsAvailable is False:
                msg = (
                    'FTRACK_EVENTS_PATH is not set'
                    ', event server won\'t launch'
                )
                log.error(msg)
                return
            self.set_files(self.eventsPaths)
        else:
            if self.actionsAvailable is False:
                msg = (
                    'FTRACK_ACTIONS_PATH is not set'
                    ', action server won\'t launch'
                )
                log.error(msg)
                return
            self.set_files(self.actionsPaths)

        log.info(60*"*")
        log.info('Registration of actions/events has finished!')

        # keep event_hub on session running
        self.session.event_hub.wait()
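Putting the class to use matches its docstring; a sketch, assuming the required FTRACK_SERVER/FTRACK_API_USER/FTRACK_API_KEY variables are already set and the events path is illustrative:

    import os

    # Hypothetical plugin folder; multiple paths may be joined with os.pathsep.
    os.environ['FTRACK_EVENTS_PATH'] = '/studio/ftrack/events'

    from pype.ftrack.ftrack_server import FtrackServer

    server = FtrackServer('event')   # 'action' (the default) serves actions
    server.run_server()              # blocks on session.event_hub.wait()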
@@ -457,12 +457,17 @@ def get_avalon_project(ft_project):
def get_project_config(entity):
    config = {}
    config['schema'] = pypelib.get_avalon_project_config_schema()
    config['tasks'] = [{'name': ''}]
    config['tasks'] = get_tasks(entity)
    config['apps'] = get_project_apps(entity)
    config['template'] = pypelib.get_avalon_project_template()

    return config


def get_tasks(project):
    return [
        {'name': task_type['name']} for task_type in project[
            'project_schema']['_task_type_schema']['types']
    ]


def get_project_apps(entity):
    """ Get apps from project

@@ -536,3 +541,26 @@ def get_config_data():
        log.warning("{} - {}".format(msg, str(e)))

    return data


def show_errors(obj, event, errors):
    title = 'Hey You! You raised few Errors! (*look below*)'
    items = []
    splitter = {'type': 'label', 'value': '---'}
    for error in errors:
        for key, message in error.items():
            error_title = {
                'type': 'label',
                'value': '# {}'.format(key)
            }
            error_message = {
                'type': 'label',
                'value': '<p>{}</p>'.format(message)
            }
            if len(items) > 0:
                items.append(splitter)
            items.append(error_title)
            items.append(error_message)
            obj.log.error(
                '{}: {}'.format(key, message)
            )
    obj.show_interface(event, items, title)
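For a single error entry, show_errors() above would produce an items payload like the following; the attribute name and message are illustrative:

    errors = [{'Missing attribute': 'Entity "sh010" has no fps set.'}]
    # show_errors(self, event, errors) builds and shows:
    items = [
        {'type': 'label', 'value': '# Missing attribute'},
        {'type': 'label', 'value': '<p>Entity "sh010" has no fps set.</p>'},
    ]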
@@ -62,7 +62,6 @@ class BaseAction(BaseHandler):
        )

    def _launch(self, event):
        self.reset_session()
        args = self._translate_event(
            self.session, event
        )

@@ -85,14 +84,20 @@ class BaseAction(BaseHandler):
    def _handle_result(self, session, result, entities, event):
        '''Validate the returned result from the action callback'''
        if isinstance(result, bool):
            result = {
                'success': result,
                'message': (
                    '{0} launched successfully.'.format(
                        self.label
            if result is True:
                result = {
                    'success': result,
                    'message': (
                        '{0} launched successfully.'.format(self.label)
                    )
                )
                }
            }
            else:
                result = {
                    'success': result,
                    'message': (
                        '{0} launch failed.'.format(self.label)
                    )
                }

        elif isinstance(result, dict):
            if 'items' in result:
@@ -1,5 +1,3 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack
import os
import sys
import platform

@@ -94,17 +92,13 @@ class AppAction(BaseHandler):

        '''

        entity = entities[0]

        # TODO Should return False if not TASK ?!!!
        # TODO Should return False if more than one entity is selected ?!!!
        if (
            len(entities) > 1 or
            entity.entity_type.lower() != 'task'
            len(entities) != 1 or
            entities[0].entity_type.lower() != 'task'
        ):
            return False

        ft_project = entity['project']
        ft_project = entities[0]['project']

        database = pypelib.get_avalon_database()
        project_name = ft_project['full_name']

@@ -115,9 +109,9 @@ class AppAction(BaseHandler):
        if avalon_project is None:
            return False
        else:
            apps = []
            for app in avalon_project['config']['apps']:
                apps.append(app['name'])
            apps = [app['name'] for app in avalon_project['config'].get(
                'apps', []
            )]

            if self.identifier not in apps:
                return False

@@ -243,13 +237,28 @@ class AppAction(BaseHandler):

        '''

        self.log.info((
            "Action - {0} ({1}) - just started"
        ).format(self.label, self.identifier))

        entity = entities[0]
        project_name = entity['project']['full_name']

        # Validate Clockify settings if Clockify is required
        clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None)
        if clockify_timer is not None:
            from pype.clockify import ClockifyAPI
            clockapi = ClockifyAPI()
            if clockapi.verify_api() is False:
                title = 'Launch message'
                header = '# You Can\'t launch **any Application**'
                message = (
                    '<p>You don\'t have a Clockify API'
                    ' key set in Clockify settings</p>'
                )
                items = [
                    {'type': 'label', 'value': header},
                    {'type': 'label', 'value': message}
                ]
                self.show_interface(event, items, title)
                return False

        database = pypelib.get_avalon_database()

        # Get current environments

@@ -397,6 +406,31 @@ class AppAction(BaseHandler):
            self.log.info('Starting timer for task: ' + task['name'])
            user.start_timer(task, force=True)

        # RUN TIMER IN Clockify
        if clockify_timer is not None:
            task_type = task['type']['name']
            project_name = task['project']['full_name']

            def get_parents(entity):
                output = []
                if entity.entity_type.lower() == 'project':
                    return output
                output.extend(get_parents(entity['parent']))
                output.append(entity['name'])

                return output

            desc_items = get_parents(task['parent'])
            desc_items.append(task['name'])
            description = '/'.join(desc_items)

            project_id = clockapi.get_project_id(project_name)
            tag_ids = []
            tag_ids.append(clockapi.get_tag_id(task_type))
            clockapi.start_time_entry(
                description, project_id, tag_ids=tag_ids
            )

        # Change status of task to In progress
        config = get_config_data()
@@ -4,6 +4,13 @@ import time
from pype import api as pype


class MissingPermision(Exception):
    def __init__(self, message=None):
        if message is None:
            message = 'Ftrack'
        super().__init__(message)


class BaseHandler(object):
    '''Custom Action base class

@@ -25,10 +32,11 @@ class BaseHandler(object):
        self.log = pype.Logger.getLogger(self.__class__.__name__)

        # Using decorator
        self.register = self.register_log(self.register)
        self.register = self.register_decorator(self.register)
        self.launch = self.launch_log(self.launch)

    # Decorator
    def register_log(self, func):
    def register_decorator(self, func):
        @functools.wraps(func)
        def wrapper_register(*args, **kwargs):
            label = self.__class__.__name__

@@ -37,8 +45,20 @@ class BaseHandler(object):
                    label = self.label
                else:
                    label = '{} {}'.format(self.label, self.variant)

            try:
                if hasattr(self, "role_list") and len(self.role_list) > 0:
                    username = self.session.api_user
                    user = self.session.query(
                        'User where username is "{}"'.format(username)
                    ).one()
                    available = False
                    for role in user['user_security_roles']:
                        if role['security_role']['name'] in self.role_list:
                            available = True
                            break
                    if available is False:
                        raise MissingPermision

                start_time = time.perf_counter()
                func(*args, **kwargs)
                end_time = time.perf_counter()

@@ -46,6 +66,14 @@ class BaseHandler(object):
                self.log.info((
                    '{} "{}" - Registered successfully ({:.4f}sec)'
                ).format(self.type, label, run_time))
            except MissingPermision as MPE:
                self.log.info((
                    '!{} "{}" - You\'re missing required {} permissions'
                ).format(self.type, label, str(MPE)))
            except AssertionError as ae:
                self.log.info((
                    '!{} "{}" - {}'
                ).format(self.type, label, str(ae)))
            except NotImplementedError:
                self.log.error((
                    '{} "{}" - Register method is not implemented'

@@ -58,6 +86,31 @@ class BaseHandler(object):
                )
        return wrapper_register

    # Decorator
    def launch_log(self, func):
        @functools.wraps(func)
        def wrapper_launch(*args, **kwargs):
            label = self.__class__.__name__
            if hasattr(self, 'label'):
                if self.variant is None:
                    label = self.label
                else:
                    label = '{} {}'.format(self.label, self.variant)

            try:
                self.log.info(('{} "{}": Launched').format(self.type, label))
                result = func(*args, **kwargs)
                self.log.info(('{} "{}": Finished').format(self.type, label))
                return result
            except Exception as e:
                msg = '{} "{}": Failed ({})'.format(self.type, label, str(e))
                self.log.error(msg)
                return {
                    'success': False,
                    'message': msg
                }
        return wrapper_launch

    @property
    def session(self):
        '''Return current session.'''
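The decorator wiring above means a subclass never calls wrapper_register directly; a schematic subclass (not taken from the repository) showing what happens on register():

    class MyAction(BaseHandler):
        label = 'My Action'
        variant = None
        role_list = ['Pypeclub']   # registration is refused without this role

        def register(self):
            ...

    # BaseHandler.__init__ rebinds register/launch through the decorators, so
    # MyAction(session).register() runs wrapper_register: it checks the API
    # user's security roles (raising MissingPermision otherwise), times the
    # call, and logs something like
    #   'Action "My Action" - Registered successfully (0.0012sec)'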
@@ -75,6 +128,16 @@ class BaseHandler(object):
        raise NotImplementedError()

    def _discover(self, event):
        items = {
            'items': [{
                'label': self.label,
                'variant': self.variant,
                'description': self.description,
                'actionIdentifier': self.identifier,
                'icon': self.icon,
            }]
        }

        args = self._translate_event(
            self.session, event
        )

@@ -83,18 +146,10 @@ class BaseHandler(object):
            self.session, *args
        )

        if accepts:
        if accepts is True:
            self.log.debug(u'Discovering action with selection: {0}'.format(
                args[1]['data'].get('selection', [])))
            return {
                'items': [{
                    'label': self.label,
                    'variant': self.variant,
                    'description': self.description,
                    'actionIdentifier': self.identifier,
                    'icon': self.icon,
                }]
            }
                event['data'].get('selection', [])))
            return items

    def discover(self, session, entities, event):
        '''Return true if we can handle the selected entities.

@@ -118,25 +173,32 @@ class BaseHandler(object):
        '''Return *event* translated structure to be used with the API.'''

        '''Return *event* translated structure to be used with the API.'''

        _selection = event['data'].get('selection', [])

        _entities = list()
        for entity in _selection:
            _entities.append(
                (
                    session.get(
                        self._get_entity_type(entity),
                        entity.get('entityId')
                    )
                )
            )
        _entities = event['data'].get('entities_object', None)
        if (
            _entities is None or
            _entities[0].get('link', None) == ftrack_api.symbol.NOT_SET
        ):
            _entities = self._get_entities(event)

        return [
            _entities,
            event
        ]

    def _get_entities(self, event):
        self.session._local_cache.clear()
        selection = event['data'].get('selection', [])
        _entities = []
        for entity in selection:
            _entities.append(
                self.session.get(
                    self._get_entity_type(entity),
                    entity.get('entityId')
                )
            )
        event['data']['entities_object'] = _entities
        return _entities

    def _get_entity_type(self, entity):
        '''Return translated entity type that can be used with the API.'''
        # Get entity type and make sure it is lower cased. Most places except

@@ -204,7 +266,10 @@ class BaseHandler(object):
    def _interface(self, *args):
        interface = self.interface(*args)
        if interface:
            if 'items' in interface:
            if (
                'items' in interface or
                ('success' in interface and 'message' in interface)
            ):
                return interface

        return {

@@ -229,23 +294,31 @@ class BaseHandler(object):
    def _handle_result(self, session, result, entities, event):
        '''Validate the returned result from the action callback'''
        if isinstance(result, bool):
            result = {
                'success': result,
                'message': (
                    '{0} launched successfully.'.format(
                        self.label
            if result is True:
                result = {
                    'success': result,
                    'message': (
                        '{0} launched successfully.'.format(self.label)
                    )
                )
                }
            }
            else:
                result = {
                    'success': result,
                    'message': (
                        '{0} launch failed.'.format(self.label)
                    )
                }

        elif isinstance(result, dict):
            for key in ('success', 'message'):
                if key in result:
                    continue
            items = 'items' in result
            if items is False:
                for key in ('success', 'message'):
                    if key in result:
                        continue

                raise KeyError(
                    'Missing required key: {0}.'.format(key)
                )
            raise KeyError(
                'Missing required key: {0}.'.format(key)
            )

        else:
            self.log.error(
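The normalization in _handle_result, both here and in BaseAction above, can be summarized with illustrative values:

    # launch() returned True  -> normalized to:
    {'success': True, 'message': 'My Action launched successfully.'}

    # launch() returned False -> normalized to:
    {'success': False, 'message': 'My Action launch failed.'}

    # dict results must carry 'success' and 'message' (or an 'items' list),
    # otherwise KeyError('Missing required key: ...') is raised.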
@@ -1,3 +1,4 @@
+import functools
 from .ftrack_base_handler import BaseHandler


@@ -18,6 +19,18 @@ class BaseEvent(BaseHandler):
        '''Expects a ftrack_api.Session instance'''
        super().__init__(session)

+    # Decorator
+    def launch_log(self, func):
+        @functools.wraps(func)
+        def wrapper_launch(*args, **kwargs):
+            try:
+                func(*args, **kwargs)
+            except Exception as e:
+                self.log.info('{} Failed ({})'.format(
+                    self.__class__.__name__, str(e))
+                )
+        return wrapper_launch
+
    def register(self):
        '''Registers the event, subscribing the discover and launch topics.'''
        self.session.event_hub.subscribe(

@@ -27,23 +40,31 @@ class BaseEvent(BaseHandler):
        )

    def _launch(self, event):
-        args = self._translate_event(
-            self.session, event
-        )
        self.session.rollback()
+        self.session._local_cache.clear()

        self.launch(
-            self.session, *args
+            self.session, event
        )

        return

    def _translate_event(self, session, event):
        '''Return *event* translated structure to be used with the API.'''
-        _selection = event['data'].get('entities', [])
+        return [
+            self._get_entities(session, event),
+            event
+        ]
+
+    def _get_entities(
+        self, session, event, ignore=['socialfeed', 'socialnotification']
+    ):
+        _selection = event['data'].get('entities', [])
        _entities = list()
+        if isinstance(ignore, str):
+            ignore = list(ignore)
        for entity in _selection:
-            if entity['entityType'] in ['socialfeed']:
+            if entity['entityType'] in ignore:
                continue
            _entities.append(
                (

@@ -53,8 +74,4 @@ class BaseEvent(BaseHandler):
                )
            )
        )
-
-        return [
-            _entities,
-            event
-        ]
+        return _entities
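The `launch_log` decorator added above swallows exceptions and logs them instead of letting the event callback raise; a self-contained sketch of the same pattern (the module-level logger setup is an assumption, not from the commit):

import functools
import logging

log = logging.getLogger(__name__)

def launch_log(func):
    @functools.wraps(func)
    def wrapper_launch(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # Mirror the handler: report the failure rather than raising.
            log.info('{} Failed ({})'.format(func.__name__, str(e)))
    return wrapper_launch

@launch_log
def flaky():
    raise RuntimeError('boom')

flaky()  # logs "flaky Failed (boom)" instead of propagating the error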
@@ -16,11 +16,12 @@ class Login_Dialog_ui(QtWidgets.QWidget):
    buttons = []
    labels = []

-    def __init__(self, parent=None):
+    def __init__(self, parent=None, is_event=False):

        super(Login_Dialog_ui, self).__init__()

        self.parent = parent
+        self.is_event = is_event

        if hasattr(parent, 'icon'):
            self.setWindowIcon(self.parent.icon)

@@ -205,7 +206,7 @@ class Login_Dialog_ui(QtWidgets.QWidget):
        verification = credentials._check_credentials(username, apiKey)

        if verification:
-            credentials._save_credentials(username, apiKey)
+            credentials._save_credentials(username, apiKey, self.is_event)
            credentials._set_env(username, apiKey)
            if self.parent is not None:
                self.parent.loginChange()

@@ -305,7 +306,7 @@ class Login_Dialog_ui(QtWidgets.QWidget):
        verification = credentials._check_credentials(username, apiKey)

        if verification is True:
-            credentials._save_credentials(username, apiKey)
+            credentials._save_credentials(username, apiKey, self.is_event)
            credentials._set_env(username, apiKey)
            if self.parent is not None:
                self.parent.loginChange()
12
pype/lib.py

@@ -206,6 +206,11 @@ def version_up(filepath):
    new_label = label.replace(version, new_version, 1)
    new_basename = _rreplace(basename, label, new_label)

+    if not new_basename.endswith(new_label):
+        index = (new_basename.find(new_label))
+        index += len(new_label)
+        new_basename = new_basename[:index]
+
    new_filename = "{}{}".format(new_basename, ext)
    new_filename = os.path.join(dirname, new_filename)
    new_filename = os.path.normpath(new_filename)

@@ -214,9 +219,10 @@ def version_up(filepath):
        raise RuntimeError("Created path is the same as current file,"
                           "this is a bug")

-    if os.path.exists(new_filename):
-        log.info("Skipping existing version %s" % new_label)
-        return version_up(new_filename)
+    for file in os.listdir(dirname):
+        if file.endswith(ext) and file.startswith(new_basename):
+            log.info("Skipping existing version %s" % new_label)
+            return version_up(new_filename)

    log.info("New version %s" % new_label)
    return new_filename
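The `version_up` change above switches from an exact-path existence check to scanning the directory for any file that shares the new basename; a rough sketch of that collision test in isolation (simplified, not the full pype implementation):

import os

def has_existing_version(dirname, new_basename, ext):
    # Any file with the same extension and basename prefix counts as taken,
    # so differently padded or suffixed siblings also trigger a re-bump.
    for name in os.listdir(dirname):
        if name.endswith(ext) and name.startswith(new_basename):
            return True
    return False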
@@ -78,6 +78,8 @@ def override_toolbox_ui():
    import avalon.tools.cbsceneinventory as inventory
    import avalon.tools.cbloader as loader
    from avalon.maya.pipeline import launch_workfiles_app
+    import mayalookassigner
+

    # Ensure the maya web icon on toolbox exists
    web_button = "ToolBox|MainToolboxLayout|mayaWebButton"

@@ -98,6 +100,18 @@ def override_toolbox_ui():
    background_color = (0.267, 0.267, 0.267)
    controls = []

+    control = mc.iconTextButton(
+        "pype_toolbox_lookmanager",
+        annotation="Look Manager",
+        label="Look Manager",
+        image=os.path.join(icons, "lookmanager.png"),
+        command=lambda: mayalookassigner.show(),
+        bgc=background_color,
+        width=icon_size,
+        height=icon_size,
+        parent=parent)
+    controls.append(control)
+
    control = mc.iconTextButton(
        "pype_toolbox_workfiles",
        annotation="Work Files",
@@ -19,22 +19,30 @@
            "title": "# Project Manager",
            "tooltip": "Add assets to the project"
        },
+        {
+            "type": "action",
+            "command": "from pype.tools.assetcreator import app as assetcreator; assetcreator.show(context='maya')",
+            "sourcetype": "python",
+            "title": "Asset Creator",
+            "tooltip": "Open the Asset Creator"
+        },
        {
            "type": "separator"
        },
        {
            "type": "menu",
-            "title": "# Modeling",
+            "title": "Modeling",
            "items": [
                {
                    "type": "action",
-                    "command": "$PYPE_SCRIPTS\\modeling\\polyDeleteOtherUVSets.py",
-                    "sourcetype": "file",
+                    "command": "import easyTreezSource; reload(easyTreezSource); easyTreezSource.easyTreez()",
+                    "sourcetype": "python",
                    "tags": ["modeling",
-                             "polygon",
-                             "uvset",
-                             "delete"],
-                    "title": "# Polygon Delete Other UV Sets",
+                             "trees",
+                             "generate",
+                             "create",
+                             "plants"],
+                    "title": "EasyTreez",
                    "tooltip": ""
                },
                {
@@ -1,5 +1,5 @@
-import os
+import sys
 import pyblish.api
 import clique


@@ -26,15 +26,26 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
            str: String query to use with "session.query"
        """
        queries = []
-        for key, value in data.iteritems():
-            if not isinstance(value, (basestring, int)):
-                self.log.info(value)
-                if "id" in value.keys():
-                    queries.append(
-                        "{0}.id is \"{1}\"".format(key, value["id"])
-                    )
-            else:
-                queries.append("{0} is \"{1}\"".format(key, value))
+        if sys.version_info[0] < 3:
+            for key, value in data.iteritems():
+                if not isinstance(value, (basestring, int)):
+                    self.log.info(value)
+                    if "id" in value.keys():
+                        queries.append(
+                            "{0}.id is \"{1}\"".format(key, value["id"])
+                        )
+                else:
+                    queries.append("{0} is \"{1}\"".format(key, value))
+        else:
+            for key, value in data.items():
+                if not isinstance(value, (str, int)):
+                    self.log.info(value)
+                    if "id" in value.keys():
+                        queries.append(
+                            "{0}.id is \"{1}\"".format(key, value["id"])
+                        )
+                else:
+                    queries.append("{0} is \"{1}\"".format(key, value))

        query = (
            "select id from " + entitytype + " where " + " and ".join(queries)
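The version branch above exists only because `iteritems` and `basestring` are gone in Python 3; the same query building can be written once, roughly like this (a sketch under that assumption, not the committed code):

import sys

def build_queries(data):
    # The else-branch of the conditional is never evaluated on Python 3,
    # so the basestring reference is safe there.
    string_types = (str,) if sys.version_info[0] >= 3 else (basestring,)  # noqa: F821
    queries = []
    for key, value in data.items():  # items() also exists on Python 2
        if not isinstance(value, string_types + (int,)):
            if "id" in value.keys():
                queries.append('{0}.id is "{1}"'.format(key, value["id"]))
        else:
            queries.append('{0} is "{1}"'.format(key, value))
    return queries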
@@ -57,14 +57,20 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        self.log.debug('dest ext: ' + ext)
        thumbnail = False

        if ext in ['.mov']:
+            if not instance.data.get('startFrameReview'):
+                instance.data['startFrameReview'] = instance.data['startFrame']
+            if not instance.data.get('endFrameReview'):
+                instance.data['endFrameReview'] = instance.data['endFrame']
            location = ft_session.query(
                'Location where name is "ftrack.server"').one()
            component_data = {
                "name": "ftrackreview-mp4",  # Default component name is "main".
                "metadata": {'ftr_meta': json.dumps({
-                    'frameIn': int(instance.data["startFrame"]),
-                    'frameOut': int(instance.data["startFrame"]),
+                    'frameIn': int(instance.data['startFrameReview']),
+                    'frameOut': int(instance.data['startFrameReview']),
                    'frameRate': 25})}
            }
        elif ext in [".jpg", ".jpeg"]:
@@ -12,7 +12,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit current Comp to Deadline

    Renders are submitted to a Deadline Web Service as
-    supplied via the environment variable AVALON_DEADLINE
+    supplied via the environment variable DEADLINE_REST_URL

    """

@@ -33,9 +33,9 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):

        from avalon.fusion.lib import get_frame_path

-        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+        DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
                                           "http://localhost:8082")
-        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

        # Collect all saver instances in context that are to be rendered
        saver_instances = []

@@ -139,7 +139,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
-        url = "{}/api/jobs".format(AVALON_DEADLINE)
+        url = "{}/api/jobs".format(DEADLINE_REST_URL)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)
114
pype/plugins/global/load/open_djv.py
Normal file

@@ -0,0 +1,114 @@
+import os
+import subprocess
+import json
+from pype import lib as pypelib
+from avalon import api
+
+
+def get_config_data():
+    path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json']
+    filepath = os.path.sep.join(path_items)
+    data = dict()
+    with open(filepath) as data_file:
+        data = json.load(data_file)
+    return data
+
+
+def get_families():
+    families = []
+    paths = get_config_data().get('djv_paths', [])
+    for path in paths:
+        if os.path.exists(path):
+            families.append("*")
+            break
+    return families
+
+
+def get_representation():
+    return get_config_data().get('file_ext', [])
+
+
+class OpenInDJV(api.Loader):
+    """Open Image Sequence with system default"""
+
+    config_data = get_config_data()
+    families = get_families()
+    representations = get_representation()
+
+    label = "Open in DJV"
+    order = -10
+    icon = "play-circle"
+    color = "orange"
+
+    def load(self, context, name, namespace, data):
+        self.djv_path = None
+        paths = get_config_data().get('djv_paths', [])
+        for path in paths:
+            if os.path.exists(path):
+                self.djv_path = path
+                break
+        directory = os.path.dirname(self.fname)
+        from avalon.vendor import clique
+
+        pattern = clique.PATTERNS["frames"]
+        files = os.listdir(directory)
+        collections, remainder = clique.assemble(
+            files,
+            patterns=[pattern],
+            minimum_items=1
+        )
+
+        if not remainder:
+            seqeunce = collections[0]
+            first_image = list(seqeunce)[0]
+            # start = min(collections)
+            # end = max(collections)
+            #
+            # range = (padding % start) + '-' + (padding % end)
+            # filename = re.sub('%[0-9]*d', range, filename)
+        else:
+            first_image = self.fname
+        filepath = os.path.normpath(os.path.join(directory, first_image))
+
+        self.log.info("Opening : {}".format(filepath))
+
+        fps = context.get('project', {}).get('data', {}).get('fps', 24)
+
+        cmd = []
+        # DJV path
+        cmd.append(os.path.normpath(self.djv_path))
+        # DJV Options Start ##############################################
+        '''layer name'''
+        # cmd.append('-file_layer (value)')
+        ''' Proxy scale: 1/2, 1/4, 1/8'''
+        # cmd.append('-file_proxy 1/2')
+        ''' Cache: True, False.'''
+        cmd.append('-file_cache True')
+        ''' Start in full screen '''
+        # cmd.append('-window_fullscreen')
+        ''' Toolbar controls: False, True.'''
+        # cmd.append("-window_toolbar False")
+        ''' Window controls: False, True.'''
+        # cmd.append("-window_playbar False")
+        ''' Grid overlay: None, 1x1, 10x10, 100x100.'''
+        # cmd.append("-view_grid None")
+        ''' Heads up display: True, False.'''
+        # cmd.append("-view_hud True")
+        ''' Playback: Stop, Forward, Reverse.'''
+        cmd.append("-playback Forward")
+        ''' Frame.'''
+        # cmd.append("-playback_frame (value)")
+        cmd.append("-playback_speed " + str(fps))
+        ''' Timer: Sleep, Timeout. Value: Sleep.'''
+        # cmd.append("-playback_timer (value)")
+        ''' Timer resolution (seconds): 0.001.'''
+        # cmd.append("-playback_timer_resolution (value)")
+        ''' Time units: Timecode, Frames.'''
+        cmd.append("-time_units Frames")
+        # DJV Options End ################################################
+
+        # PATH TO COMPONENT
+        cmd.append(os.path.normpath(filepath))
+
+        # Run DJV with these commands
+        subprocess.Popen(' '.join(cmd))
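A short sketch of the clique idiom `open_djv.py` uses above to find the first frame of a sequence on disk (the file names here are hypothetical):

from avalon.vendor import clique  # bundled copy of the clique library

files = ['render.0001.exr', 'render.0002.exr', 'render.0003.exr']
collections, remainder = clique.assemble(
    files, patterns=[clique.PATTERNS['frames']], minimum_items=1
)
if not remainder:
    sequence = collections[0]
    # Collections iterate in frame order, so index 0 is the first frame.
    first_image = list(sequence)[0]  # 'render.0001.exr'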
@@ -1,5 +1,5 @@
-import pyblish.api
+import os
+import pyblish.api

 from avalon import io, api


@@ -8,61 +8,11 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
    """Generate the assumed destination path where the file will be stored"""

    label = "Collect Assumed Destination"
-    order = pyblish.api.CollectorOrder + 0.499
+    order = pyblish.api.CollectorOrder + 0.498
    exclude_families = ["clip"]

    def process(self, instance):
-        if [ef for ef in self.exclude_families
-                if instance.data["family"] in ef]:
-            return
-
        self.create_destination_template(instance)

-        template_data = instance.data["assumedTemplateData"]
-        # template = instance.data["template"]
-
-        anatomy = instance.context.data['anatomy']
-        # template = anatomy.publish.path
-        anatomy_filled = anatomy.format(template_data)
-        mock_template = anatomy_filled.publish.path
-
-        # For now assume resources end up in a "resources" folder in the
-        # published folder
-        mock_destination = os.path.join(os.path.dirname(mock_template),
-                                        "resources")
-
-        # Clean the path
-        mock_destination = os.path.abspath(os.path.normpath(mock_destination))
-
-        # Define resource destination and transfers
-        resources = instance.data.get("resources", list())
-        transfers = instance.data.get("transfers", list())
-        for resource in resources:
-
-            # Add destination to the resource
-            source_filename = os.path.basename(resource["source"])
-            destination = os.path.join(mock_destination, source_filename)
-
-            # Force forward slashes to fix issue with software unable
-            # to work correctly with backslashes in specific scenarios
-            # (e.g. escape characters in PLN-151 V-Ray UDIM)
-            destination = destination.replace("\\", "/")
-
-            resource['destination'] = destination
-
-            # Collect transfers for the individual files of the resource
-            # e.g. all individual files of a cache or UDIM textures.
-            files = resource['files']
-            for fsrc in files:
-                fname = os.path.basename(fsrc)
-                fdest = os.path.join(mock_destination, fname)
-                transfers.append([fsrc, fdest])
-
-        instance.data["resources"] = resources
-        instance.data["transfers"] = transfers
-
    def create_destination_template(self, instance):
-        """Create a filepath based on the current data available
+        """Create a destination filepath based on the current data available

        Example template:
            {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/

@@ -73,6 +23,9 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
        Returns:
            file path (str)
        """
+        if [ef for ef in self.exclude_families
+                if instance.data["family"] in ef]:
+            return

        # get all the stuff from the database
        subset_name = instance.data["subset"]

@@ -84,7 +37,7 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
                               projection={"config": True, "data": True})

        template = project["config"]["template"]["publish"]
-        # anatomy = instance.context.data['anatomy']
+        anatomy = instance.context.data['anatomy']

        asset = io.find_one({"type": "asset",
                             "name": asset_name,

@@ -126,5 +79,10 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
                         "hierarchy": hierarchy,
                         "representation": "TEMP"}

-        instance.data["assumedTemplateData"] = template_data
        instance.data["template"] = template
+        instance.data["assumedTemplateData"] = template_data
+
+        # We take the parent folder of representation 'filepath'
+        instance.data["assumedDestination"] = os.path.dirname(
+            (anatomy.format(template_data)).publish.path
+        )
@@ -147,11 +147,13 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
                          "found sequence")
            raise RuntimeError("Invalid sequence")

+        fps = data.get("fps", 25)

        # Get family from the data
        families = data.get("families", ["render"])
        assert isinstance(families, (list, tuple)), "Must be iterable"
        assert families, "Must have at least a single family"

+        families.append("ftrack")
        for collection in collections:
            instance = context.create_instance(str(collection))
            self.log.info("Collection: %s" % list(collection))

@@ -180,6 +182,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
                "files": [list(collection)],
                "startFrame": start,
                "endFrame": end,
+                "fps": fps,
                "source": data.get('source', '')
            })
            instance.append(collection)
24
pype/plugins/global/publish/collect_output_repre_config.py
Normal file

@@ -0,0 +1,24 @@
+import os
+import json
+import pyblish.api
+from pype import lib as pypelib
+
+
+class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
+    """Inject the current working file into context"""
+
+    order = pyblish.api.CollectorOrder
+    label = "Collect Config for representation"
+    hosts = ["shell"]
+
+    def process(self, context):
+        config_items = [
+            pypelib.get_presets_path(),
+            "ftrack",
+            "output_representation.json"
+        ]
+        config_file = os.path.sep.join(config_items)
+        with open(config_file) as data_file:
+            config_data = json.load(data_file)
+
+        context.data['output_repre_config'] = config_data
67
pype/plugins/global/publish/extract_jpeg.py
Normal file

@@ -0,0 +1,67 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+
+
+class ExtractJpegEXR(pyblish.api.InstancePlugin):
+    """Resolve any dependency issies
+
+    This plug-in resolves any paths which, if not updated might break
+    the published file.
+
+    The order of families is important, when working with lookdev you want to
+    first publish the texture, update the texture paths in the nodes and then
+    publish the shading network. Same goes for file dependent assets.
+    """
+
+    label = "Extract Jpeg EXR"
+    hosts = ["shell"]
+    order = pyblish.api.ExtractorOrder
+    families = ["imagesequence", "render", "write", "source"]
+
+    def process(self, instance):
+        start = instance.data.get("startFrame")
+        stagingdir = os.path.normpath(instance.data.get("stagingDir"))
+
+        collected_frames = os.listdir(stagingdir)
+        collections, remainder = clique.assemble(collected_frames)
+
+        input_file = (
+            collections[0].format('{head}{padding}{tail}') % start
+        )
+        full_input_path = os.path.join(stagingdir, input_file)
+        self.log.info("input {}".format(full_input_path))
+
+        filename = collections[0].format('{head}')
+        if not filename.endswith('.'):
+            filename += "."
+        jpegFile = filename + "jpg"
+        full_output_path = os.path.join(stagingdir, jpegFile)
+
+        self.log.info("output {}".format(full_output_path))
+
+        config_data = instance.context.data['output_repre_config']
+
+        proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+        profile = config_data.get(proj_name, config_data['__default__'])
+
+        jpeg_items = []
+        jpeg_items.append("ffmpeg")
+        # override file if already exists
+        jpeg_items.append("-y")
+        # use same input args like with mov
+        jpeg_items.extend(profile.get('input', []))
+        # input file
+        jpeg_items.append("-i {}".format(full_input_path))
+        # output file
+        jpeg_items.append(full_output_path)
+
+        subprocess_jpeg = " ".join(jpeg_items)
+        sub_proc = subprocess.Popen(subprocess_jpeg)
+        sub_proc.wait()
+
+        if "files" not in instance.data:
+            instance.data["files"] = list()
+        instance.data["files"].append(jpegFile)
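The extractor above joins its ffmpeg arguments into a single string for `subprocess.Popen`; the equivalent call with an argument list avoids quoting problems when paths contain spaces (a sketch; the paths and profile flags are placeholders, not from the commit):

import subprocess

full_input_path = '/tmp/staging/render.0001.exr'  # hypothetical
full_output_path = '/tmp/staging/render.jpg'      # hypothetical
profile_input = ['-start_number', '1']            # stands in for profile['input']

# Each argument is its own list element, so no shell parsing is involved.
args = ['ffmpeg', '-y'] + profile_input + ['-i', full_input_path, full_output_path]
subprocess.check_call(args)  # raises CalledProcessError if ffmpeg fails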
75
pype/plugins/global/publish/extract_quicktime.py
Normal file

@@ -0,0 +1,75 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+
+
+class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
+    """Resolve any dependency issies
+
+    This plug-in resolves any paths which, if not updated might break
+    the published file.
+
+    The order of families is important, when working with lookdev you want to
+    first publish the texture, update the texture paths in the nodes and then
+    publish the shading network. Same goes for file dependent assets.
+    """
+
+    label = "Extract Quicktime EXR"
+    order = pyblish.api.ExtractorOrder
+    families = ["imagesequence", "render", "write", "source"]
+    hosts = ["shell"]
+
+    def process(self, instance):
+        fps = instance.data.get("fps")
+        start = instance.data.get("startFrame")
+        stagingdir = os.path.normpath(instance.data.get("stagingDir"))
+
+        collected_frames = os.listdir(stagingdir)
+        collections, remainder = clique.assemble(collected_frames)
+
+        full_input_path = os.path.join(
+            stagingdir, collections[0].format('{head}{padding}{tail}')
+        )
+        self.log.info("input {}".format(full_input_path))
+
+        filename = collections[0].format('{head}')
+        if not filename.endswith('.'):
+            filename += "."
+        movFile = filename + "mov"
+        full_output_path = os.path.join(stagingdir, movFile)
+
+        self.log.info("output {}".format(full_output_path))
+
+        config_data = instance.context.data['output_repre_config']
+
+        proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+        profile = config_data.get(proj_name, config_data['__default__'])
+
+        input_args = []
+        # overrides output file
+        input_args.append("-y")
+        # preset's input data
+        input_args.extend(profile.get('input', []))
+        # necessary input data
+        input_args.append("-i {}".format(full_input_path))
+        input_args.append("-framerate {}".format(fps))
+        input_args.append("-start_number {}".format(start))
+
+        output_args = []
+        # preset's output data
+        output_args.extend(profile.get('output', []))
+        # output filename
+        output_args.append(full_output_path)
+        mov_args = [
+            "ffmpeg",
+            " ".join(input_args),
+            " ".join(output_args)
+        ]
+        subprocess_mov = " ".join(mov_args)
+        sub_proc = subprocess.Popen(subprocess_mov)
+        sub_proc.wait()
+
+        if "files" not in instance.data:
+            instance.data["files"] = list()
+        instance.data["files"].append(movFile)
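Like the jpeg extractor, the quicktime extractor above pulls per-project ffmpeg flags from `output_representation.json`; selecting the profile reduces to a plain dict lookup with a `__default__` fallback (a sketch with invented data):

config_data = {
    '__default__': {'input': [], 'output': ['-pix_fmt yuv420p']},
    'my_project': {'input': [], 'output': ['-vcodec libx264']},
}

proj_name = 'my_project'
# Fall back to the studio-wide default when the project has no entry.
profile = config_data.get(proj_name, config_data['__default__'])
print(profile['output'])  # ['-vcodec libx264']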
@@ -5,6 +5,7 @@ import shutil
 import errno
 import pyblish.api
 from avalon import api, io
+from avalon.vendor import filelink


 log = logging.getLogger(__name__)

@@ -31,12 +32,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                "pointcache",
                "vdbcache",
                "setdress",
+                "assembly",
+                "layout",
                "rig",
                "vrayproxy",
                "yetiRig",
                "yeticache",
                "nukescript",
                "review",
+                "workfile",
                "scene",
                "ass"]
    exclude_families = ["clip"]

@@ -88,6 +92,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

+        # Ensure at least one file is set up for transfer in staging dir.
+        files = instance.data.get("files", [])
+        assert files, "Instance has no files to transfer"
+        assert isinstance(files, (list, tuple)), (
+            "Instance 'files' must be a list, got: {0}".format(files)
+        )
+
        project = io.find_one({"type": "project"})

        asset = io.find_one({"type": "asset",

@@ -167,6 +178,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        # Each should be a single representation (as such, a single extension)
        representations = []
        destination_list = []
+        if 'transfers' not in instance.data:
+            instance.data['transfers'] = []

        for files in instance.data["files"]:

@@ -268,12 +281,22 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
            instance: the instance to integrate
        """

-        transfers = instance.data["transfers"]
+        transfers = instance.data.get("transfers", list())

        for src, dest in transfers:
            self.log.info("Copying file .. {} -> {}".format(src, dest))
            self.copy_file(src, dest)

+        # Produce hardlinked copies
+        # Note: hardlink can only be produced between two files on the same
+        # server/disk and editing one of the two will edit both files at once.
+        # As such it is recommended to only make hardlinks between static files
+        # to ensure publishes remain safe and non-edited.
+        hardlinks = instance.data.get("hardlinks", list())
+        for src, dest in hardlinks:
+            self.log.info("Hardlinking file .. {} -> {}".format(src, dest))
+            self.hardlink_file(src, dest)
+
    def copy_file(self, src, dst):
        """ Copy given source to destination


@@ -296,6 +319,20 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

        shutil.copy(src, dst)

+    def hardlink_file(self, src, dst):
+
+        dirname = os.path.dirname(dst)
+        try:
+            os.makedirs(dirname)
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                pass
+            else:
+                self.log.critical("An unexpected error occurred.")
+                raise
+
+        filelink.create(src, dst, filelink.HARDLINK)
+
    def get_subset(self, asset, instance):

        subset = io.find_one({"type": "subset",

@@ -359,7 +396,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        families.append(instance_family)
        families += current_families

-        self.log.debug("Registered roor: {}".format(api.registered_root()))
+        self.log.debug("Registered root: {}".format(api.registered_root()))
        # create relative source path for DB
        try:
            source = instance.data['source']

@@ -379,7 +416,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                        "fps": context.data.get("fps")}

        # Include optional data if present in
-        optionals = ["startFrame", "endFrame", "step", "handles"]
+        optionals = [
+            "startFrame", "endFrame", "step", "handles", "sourceHashes"
+        ]
        for key in optionals:
            if key in instance.data:
                version_data[key] = instance.data[key]
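The new `hardlink_file` above relies on `avalon.vendor.filelink`; with the standard library alone the same behavior is roughly (a sketch, not the committed helper):

import errno
import os

def hardlink_file(src, dst):
    # Create the destination folder, tolerating that it may already exist.
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    # Hardlinks only work within one filesystem; both names then share
    # the same data, which is why they should point at static files only.
    os.link(src, dst)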
@@ -168,6 +168,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
        representations = []
        destination_list = []

+        if 'transfers' not in instance.data:
+            instance.data['transfers'] = []
+
        for files in instance.data["files"]:
            # Collection
            # _______

@@ -240,7 +243,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

            instance.data["transfers"].append([src, dst])

-        template_data["frame"] = "#####"
+        template_data["frame"] = "#" * anatomy.render.padding
        anatomy_filled = anatomy.format(template_data)
        path_to_save = anatomy_filled.render.path
        template = anatomy.render.fullpath
@@ -98,7 +98,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
    plug-in.

    Renders are submitted to a Deadline Web Service as
-    supplied via the environment variable AVALON_DEADLINE
+    supplied via the environment variable DEADLINE_REST_URL

    Options in instance.data:
        - deadlineSubmission (dict, Required): The returned .json

@@ -126,16 +126,16 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
    hosts = ["fusion", "maya", "nuke"]

    families = [
-        "render.deadline",
+        "render.farm",
        "renderlayer",
        "imagesequence"
    ]

    def process(self, instance):

-        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+        DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                           "http://localhost:8082")
-        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

        # try:
        #     deadline_url = os.environ["DEADLINE_REST_URL"]

@@ -192,6 +192,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
            "regex": regex,
            "startFrame": start,
            "endFrame": end,
+            "fps": context.data.get("fps", None),
            "families": ["render"],
            "source": source,
            "user": context.data["user"],

@@ -326,7 +327,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

-        url = "{}/api/jobs".format(AVALON_DEADLINE)
+        url = "{}/api/jobs".format(DEADLINE_REST_URL)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)
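The submission above POSTs a JSON job payload to the Deadline REST endpoint taken from DEADLINE_REST_URL; stripped to its essentials, the request is just this (the payload contents here are illustrative, not from the commit):

import os
import requests

deadline_url = os.environ.get('DEADLINE_REST_URL', 'http://localhost:8082')
payload = {'JobInfo': {'Name': 'example'}, 'PluginInfo': {}, 'AuxFiles': []}

# requests serializes the dict to JSON and sets the content-type header.
response = requests.post('{}/api/jobs'.format(deadline_url), json=payload)
if not response.ok:
    raise Exception(response.text)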
36
pype/plugins/launcher/actions/AssetCreator.py
Normal file

@@ -0,0 +1,36 @@
+import os
+import sys
+import acre
+
+from avalon import api, lib
+from pype.tools import assetcreator
+
+from pype.api import Logger
+
+log = Logger.getLogger(__name__, "asset_creator")
+
+
+class AssetCreator(api.Action):
+
+    name = "asset_creator"
+    label = "Asset Creator"
+    icon = "plus-square"
+    order = 250
+
+    def is_compatible(self, session):
+        """Return whether the action is compatible with the session"""
+        if "AVALON_PROJECT" in session:
+            return True
+        return False
+
+    def process(self, session, **kwargs):
+        asset = ''
+        if 'AVALON_ASSET' in session:
+            asset = session['AVALON_ASSET']
+        return lib.launch(
+            executable="python",
+            args=[
+                "-u", "-m", "pype.tools.assetcreator",
+                session['AVALON_PROJECT'], asset
+            ]
+        )
42
pype/plugins/launcher/actions/ClockifyStart.py
Normal file

@@ -0,0 +1,42 @@
+from avalon import api, io
+from pype.clockify import ClockifyAPI
+from pype.api import Logger
+log = Logger.getLogger(__name__, "clockify_start")
+
+
+class ClockifyStart(api.Action):
+
+    name = "clockify_start_timer"
+    label = "Clockify - Start Timer"
+    icon = "clockify_icon"
+    order = 500
+    clockapi = ClockifyAPI()
+
+    def is_compatible(self, session):
+        """Return whether the action is compatible with the session"""
+        if "AVALON_TASK" in session:
+            return True
+        return False
+
+    def process(self, session, **kwargs):
+        project_name = session['AVALON_PROJECT']
+        asset_name = session['AVALON_ASSET']
+        task_name = session['AVALON_TASK']
+
+        description = asset_name
+        asset = io.find_one({
+            'type': 'asset',
+            'name': asset_name
+        })
+        if asset is not None:
+            desc_items = asset.get('data', {}).get('parents', [])
+            desc_items.append(asset_name)
+            desc_items.append(task_name)
+            description = '/'.join(desc_items)
+
+        project_id = self.clockapi.get_project_id(project_name)
+        tag_ids = []
+        tag_ids.append(self.clockapi.get_tag_id(task_name))
+        self.clockapi.start_time_entry(
+            description, project_id, tag_ids=tag_ids
+        )
57
pype/plugins/launcher/actions/ClockifySync.py
Normal file

@@ -0,0 +1,57 @@
+from avalon import api, io
+from pype.clockify import ClockifyAPI
+from pype.api import Logger
+log = Logger.getLogger(__name__, "clockify_sync")
+
+
+class ClockifySync(api.Action):
+
+    name = "sync_to_clockify"
+    label = "Sync to Clockify"
+    icon = "clockify_white_icon"
+    order = 500
+    clockapi = ClockifyAPI()
+    have_permissions = clockapi.validate_workspace_perm()
+
+    def is_compatible(self, session):
+        """Return whether the action is compatible with the session"""
+        return self.have_permissions
+
+    def process(self, session, **kwargs):
+        project_name = session.get('AVALON_PROJECT', None)
+
+        projects_to_sync = []
+        if project_name.strip() == '' or project_name is None:
+            for project in io.projects():
+                projects_to_sync.append(project)
+        else:
+            project = io.find_one({'type': 'project'})
+            projects_to_sync.append(project)
+
+        projects_info = {}
+        for project in projects_to_sync:
+            task_types = [task['name'] for task in project['config']['tasks']]
+            projects_info[project['name']] = task_types
+
+        clockify_projects = self.clockapi.get_projects()
+        for project_name, task_types in projects_info.items():
+            if project_name not in clockify_projects:
+                response = self.clockapi.add_project(project_name)
+                if 'id' not in response:
+                    self.log.error('Project {} can\'t be created'.format(
+                        project_name
+                    ))
+                    continue
+                project_id = response['id']
+            else:
+                project_id = clockify_projects[project_name]
+
+            clockify_workspace_tags = self.clockapi.get_tags()
+            for task_type in task_types:
+                if task_type not in clockify_workspace_tags:
+                    response = self.clockapi.add_tag(task_type)
+                    if 'id' not in response:
+                        self.log.error('Task {} can\'t be created'.format(
+                            task_type
+                        ))
+                        continue
@@ -9,6 +9,7 @@ class CreateAnimation(avalon.maya.Creator):
    label = "Animation"
    family = "animation"
    icon = "male"
+    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreateAnimation, self).__init__(*args, **kwargs)
@@ -12,6 +12,7 @@ class CreateAss(avalon.maya.Creator):
    label = "Ass StandIn"
    family = "ass"
    icon = "cube"
+    defaults = ['Main']

    def process(self):
        instance = super(CreateAss, self).process()
11
pype/plugins/maya/create/create_assembly.py
Normal file

@@ -0,0 +1,11 @@
+import avalon.maya
+
+
+class CreateAssembly(avalon.maya.Creator):
+    """A grouped package of loaded content"""
+
+    name = "assembly"
+    label = "Assembly"
+    family = "assembly"
+    icon = "boxes"
+    defaults = ['Main']
@@ -9,6 +9,7 @@ class CreateCamera(avalon.maya.Creator):
    label = "Camera"
    family = "camera"
    icon = "video-camera"
+    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreateCamera, self).__init__(*args, **kwargs)
11
pype/plugins/maya/create/create_layout.py
Normal file

@@ -0,0 +1,11 @@
+import avalon.maya
+
+
+class CreateLayout(avalon.maya.Creator):
+    """A grouped package of loaded content"""
+
+    name = "layoutMain"
+    label = "Layout"
+    family = "layout"
+    icon = "boxes"
+    defaults = ["Main"]
@@ -9,8 +9,12 @@ class CreateLook(avalon.maya.Creator):
    label = "Look"
    family = "look"
    icon = "paint-brush"
+    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreateLook, self).__init__(*args, **kwargs)

        self.data["renderlayer"] = lib.get_current_renderlayer()
+
+        # Whether to automatically convert the textures to .tx upon publish.
+        self.data["maketx"] = True
@@ -8,3 +8,4 @@ class CreateMayaAscii(avalon.maya.Creator):
    label = "Maya Ascii"
    family = "mayaAscii"
    icon = "file-archive-o"
+    defaults = ['Main']
@@ -4,10 +4,11 @@ import avalon.maya
class CreateModel(avalon.maya.Creator):
    """Polygonal static geometry"""

-    name = "modelDefault"
+    name = "modelMain"
    label = "Model"
    family = "model"
    icon = "cube"
+    defaults = ["Main", "Proxy"]

    def __init__(self, *args, **kwargs):
        super(CreateModel, self).__init__(*args, **kwargs)
@@ -9,6 +9,7 @@ class CreatePointCache(avalon.maya.Creator):
    label = "Point Cache"
    family = "pointcache"
    icon = "gears"
+    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreatePointCache, self).__init__(*args, **kwargs)
@@ -4,14 +4,15 @@ import pype.maya.lib as lib

 from avalon.vendor import requests
 import avalon.maya
+# from avalon import api
 import os


 class CreateRenderGlobals(avalon.maya.Creator):

    label = "Render Globals"
    family = "renderglobals"
    icon = "gears"
+    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreateRenderGlobals, self).__init__(*args, **kwargs)

@@ -19,19 +20,23 @@ class CreateRenderGlobals(avalon.maya.Creator):
        # We won't be publishing this one
        self.data["id"] = "avalon.renderglobals"

-        # Get available Deadline pools
-        try:
-            AVALON_DEADLINE = os.environ["AVALON_DEADLINE"]
-        except KeyError:
-            self.log.error("Deadline REST API url not found.")
+        # get pools
+        pools = []

-        argument = "{}/api/pools?NamesOnly=true".format(AVALON_DEADLINE)
-        response = requests.get(argument)
-        if not response.ok:
-            self.log.warning("No pools retrieved")
-            pools = []
+        deadline_url = os.environ.get('DEADLINE_REST_URL', None)
+        if deadline_url is None:
+            self.log.warning("Deadline REST API url not found.")
        else:
-            pools = response.json()
+            argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
+            response = requests.get(argument)
+            if not response.ok:
+                self.log.warning("No pools retrieved")
+            else:
+                pools = response.json()
+        self.data["primaryPool"] = pools
+        # We add a string "-" to allow the user to not
+        # set any secondary pools
+        self.data["secondaryPool"] = ["-"] + pools

        # We don't need subset or asset attributes
        # self.data.pop("subset", None)

@@ -47,9 +52,6 @@ class CreateRenderGlobals(avalon.maya.Creator):
        self.data["whitelist"] = False
        self.data["machineList"] = ""
        self.data["useMayaBatch"] = True
-        self.data["primaryPool"] = pools
-        # We add a string "-" to allow the user to not set any secondary pools
-        self.data["secondaryPool"] = ["-"] + pools

        self.options = {"useSelection": False}  # Force no content
@@ -10,6 +10,7 @@ class CreateReview(avalon.maya.Creator):
    label = "Review"
    family = "review"
    icon = "video-camera"
+    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreateReview, self).__init__(*args, **kwargs)
@@ -11,6 +11,7 @@ class CreateRig(avalon.maya.Creator):
    label = "Rig"
    family = "rig"
    icon = "wheelchair"
+    defaults = ['Main']

    def process(self):
@@ -4,7 +4,8 @@ import avalon.maya
class CreateSetDress(avalon.maya.Creator):
    """A grouped package of loaded content"""

-    name = "setdress"
+    name = "setdressMain"
    label = "Set Dress"
    family = "setdress"
-    icon = "cubes"
+    icon = "boxes"
+    defaults = ["Main", "Anim"]
@@ -1,4 +1,6 @@
 import pype.maya.plugin
+import os
+import json


 class AbcLoader(pype.maya.plugin.ReferenceLoader):

@@ -16,6 +18,12 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader):

        import maya.cmds as cmds

+        try:
+            family = context["representation"]["context"]["family"]
+        except ValueError:
+            family = "animation"
+
+        groupName = "{}:{}".format(namespace, name)
        cmds.loadPlugin("AbcImport.mll", quiet=True)
        nodes = cmds.file(self.fname,
                          namespace=namespace,

@@ -25,6 +33,23 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader):
                          reference=True,
                          returnNewNodes=True)

+        cmds.makeIdentity(groupName, apply=False, rotate=True,
+                          translate=True, scale=True)
+
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(groupName + ".useOutlinerColor", 1)
+            cmds.setAttr(groupName + ".outlinerColor",
+                         c[0], c[1], c[2])
+
        self[:] = nodes

        return nodes
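Several loaders in this commit repeat the same preset-driven outliner colouring seen above; factored into one helper, the shared pattern would look roughly like this (the function name and standalone form are assumptions, not part of the commit):

import json
import os

from maya import cmds

def apply_family_color(node, family):
    # family_colors.json maps a family name to an [r, g, b] triple.
    preset_file = os.path.join(
        os.environ.get('PYPE_STUDIO_TEMPLATES'),
        'presets', 'tools', 'family_colors.json'
    )
    with open(preset_file, 'r') as cfile:
        colors = json.load(cfile)

    c = colors.get(family)
    if c is not None:
        # Tint the loaded group in the Maya outliner with the family colour.
        cmds.setAttr(node + '.useOutlinerColor', 1)
        cmds.setAttr(node + '.outlinerColor', c[0], c[1], c[2])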
@@ -2,6 +2,7 @@ from avalon import api
 import pype.maya.plugin
 import os
 import pymel.core as pm
+import json


 class AssProxyLoader(pype.maya.plugin.ReferenceLoader):

@@ -21,6 +22,11 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
        from avalon import maya
        import pymel.core as pm

+        try:
+            family = context["representation"]["context"]["family"]
+        except ValueError:
+            family = "ass"
+
        with maya.maintained_selection():

            groupName = "{}:{}".format(namespace, name)

@@ -34,7 +40,8 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
                              groupReference=True,
                              groupName=groupName)

-            cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True)
+            cmds.makeIdentity(groupName, apply=False, rotate=True,
+                              translate=True, scale=True)

            # Set attributes
            proxyShape = pm.ls(nodes, type="mesh")[0]

@@ -43,6 +50,19 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
            proxyShape.dso.set(path)
            proxyShape.aiOverrideShaders.set(0)

+            preset_file = os.path.join(
+                os.environ.get('PYPE_STUDIO_TEMPLATES'),
+                'presets', 'tools',
+                'family_colors.json'
+            )
+            with open(preset_file, 'r') as cfile:
+                colors = json.load(cfile)
+
+            c = colors.get(family)
+            if c is not None:
+                cmds.setAttr(groupName + ".useOutlinerColor", 1)
+                cmds.setAttr(groupName + ".outlinerColor",
+                             c[0], c[1], c[2])
+
        self[:] = nodes

@@ -132,7 +152,6 @@ class AssStandinLoader(api.Loader):
        import mtoa.ui.arnoldmenu
        import pymel.core as pm
-

        asset = context['asset']['name']
        namespace = namespace or lib.unique_namespace(
            asset + "_",

@@ -146,6 +165,20 @@ class AssStandinLoader(api.Loader):
        label = "{}:{}".format(namespace, name)
        root = pm.group(name=label, empty=True)

+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get('ass')
+        if c is not None:
+            cmds.setAttr(root + ".useOutlinerColor", 1)
+            cmds.setAttr(root + ".outlinerColor",
+                         c[0], c[1], c[2])
+
        # Create transform with shape
        transform_name = label + "_ASS"
        # transform = pm.createNode("transform", name=transform_name,

@@ -160,10 +193,6 @@ class AssStandinLoader(api.Loader):
        # Set the standin filepath
        standinShape.dso.set(self.fname)

-
-        # Lock parenting of the transform and standin
-        cmds.lockNode([root, standin], lock=True)
-
        nodes = [root, standin]
        self[:] = nodes
@@ -1,9 +1,9 @@
 from avalon import api


-class SetDressLoader(api.Loader):
+class AssemblyLoader(api.Loader):

-    families = ["setdress"]
+    families = ["assembly"]
    representations = ["json"]

    label = "Load Set Dress"

@@ -77,4 +77,4 @@ class SetDressLoader(api.Loader):
        members = cmds.sets(container['objectName'], query=True) or []
        cmds.delete([container['objectName']] + members)

        # TODO: Ensure namespace is gone
@@ -1,4 +1,6 @@
 import pype.maya.plugin
+import os
+import json


 class CameraLoader(pype.maya.plugin.ReferenceLoader):

@@ -16,7 +18,13 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader):
        import maya.cmds as cmds
        # Get family type from the context

+        try:
+            family = context["representation"]["context"]["family"]
+        except ValueError:
+            family = "camera"
+
        cmds.loadPlugin("AbcImport.mll", quiet=True)
+        groupName = "{}:{}".format(namespace, name)
        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          sharedReferenceFile=False,

@@ -27,6 +35,20 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader):

        cameras = cmds.ls(nodes, type="camera")

+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(groupName + ".useOutlinerColor", 1)
+            cmds.setAttr(groupName + ".outlinerColor",
+                         c[0], c[1], c[2])
+
        # Check the Maya version, lockTransform has been introduced since
        # Maya 2016.5 Ext 2
        version = int(cmds.about(version=True))
@@ -1,4 +1,6 @@
 import pype.maya.plugin
+import os
+import json


 class FBXLoader(pype.maya.plugin.ReferenceLoader):

@@ -17,6 +19,11 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader):
        import maya.cmds as cmds
        from avalon import maya

+        try:
+            family = context["representation"]["context"]["family"]
+        except ValueError:
+            family = "fbx"
+
        # Ensure FBX plug-in is loaded
        cmds.loadPlugin("fbxmaya", quiet=True)


@@ -28,6 +35,21 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader):
                          groupReference=True,
                          groupName="{}:{}".format(namespace, name))

+        groupName = "{}:{}".format(namespace, name)
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(groupName + ".useOutlinerColor", 1)
+            cmds.setAttr(groupName + ".outlinerColor",
+                         c[0], c[1], c[2])
+
        self[:] = nodes

        return nodes
@@ -1,4 +1,8 @@
 import pype.maya.plugin
+from avalon import api, io
+import json
+import pype.maya.lib
+from collections import defaultdict


 class LookLoader(pype.maya.plugin.ReferenceLoader):

@@ -14,7 +18,7 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):

    def process_reference(self, context, name, namespace, data):
        """
-        Load and try to ssign Lookdev to nodes based on relationship data
+        Load and try to assign Lookdev to nodes based on relationship data
        Args:
            name:
            namespace:

@@ -38,3 +42,99 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):

    def switch(self, container, representation):
        self.update(container, representation)
+
+    def update(self, container, representation):
+
+        import os
+        from maya import cmds
+
+        node = container["objectName"]
+
+        path = api.get_representation_path(representation)
+
+        # Get reference node from container members
+        members = cmds.sets(node, query=True, nodesOnly=True)
+        reference_node = self._get_reference_node(members)
+
+        file_type = {
+            "ma": "mayaAscii",
+            "mb": "mayaBinary",
+            "abc": "Alembic"
+        }.get(representation["name"])
+
+        assert file_type, "Unsupported representation: %s" % representation
+
+        assert os.path.exists(path), "%s does not exist." % path
+
+        try:
+            content = cmds.file(path,
+                                loadReference=reference_node,
+                                type=file_type,
+                                returnNewNodes=True)
+        except RuntimeError as exc:
+            # When changing a reference to a file that has load errors the
+            # command will raise an error even if the file is still loaded
+            # correctly (e.g. when raising errors on Arnold attributes)
+            # When the file is loaded and has content, we consider it's fine.
+            if not cmds.referenceQuery(reference_node, isLoaded=True):
+                raise
+
+            content = cmds.referenceQuery(reference_node,
+                                          nodes=True,
+                                          dagPath=True)
+            if not content:
+                raise
+
+            self.log.warning("Ignoring file read error:\n%s", exc)
+
+        # Fix PLN-40 for older containers created with Avalon that had the
+        # `.verticesOnlySet` set to True.
+        if cmds.getAttr("{}.verticesOnlySet".format(node)):
+            self.log.info("Setting %s.verticesOnlySet to False", node)
+            cmds.setAttr("{}.verticesOnlySet".format(node), False)
+
+        # Add new nodes of the reference to the container
+        cmds.sets(content, forceElement=node)
+
+        # Remove any placeHolderList attribute entries from the set that
+        # are remaining from nodes being removed from the referenced file.
+        members = cmds.sets(node, query=True)
+        invalid = [x for x in members if ".placeHolderList" in x]
+        if invalid:
+            cmds.sets(invalid, remove=node)
+
+        # Get container members
+        shader_nodes = cmds.ls(members, type='shadingEngine')
+
+        nodes_list = []
+        for shader in shader_nodes:
+            connections = cmds.listConnections(cmds.listHistory(shader, f=1),
+                                               type='mesh')
+            if connections:
+                for connection in connections:
+                    nodes_list.extend(cmds.listRelatives(connection,
+                                                         shapes=True))
+        nodes = set(nodes_list)
+
+        json_representation = io.find_one({"type": "representation",
+                                           "parent": representation['parent'],
+                                           "name": "json"})
+
+        # Load relationships
+        shader_relation = api.get_representation_path(json_representation)
+        with open(shader_relation, "r") as f:
+            relationships = json.load(f)
+
+        attributes = relationships.get("attributes", [])
+
+        # region compute lookup
+        nodes_by_id = defaultdict(list)
+        for n in nodes:
+            nodes_by_id[pype.maya.lib.get_id(n)].append(n)
+
+        pype.maya.lib.apply_attributes(attributes, nodes_by_id)
+
+        # Update metadata
+        cmds.setAttr("{}.representation".format(node),
+                     str(representation["_id"]),
+                     type="string")
@@ -1,10 +1,14 @@
 import pype.maya.plugin
+import json
+import os


 class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
    """Load the model"""

-    families = ["mayaAscii"]
+    families = ["mayaAscii",
+                "setdress",
+                "layout"]
    representations = ["ma"]

    label = "Reference Maya Ascii"

@@ -17,6 +21,11 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
        import maya.cmds as cmds
        from avalon import maya

+        try:
+            family = context["representation"]["context"]["family"]
+        except ValueError:
+            family = "model"
+
        with maya.maintained_selection():
            nodes = cmds.file(self.fname,
                              namespace=namespace,

@@ -26,6 +35,20 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
                              groupName="{}:{}".format(namespace, name))

        self[:] = nodes
+        groupName = "{}:{}".format(namespace, name)
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(groupName + ".useOutlinerColor", 1)
+            cmds.setAttr(groupName + ".outlinerColor",
+                         c[0], c[1], c[2])

        return nodes
@ -1,5 +1,7 @@
|
|||
from avalon import api
|
||||
import pype.maya.plugin
|
||||
import json
|
||||
import os
|
||||
|
||||
|
||||
class ModelLoader(pype.maya.plugin.ReferenceLoader):
|
||||
|
|
@ -7,6 +9,7 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
|
|||
|
||||
families = ["model"]
|
||||
representations = ["ma"]
|
||||
tool_names = ["loader"]
|
||||
|
||||
label = "Reference Model"
|
||||
order = -10
|
||||
|
|
@@ -18,13 +21,36 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
         import maya.cmds as cmds
         from avalon import maya

+        try:
+            family = context["representation"]["context"]["family"]
+        except (KeyError, ValueError):
+            family = "model"
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
         with maya.maintained_selection():

+            groupName = "{}:{}".format(namespace, name)
             nodes = cmds.file(self.fname,
                               namespace=namespace,
                               reference=True,
                               returnNewNodes=True,
                               groupReference=True,
-                              groupName="{}:{}".format(namespace, name))
+                              groupName=groupName)
+
+            cmds.makeIdentity(groupName, apply=False, rotate=True,
+                              translate=True, scale=True)
+
+            c = colors.get(family)
+            if c is not None:
+                cmds.setAttr(groupName + ".useOutlinerColor", 1)
+                cmds.setAttr(groupName + ".outlinerColor",
+                             c[0], c[1], c[2])

             self[:] = nodes
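Worth noting about the `makeIdentity` call introduced here and in the loaders below: with `apply=False` it resets the group's translate, rotate and scale channels back to their defaults rather than freezing them, so a freshly referenced asset always lands at the origin. For comparison (an illustrative snippet, not part of the commit):

from maya import cmds

grp = cmds.group(empty=True, name="demo_GRP")
cmds.setAttr(grp + ".translateX", 5)

# apply=False zeroes the channels, moving the group back to the origin.
cmds.makeIdentity(grp, apply=False, translate=True, rotate=True, scale=True)

# apply=True would instead freeze the current transform, keeping the
# node where it is while resetting the channel values.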
@@ -63,6 +89,19 @@ class GpuCacheLoader(api.Loader):
         # Root group
         label = "{}:{}".format(namespace, name)
         root = cmds.group(name=label, empty=True)
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get('model')
+        if c is not None:
+            cmds.setAttr(root + ".useOutlinerColor", 1)
+            cmds.setAttr(root + ".outlinerColor",
+                         c[0], c[1], c[2])

         # Create transform with shape
         transform_name = label + "_GPU"
@@ -124,11 +163,13 @@ class GpuCacheLoader(api.Loader):
             except RuntimeError:
                 pass


 class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
     """Specific loader of Alembic for the studio.animation family"""

     families = ["model"]
     representations = ["abc"]
+    tool_names = ["loader"]

     label = "Reference Model"
     order = -10
@@ -139,15 +180,36 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader):

         import maya.cmds as cmds

+        groupName = "{}:{}".format(namespace, name)
         cmds.loadPlugin("AbcImport.mll", quiet=True)
         nodes = cmds.file(self.fname,
                           namespace=namespace,
                           sharedReferenceFile=False,
                           groupReference=True,
-                          groupName="{}:{}".format(namespace, name),
+                          groupName=groupName,
                           reference=True,
                           returnNewNodes=True)

+        namespace = cmds.referenceQuery(nodes[0], namespace=True)
+        groupName = "{}:{}".format(namespace, name)
+
+        cmds.makeIdentity(groupName, apply=False, rotate=True,
+                          translate=True, scale=True)
+
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get('model')
+        if c is not None:
+            cmds.setAttr(groupName + ".useOutlinerColor", 1)
+            cmds.setAttr(groupName + ".outlinerColor",
+                         c[0], c[1], c[2])
+
         self[:] = nodes

         return nodes
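The re-query of the namespace after the `cmds.file` call matters: Maya may rename the requested namespace to avoid a clash, so the group name has to be rebuilt from what was actually assigned. A minimal sketch of the pitfall (file path and namespace are placeholders):

from maya import cmds

# The namespace actually assigned can differ from the one requested
# (Maya de-duplicates clashes, e.g. "hero" may come back as "hero1"),
# so always re-query it from a returned node before using it.
nodes = cmds.file("asset.abc", reference=True, returnNewNodes=True,
                  namespace="hero")
namespace = cmds.referenceQuery(nodes[0], namespace=True)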
@@ -2,6 +2,8 @@ from maya import cmds

 import pype.maya.plugin
 from avalon import api, maya
+import os
+import json


 class RigLoader(pype.maya.plugin.ReferenceLoader):
@@ -21,12 +23,35 @@ class RigLoader(pype.maya.plugin.ReferenceLoader):

     def process_reference(self, context, name, namespace, data):

+        try:
+            family = context["representation"]["context"]["family"]
+        except (KeyError, ValueError):
+            family = "rig"
+
+        groupName = "{}:{}".format(namespace, name)
         nodes = cmds.file(self.fname,
                           namespace=namespace,
                           reference=True,
                           returnNewNodes=True,
                           groupReference=True,
-                          groupName="{}:{}".format(namespace, name))
+                          groupName=groupName)
+
+        cmds.makeIdentity(groupName, apply=False, rotate=True,
+                          translate=True, scale=True)
+
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(groupName + ".useOutlinerColor", 1)
+            cmds.setAttr(groupName + ".outlinerColor",
+                         c[0], c[1], c[2])

         # Store for post-process
         self[:] = nodes
@@ -1,4 +1,6 @@
 from avalon import api
+import os
+import json


 class LoadVDBtoRedShift(api.Loader):
@@ -17,6 +19,11 @@ class LoadVDBtoRedShift(api.Loader):
         import avalon.maya.lib as lib
         from avalon.maya.pipeline import containerise

+        try:
+            family = context["representation"]["context"]["family"]
+        except (KeyError, ValueError):
+            family = "vdbcache"
+
         # Check if the plugin for redshift is available on the pc
         try:
             cmds.loadPlugin("redshift4maya", quiet=True)
@@ -48,6 +55,19 @@ class LoadVDBtoRedShift(api.Loader):
         # Root group
         label = "{}:{}".format(namespace, name)
         root = cmds.group(name=label, empty=True)
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(root + ".useOutlinerColor", 1)
+            cmds.setAttr(root + ".outlinerColor",
+                         c[0], c[1], c[2])

         # Create VR
         volume_node = cmds.createNode("RedshiftVolumeShape",
@@ -1,4 +1,6 @@
 from avalon import api
+import json
+import os


 class LoadVDBtoVRay(api.Loader):
@@ -16,6 +18,11 @@ class LoadVDBtoVRay(api.Loader):
         import avalon.maya.lib as lib
         from avalon.maya.pipeline import containerise

+        try:
+            family = context["representation"]["context"]["family"]
+        except (KeyError, ValueError):
+            family = "vdbcache"
+
         # Check if viewport drawing engine is Open GL Core (compat)
         render_engine = None
         compatible = "OpenGLCoreProfileCompat"
@@ -40,6 +47,19 @@ class LoadVDBtoVRay(api.Loader):
         # Root group
         label = "{}:{}".format(namespace, name)
         root = cmds.group(name=label, empty=True)
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(root + ".useOutlinerColor", 1)
+            cmds.setAttr(root + ".outlinerColor",
+                         c[0], c[1], c[2])

         # Create VR
         grid_node = cmds.createNode("VRayVolumeGrid",
@@ -1,6 +1,7 @@
 from avalon.maya import lib
 from avalon import api

+import json
 import os
 import maya.cmds as cmds
@@ -20,6 +21,19 @@ class VRayProxyLoader(api.Loader):
         from avalon.maya.pipeline import containerise
         from pype.maya.lib import namespaced

+        try:
+            family = context["representation"]["context"]["family"]
+        except (KeyError, ValueError):
+            family = "vrayproxy"
+
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
         asset_name = context['asset']["name"]
         namespace = namespace or lib.unique_namespace(
             asset_name + "_",
@@ -40,6 +54,12 @@ class VRayProxyLoader(api.Loader):
         if not nodes:
             return

+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr("{0}_{1}.useOutlinerColor".format(name, "GRP"), 1)
+            cmds.setAttr("{0}_{1}.outlinerColor".format(name, "GRP"),
+                         c[0], c[1], c[2])
+
         return containerise(
             name=name,
             namespace=namespace,
@@ -101,7 +121,7 @@ class VRayProxyLoader(api.Loader):
         # Create nodes
         vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
         mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
-        vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
+        vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
                                     name="{}_VRMM".format(name))
         vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
                                 empty=True,
@@ -23,6 +23,11 @@ class YetiCacheLoader(api.Loader):

     def load(self, context, name=None, namespace=None, data=None):

+        try:
+            family = context["representation"]["context"]["family"]
+        except (KeyError, ValueError):
+            family = "yeticache"
+
         # Build namespace
         asset = context["asset"]
         if namespace is None:
@@ -49,6 +54,19 @@ class YetiCacheLoader(api.Loader):

         group_name = "{}:{}".format(namespace, name)
         group_node = cmds.group(nodes, name=group_name)
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get(family)
+        if c is not None:
+            cmds.setAttr(group_name + ".useOutlinerColor", 1)
+            cmds.setAttr(group_name + ".outlinerColor",
+                         c[0], c[1], c[2])

         nodes.append(group_node)
@@ -1,4 +1,6 @@
 import pype.maya.plugin
+import os
+import json


 class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
@@ -24,6 +26,20 @@ class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
                           groupReference=True,
                           groupName="{}:{}".format(namespace, name))

+        groupName = "{}:{}".format(namespace, name)
+        preset_file = os.path.join(
+            os.environ.get('PYPE_STUDIO_TEMPLATES'),
+            'presets', 'tools',
+            'family_colors.json'
+        )
+        with open(preset_file, 'r') as cfile:
+            colors = json.load(cfile)
+
+        c = colors.get('yetiRig')
+        if c is not None:
+            cmds.setAttr(groupName + ".useOutlinerColor", 1)
+            cmds.setAttr(groupName + ".outlinerColor",
+                         c[0], c[1], c[2])
         self[:] = nodes

         self.log.info("Yeti Rig Connection Manager will be available soon")
@@ -5,11 +5,11 @@ from maya import cmds, mel
 from avalon import maya as avalon
 from pype.maya import lib

-# TODO : Publish of setdress: -unique namespace for all assets, VALIDATOR!
+# TODO : Publish of assembly: -unique namespace for all assets, VALIDATOR!


-class CollectSetDress(pyblish.api.InstancePlugin):
-    """Collect all relevant setdress items
+class CollectAssembly(pyblish.api.InstancePlugin):
+    """Collect all relevant assembly items

     Collected data:

@@ -24,8 +24,8 @@ class CollectSetDress(pyblish.api.InstancePlugin):
     """

     order = pyblish.api.CollectorOrder + 0.49
-    label = "Set Dress"
-    families = ["setdress"]
+    label = "Assembly"
+    families = ["assembly"]

     def process(self, instance):
@@ -35,11 +35,14 @@ class CollectSetDress(pyblish.api.InstancePlugin):
         # Get all content from the instance
         instance_lookup = set(cmds.ls(instance, type="transform", long=True))
         data = defaultdict(list)
+        self.log.info(instance_lookup)

         hierarchy_nodes = []
         for container in containers:

+            self.log.info(container)
             root = lib.get_container_transforms(container, root=True)
+            self.log.info(root)
             if not root or root not in instance_lookup:
                 continue
@@ -47,6 +47,8 @@ def get_look_attrs(node):
     for attr in attrs:
         if attr in SHAPE_ATTRS:
             result.append(attr)
+        elif attr.startswith('ai'):
+            result.append(attr)

     return result
@@ -157,6 +159,9 @@ def get_file_node_path(node):
         if any(pattern in lower for pattern in patterns):
             return texture_pattern

+    if cmds.nodeType(node) == 'aiImage':
+        return cmds.getAttr('{0}.filename'.format(node))
+
     # otherwise use fileTextureName
     return cmds.getAttr('{0}.fileTextureName'.format(node))
@@ -202,7 +207,7 @@ class CollectLook(pyblish.api.InstancePlugin):

     """

-    order = pyblish.api.CollectorOrder + 0.4
+    order = pyblish.api.CollectorOrder + 0.2
     families = ["look"]
     label = "Collect Look"
     hosts = ["maya"]
@@ -215,6 +220,7 @@ class CollectLook(pyblish.api.InstancePlugin):

         # make ftrack publishable
         instance.data["families"] = ['ftrack']
+        instance.data['maketx'] = True

     def collect(self, instance):
@@ -261,15 +267,37 @@ class CollectLook(pyblish.api.InstancePlugin):
         # Collect file nodes used by shading engines (if we have any)
         files = list()
         looksets = sets.keys()
+        shaderAttrs = [
+            "surfaceShader",
+            "volumeShader",
+            "displacementShader",
+            "aiSurfaceShader",
+            "aiVolumeShader"]
+        materials = list()
+
+        if looksets:
+            for look in looksets:
+                for at in shaderAttrs:
+                    con = cmds.listConnections("{}.{}".format(look, at))
+                    if con:
+                        materials.extend(con)
+
+        self.log.info("Found materials:\n{}".format(materials))
+
         self.log.info("Found the following sets:\n{}".format(looksets))
         # Get the entire node chain of the look sets
-        history = cmds.listHistory(looksets)
+        # history = cmds.listHistory(looksets)
+        history = list()
+        for material in materials:
+            history.extend(cmds.listHistory(material))
         files = cmds.ls(history, type="file", long=True)
+        files.extend(cmds.ls(history, type="aiImage", long=True))

         self.log.info("Collected file nodes:\n{}".format(files))
         # Collect textures if any file nodes are found
         instance.data["resources"] = [self.collect_resource(n)
                                       for n in files]
+        self.log.info("Collected resources:\n{}".format(instance.data["resources"]))

         # Log a warning when no relevant sets were retrieved for the look.
         if not instance.data["lookData"]["relationships"]:
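The change above narrows the texture search: instead of taking the history of the whole look objectSet (which drags in the members' construction history), the collector first walks the shader connections of each shading engine and only then takes the history of those materials. A self-contained sketch of that traversal under the same assumptions (shading engine names are inputs; `or []` guards the None returns of `listConnections`/`listHistory`):

from maya import cmds

SHADER_ATTRS = ["surfaceShader", "volumeShader", "displacementShader",
                "aiSurfaceShader", "aiVolumeShader"]

def get_texture_nodes(shading_engines):
    """Collect file and aiImage nodes feeding the given shading engines."""
    materials = []
    for sg in shading_engines:
        for attr in SHADER_ATTRS:
            connected = cmds.listConnections("{}.{}".format(sg, attr)) or []
            materials.extend(connected)

    history = []
    for material in materials:
        history.extend(cmds.listHistory(material) or [])

    textures = cmds.ls(history, type="file", long=True)
    textures.extend(cmds.ls(history, type="aiImage", long=True))
    return textures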
@@ -361,6 +389,8 @@ class CollectLook(pyblish.api.InstancePlugin):
             # Collect changes to "custom" attributes
             node_attrs = get_look_attrs(node)

+            self.log.info(node_attrs)
+
             # Only include if there are any properties we care about
             if not node_attrs:
                 continue
@@ -387,13 +417,21 @@ class CollectLook(pyblish.api.InstancePlugin):
             dict
         """

-        attribute = "{}.fileTextureName".format(node)
+        self.log.debug("processing: {}".format(node))
+        if cmds.nodeType(node) == 'file':
+            self.log.debug("file node")
+            attribute = "{}.fileTextureName".format(node)
+            computed_attribute = "{}.computedFileTextureNamePattern".format(node)
+        elif cmds.nodeType(node) == 'aiImage':
+            self.log.debug("aiImage node")
+            attribute = "{}.filename".format(node)
+            computed_attribute = attribute
         source = cmds.getAttr(attribute)

         # Compare with the computed file path, e.g. the one with the <UDIM>
         # pattern in it, to generate some logging information about this
         # difference
-        computed_attribute = "{}.computedFileTextureNamePattern".format(node)
+        # computed_attribute = "{}.computedFileTextureNamePattern".format(node)
         computed_source = cmds.getAttr(computed_attribute)
         if source != computed_source:
             self.log.debug("Detected computed file pattern difference "
@@ -15,7 +15,7 @@ class CollectModelData(pyblish.api.InstancePlugin):

     """

-    order = pyblish.api.CollectorOrder + 0.4
+    order = pyblish.api.CollectorOrder + 0.2
     label = 'Collect Model Data'
     families = ["model"]
|
|||
|
|
@ -20,7 +20,7 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
|
|||
|
||||
# Get render globals node
|
||||
try:
|
||||
render_globals = cmds.ls("renderglobalsDefault")[0]
|
||||
render_globals = cmds.ls("renderglobalsMain")[0]
|
||||
except IndexError:
|
||||
self.log.info("Skipping renderlayer collection, no "
|
||||
"renderGlobalsDefault found..")
|
||||
|
|
|
|||
|
|
@@ -60,6 +60,7 @@ class CollectReview(pyblish.api.InstancePlugin):
             data['step'] = instance.data['step']
             data['fps'] = instance.data['fps']
             cmds.setAttr(str(instance) + '.active', 0)
+            self.log.debug('data {}'.format(instance.context[i].data))
             instance.context[i].data.update(data)
             instance.data['remove'] = True
             i += 1
@@ -10,7 +10,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
     """Inject the current working file into context"""

     order = pyblish.api.CollectorOrder - 0.1
-    label = "Maya Scene"
+    label = "Maya Workfile"
     hosts = ['maya']

     def process(self, context):
@@ -29,14 +29,14 @@ class CollectMayaScene(pyblish.api.ContextPlugin):

         # create instance
         instance = context.create_instance(name=filename)
-        subset = 'scene' + task.capitalize()
+        subset = 'workfile' + task.capitalize()

         data.update({
             "subset": subset,
             "asset": os.getenv("AVALON_ASSET", None),
             "label": subset,
-            "publish": True,
-            "family": 'scene',
+            "publish": False,
+            "family": 'workfile',
             "representation": "ma",
             "setMembers": [current_file],
             "stagingDir": folder
@@ -8,7 +8,7 @@ from pype.maya.lib import extract_alembic
 from maya import cmds


-class ExtractSetDress(pype.api.Extractor):
+class ExtractAssembly(pype.api.Extractor):
     """Produce an alembic of just point positions and normals.

     Positions and normals are preserved, but nothing more,
@@ -16,9 +16,9 @@ class ExtractSetDress(pype.api.Extractor):

     """

-    label = "Extract Set Dress"
+    label = "Extract Assembly"
     hosts = ["maya"]
-    families = ["setdress"]
+    families = ["assembly"]

     def process(self, instance):
@@ -2,16 +2,97 @@ import os
 import json
 import tempfile
 import contextlib
+import subprocess
+from collections import OrderedDict

 from maya import cmds

 import pyblish.api
 import avalon.maya
+from avalon import io

 import pype.api
 import pype.maya.lib as lib

+# Modes for transfer
+COPY = 1
+HARDLINK = 2
+
+
+def source_hash(filepath, *args):
+    """Generate simple identifier for a source file.
+
+    This is used to identify whether a source file has previously been
+    processed into the pipeline, e.g. a texture.
+
+    The hash is based on source filepath, modification time and file size.
+    This is only used to identify whether a specific source file was already
+    published before from the same location with the same modification date.
+    We opt to do it this way as opposed to the Avalanche C4 hash as this is
+    much faster and predictable enough for all our production use cases.
+
+    Args:
+        filepath (str): The source file path.
+
+    You can specify additional arguments in the function
+    to allow for specific 'processing' values to be included.
+
+    """
+    # We replace dots with comma because . cannot be a key in a pymongo dict.
+    file_name = os.path.basename(filepath)
+    time = str(os.path.getmtime(filepath))
+    size = str(os.path.getsize(filepath))
+    return "|".join([
+        file_name,
+        time,
+        size
+    ] + list(args)).replace(".", ",")
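For illustration, with a made-up modification time and size the identifier looks like this (dots become commas so the value can be used as a MongoDB key; passing "maketx" gives a converted texture a different identity than its raw source):

source_hash("/textures/diffuse.exr")            # -> "diffuse,exr|1545646717,0|2048576"
source_hash("/textures/diffuse.exr", "maketx")  # -> "diffuse,exr|1545646717,0|2048576|maketx"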
def find_paths_by_hash(texture_hash):
|
||||
# Find the texture hash key in the dictionary and all paths that
|
||||
# originate from it.
|
||||
key = "data.sourceHashes.{0}".format(texture_hash)
|
||||
return io.distinct(key, {"type": "version"})
|
||||
|
||||
|
||||
def maketx(source, destination, *args):
|
||||
"""Make .tx using maketx with some default settings.
|
||||
The settings are based on default as used in Arnold's
|
||||
txManager in the scene.
|
||||
This function requires the `maketx` executable to be
|
||||
on the `PATH`.
|
||||
Args:
|
||||
source (str): Path to source file.
|
||||
destination (str): Writing destination path.
|
||||
"""
|
||||
|
||||
cmd = [
|
||||
"maketx",
|
||||
"-v", # verbose
|
||||
"-u", # update mode
|
||||
# unpremultiply before conversion (recommended when alpha present)
|
||||
"--unpremult",
|
||||
# use oiio-optimized settings for tile-size, planarconfig, metadata
|
||||
"--oiio"
|
||||
]
|
||||
cmd.extend(args)
|
||||
cmd.extend([
|
||||
"-o", destination,
|
||||
source
|
||||
])
|
||||
|
||||
CREATE_NO_WINDOW = 0x08000000
|
||||
try:
|
||||
out = subprocess.check_output(
|
||||
cmd,
|
||||
stderr=subprocess.STDOUT,
|
||||
creationflags=CREATE_NO_WINDOW
|
||||
)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print exc
|
||||
print out
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
raise
|
||||
|
||||
return out
|
||||
|
||||
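For example, the call used later in this file,

maketx("/textures/diffuse.exr", "/staging/resources/diffuse.tx",
       "-sattrib", "sourceHash", "diffuse,exr|1545646717,0|2048576")

ends up running a command line equivalent to:

maketx -v -u --unpremult --oiio -sattrib sourceHash "diffuse,exr|1545646717,0|2048576" -o /staging/resources/diffuse.tx /textures/diffuse.exr

Paths and the hash value are illustrative. Since the command is passed to subprocess as a list, the pipe characters in the hash need no shell quoting.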
+
+
 @contextlib.contextmanager
 def no_workspace_dir():
@@ -79,12 +160,53 @@ class ExtractLook(pype.api.Extractor):
         relationships = lookdata["relationships"]
         sets = relationships.keys()

+        # Extract the textures to transfer, possibly convert with maketx and
+        # remap the node paths to the destination path. Note that a source
+        # might be included more than once amongst the resources as they could
+        # be the input file to multiple nodes.
+        resources = instance.data["resources"]
+        do_maketx = instance.data.get("maketx", False)
+
+        # Collect all unique files used in the resources
+        files = set()
+        for resource in resources:
+            files.update(os.path.normpath(f) for f in resource["files"])
+
+        # Process the resource files
+        transfers = list()
+        hardlinks = list()
+        hashes = dict()
+        for filepath in files:
+            source, mode, hash = self._process_texture(
+                filepath, do_maketx, staging=dir_path
+            )
+            destination = self.resource_destination(
+                instance, source, do_maketx
+            )
+            if mode == COPY:
+                transfers.append((source, destination))
+            elif mode == HARDLINK:
+                hardlinks.append((source, destination))
+
+            # Store the hashes from hash to destination to include in the
+            # database
+            hashes[hash] = destination
+
+        # Remap the resources to the destination path (change node attributes)
+        destinations = dict()
         remap = OrderedDict()  # needs to be ordered, see color space values
         for resource in resources:
+            source = os.path.normpath(resource["source"])
+            if source not in destinations:
+                # Cache destination as source resource might be included
+                # multiple times
+                destinations[source] = self.resource_destination(
+                    instance, source, do_maketx
+                )
+
             # Remap file node filename to destination
             attr = resource['attribute']
-            remap[attr] = resource['destination']
+            remap[attr] = destinations[source]

         # Preserve color space values (force value after filepath change)
         # This will also trigger in the same order at end of context to
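The OrderedDict matters because changing a file node's path attribute can retrigger its color space, so the color space entries must be applied (and restored) after the path entries. The remap is applied only for the duration of the export via `lib.attribute_values`; a sketch of what such a context manager plausibly does (the real implementation lives in pype.maya.lib and may differ):

import contextlib
from maya import cmds

@contextlib.contextmanager
def attribute_values(attr_values):
    """Temporarily apply attribute values, restoring originals on exit."""
    original = [(attr, cmds.getAttr(attr)) for attr in attr_values]
    try:
        for attr, value in attr_values.items():
            if isinstance(value, str):
                # File paths and color spaces are string attributes.
                cmds.setAttr(attr, value, type="string")
            else:
                cmds.setAttr(attr, value)
        yield
    finally:
        for attr, value in original:
            if isinstance(value, str):
                cmds.setAttr(attr, value, type="string")
            else:
                cmds.setAttr(attr, value)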
@@ -107,15 +229,17 @@ class ExtractLook(pype.api.Extractor):
         with lib.attribute_values(remap):
             with avalon.maya.maintained_selection():
                 cmds.select(sets, noExpand=True)
-                cmds.file(maya_path,
-                          force=True,
-                          typ="mayaAscii",
-                          exportSelected=True,
-                          preserveReferences=False,
-                          channels=True,
-                          constraints=True,
-                          expressions=True,
-                          constructionHistory=True)
+                cmds.file(
+                    maya_path,
+                    force=True,
+                    typ="mayaAscii",
+                    exportSelected=True,
+                    preserveReferences=False,
+                    channels=True,
+                    constraints=True,
+                    expressions=True,
+                    constructionHistory=True
+                )

         # Write the JSON data
         self.log.info("Extract json..")
@@ -127,9 +251,90 @@ class ExtractLook(pype.api.Extractor):

         if "files" not in instance.data:
             instance.data["files"] = list()
+        if "hardlinks" not in instance.data:
+            instance.data["hardlinks"] = list()
+        if "transfers" not in instance.data:
+            instance.data["transfers"] = list()

         instance.data["files"].append(maya_fname)
         instance.data["files"].append(json_fname)

-        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
-                                                          maya_path))
+        # Set up the resources transfers/links for the integrator
+        instance.data["transfers"].extend(transfers)
+        instance.data["hardlinks"].extend(hardlinks)
+
+        # Source hash for the textures
+        instance.data["sourceHashes"] = hashes
+
+        self.log.info("Extracted instance '%s' to: %s" % (
+            instance.name, maya_path)
+        )
+
+    def resource_destination(self, instance, filepath, do_maketx):
+
+        # Compute destination location
+        basename, ext = os.path.splitext(os.path.basename(filepath))
+
+        # If maketx then the texture will always end with .tx
+        if do_maketx:
+            ext = ".tx"
+
+        return os.path.join(
+            instance.data["assumedDestination"],
+            "resources",
+            basename + ext
+        )
+
+    def _process_texture(self, filepath, do_maketx, staging):
+        """Process a single texture file on disk for publishing.
+
+        This will:
+            1. Check whether it's already published, if so it will do
+               hardlink.
+            2. If not published and maketx is enabled, generate a new
+               .tx file.
+            3. Compute the destination path for the source file.
+
+        Args:
+            filepath (str): The source file path to process.
+            do_maketx (bool): Whether to produce a .tx file
+
+        Returns:
+            tuple: (source, mode, texture_hash)
+
+        """
+        fname, ext = os.path.splitext(os.path.basename(filepath))
+
+        args = []
+        if do_maketx:
+            args.append("maketx")
+        texture_hash = source_hash(filepath, *args)
+
+        # If source has been published before with the same settings,
+        # then don't reprocess but hardlink from the original
+        existing = find_paths_by_hash(texture_hash)
+        if existing:
+            self.log.info("Found hash in database, preparing hardlink..")
+            source = next((p for p in existing if os.path.exists(p)), None)
+            if source:
+                return source, HARDLINK, texture_hash
+            else:
+                self.log.warning(
+                    "Paths not found on disk, "
+                    "skipping hardlink: %s" % (existing,)
+                )
+
+        if do_maketx and ext != ".tx":
+            # Produce .tx file in staging if source file is not .tx
+            converted = os.path.join(
+                staging,
+                "resources",
+                fname + ".tx"
+            )
+
+            # Ensure folder exists
+            if not os.path.exists(os.path.dirname(converted)):
+                os.makedirs(os.path.dirname(converted))
+
+            self.log.info("Generating .tx file for %s .." % filepath)
+            maketx(filepath, converted,
+                   # Include `source-hash` as string metadata
+                   "-sattrib", "sourceHash", texture_hash)
+
+            return converted, COPY, texture_hash
+
+        return filepath, COPY, texture_hash
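Putting `_process_texture` together with the integrator data above, the per-texture decision is: reuse an already-published file via hardlink when the hash matches and the file still exists, otherwise convert to .tx when enabled, otherwise plain-copy. A condensed, self-contained restatement of that branch logic (no Maya or database required; names are illustrative):

import os

COPY = 1
HARDLINK = 2

def plan_transfer(filepath, existing_paths, do_maketx, staging):
    """Return (source, mode) mirroring _process_texture's branches."""
    fname, ext = os.path.splitext(os.path.basename(filepath))

    # 1. Previously published and still on disk -> hardlink it.
    source = next((p for p in existing_paths if os.path.exists(p)), None)
    if source:
        return source, HARDLINK

    # 2. Needs conversion -> the freshly generated .tx gets copied.
    if do_maketx and ext != ".tx":
        return os.path.join(staging, "resources", fname + ".tx"), COPY

    # 3. Fall through -> copy the original file as-is.
    return filepath, COPY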
@@ -15,7 +15,9 @@ class ExtractMayaAsciiRaw(pype.api.Extractor):

     label = "Maya ASCII (Raw)"
     hosts = ["maya"]
-    families = ["mayaAscii"]
+    families = ["mayaAscii",
+                "setdress",
+                "layout"]

     def process(self, instance):
|
|||
|
|
@ -92,7 +92,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
"""Submit available render layers to Deadline
|
||||
|
||||
Renders are submitted to a Deadline Web Service as
|
||||
supplied via the environment variable AVALON_DEADLINE
|
||||
supplied via the environment variable DEADLINE_REST_URL
|
||||
|
||||
"""
|
||||
|
||||
|
|
@@ -104,9 +104,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):

     def process(self, instance):

-        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
-                                          "http://localhost:8082")
-        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+        DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
+                                           "http://localhost:8082")
+        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

         context = instance.context
         workspace = context.data["workspaceDir"]
@@ -231,14 +231,15 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
             "MAYA_MODULE_PATH",
             "ARNOLD_PLUGIN_PATH",
             "AVALON_SCHEMA",
-            "FTRACK_API_KEY",
-            "FTRACK_API_USER",
-            "FTRACK_SERVER",
             "PYBLISHPLUGINPATH",

             # todo: This is a temporary fix for yeti variables
             "PEREGRINEL_LICENSE",
             "REDSHIFT_MAYAEXTENSIONSPATH",
-            "REDSHIFT_DISABLEOUTPUTLOCKFILES"
+            "REDSHIFT_DISABLEOUTPUTLOCKFILES",
+            "VRAY_FOR_MAYA2018_PLUGINS_X64",
+            "VRAY_PLUGINS_X64",
+            "VRAY_USE_THREAD_AFFINITY",
+            "SOLIDANGLE_LICENSE",
+            "ARNOLD_LICENSE",
+            "MAYA_MODULE_PATH",
+            "TOOL_ENV"
         ]
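Note that the source lines were missing the commas after "REDSHIFT_DISABLEOUTPUTLOCKFILES" and "ARNOLD_LICENSE", which would silently concatenate adjacent strings; they are restored above. These keys are presumably consumed a little further down by forwarding the matching variables from the local session into the farm job, along the lines of (a sketch, not the verbatim plugin code):

import os

keys = ["ARNOLD_PLUGIN_PATH", "AVALON_SCHEMA", "PYBLISHPLUGINPATH"]

# Forward only the variables that are actually set locally.
environment = {key: os.environ[key] for key in keys if key in os.environ}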
@@ -274,6 +275,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
                     clean_path += os.path.normpath(path) + os.pathsep
                 except UnicodeDecodeError:
                     print('path contains non UTF characters')

+            if key == "PYTHONPATH":
+                clean_path = clean_path.replace('python2', 'python3')
+                clean_path = clean_path.replace(
+                    os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
+                    os.path.normpath(environment['PYPE_STUDIO_CORE']))
@@ -301,7 +305,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
         self.log.info(json.dumps(payload, indent=4, sort_keys=True))

         # E.g. http://192.168.0.1:8082/api/jobs
-        url = "{}/api/jobs".format(AVALON_DEADLINE)
+        url = "{}/api/jobs".format(DEADLINE_REST_URL)
         response = requests.post(url, json=payload)
         if not response.ok:
             raise Exception(response.text)
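The endpoint is the standard Deadline Web Service jobs route. For orientation, a minimal standalone submission looks roughly like this (payload fields are illustrative placeholders; the plugin builds a much fuller JobInfo/PluginInfo from the render layer data):

import requests

DEADLINE_REST_URL = "http://localhost:8082"  # assumption: default port

payload = {
    "JobInfo": {"Name": "shot010_beauty", "Plugin": "MayaBatch"},
    "PluginInfo": {"SceneFile": "/path/to/scene.ma"},
    "AuxFiles": []
}

response = requests.post("{}/api/jobs".format(DEADLINE_REST_URL),
                         json=payload)
if not response.ok:
    raise Exception(response.text)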
|
|||
|
|
@ -28,13 +28,13 @@ class VraySubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
def process(self, instance):
|
||||
|
||||
AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
|
||||
DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
|
||||
"http://localhost:8082")
|
||||
assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
|
||||
assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
|
||||
|
||||
context = instance.context
|
||||
|
||||
deadline_url = "{}/api/jobs".format(AVALON_DEADLINE)
|
||||
deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL)
|
||||
deadline_user = context.data.get("deadlineUser", getpass.getuser())
|
||||
|
||||
filepath = context.data["currentFile"]
|
||||
|
|
|
|||
|
|
@@ -3,7 +3,7 @@ import pype.api
 import pype.maya.action


-class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):
+class ValidateAssemblyNamespaces(pyblish.api.InstancePlugin):
     """Ensure namespaces are not nested

     In the outliner an item in a normal namespace looks as following:
@@ -15,9 +15,9 @@ class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):

     """

-    label = "Validate Setdress Namespaces"
+    label = "Validate Assembly Namespaces"
     order = pyblish.api.ValidatorOrder
-    families = ["setdress"]
+    families = ["assembly"]
     actions = [pype.maya.action.SelectInvalidAction]

     def process(self, instance):
Some files were not shown because too many files have changed in this diff.