Merge branch 'feature/set_status_to_assetversion' into 2.x/develop

This commit is contained in:
Milan Kolar 2020-07-27 13:13:18 +02:00
commit 20ff2829ca
31 changed files with 860 additions and 360 deletions

View file

@ -69,15 +69,39 @@ def override_component_mask_commands():
def override_toolbox_ui():
"""Add custom buttons in Toolbox as replacement for Maya web help icon."""
inventory = None
loader = None
launch_workfiles_app = None
mayalookassigner = None
try:
import avalon.tools.sceneinventory as inventory
except Exception:
log.warning("Could not import SceneInventory tool")
try:
import avalon.tools.loader as loader
except Exception:
log.warning("Could not import Loader tool")
try:
from avalon.maya.pipeline import launch_workfiles_app
except Exception:
log.warning("Could not import Workfiles tool")
try:
import mayalookassigner
except Exception:
log.warning("Could not import Maya Look assigner tool")
import avalon.tools.sceneinventory as inventory
import avalon.tools.loader as loader
from avalon.maya.pipeline import launch_workfiles_app
import mayalookassigner
from pype.api import resources
icons = resources.get_resource("icons")
if not any((
mayalookassigner, launch_workfiles_app, loader, inventory
)):
return
# Ensure the maya web icon on toolbox exists
web_button = "ToolBox|MainToolboxLayout|mayaWebButton"
if not mc.iconTextButton(web_button, query=True, exists=True):
@ -96,65 +120,65 @@ def override_toolbox_ui():
# Create our controls
background_color = (0.267, 0.267, 0.267)
controls = []
if mayalookassigner:
controls.append(
mc.iconTextButton(
"pype_toolbox_lookmanager",
annotation="Look Manager",
label="Look Manager",
image=os.path.join(icons, "lookmanager.png"),
command=lambda: mayalookassigner.show(),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent
)
)
control = mc.iconTextButton(
"pype_toolbox_lookmanager",
annotation="Look Manager",
label="Look Manager",
image=os.path.join(icons, "lookmanager.png"),
command=lambda: mayalookassigner.show(),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent)
controls.append(control)
if launch_workfiles_app:
controls.append(
mc.iconTextButton(
"pype_toolbox_workfiles",
annotation="Work Files",
label="Work Files",
image=os.path.join(icons, "workfiles.png"),
command=lambda: launch_workfiles_app(),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent
)
)
control = mc.iconTextButton(
"pype_toolbox_workfiles",
annotation="Work Files",
label="Work Files",
image=os.path.join(icons, "workfiles.png"),
command=lambda: launch_workfiles_app(),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent)
controls.append(control)
if loader:
controls.append(
mc.iconTextButton(
"pype_toolbox_loader",
annotation="Loader",
label="Loader",
image=os.path.join(icons, "loader.png"),
command=lambda: loader.show(use_context=True),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent
)
)
control = mc.iconTextButton(
"pype_toolbox_loader",
annotation="Loader",
label="Loader",
image=os.path.join(icons, "loader.png"),
command=lambda: loader.show(use_context=True),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent)
controls.append(control)
control = mc.iconTextButton(
"pype_toolbox_manager",
annotation="Inventory",
label="Inventory",
image=os.path.join(icons, "inventory.png"),
command=lambda: inventory.show(),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent)
controls.append(control)
# control = mc.iconTextButton(
# "pype_toolbox",
# annotation="Kredenc",
# label="Kredenc",
# image=os.path.join(icons, "kredenc_logo.png"),
# bgc=background_color,
# width=icon_size,
# height=icon_size,
# parent=parent)
# controls.append(control)
if inventory:
controls.append(
mc.iconTextButton(
"pype_toolbox_manager",
annotation="Inventory",
label="Inventory",
image=os.path.join(icons, "inventory.png"),
command=lambda: inventory.show(),
bgc=background_color,
width=icon_size,
height=icon_size,
parent=parent
)
)
# Add the buttons on the bottom and stack
# them above each other with side padding

View file

@ -534,7 +534,9 @@ $.pype = {
if (instances === null) {
return null;
}
if (audioOnly === true) {
// make only audio representations
if (audioOnly === 'true') {
$.pype.log('? looping if audio True');
for (var i = 0; i < instances.length; i++) {
var subsetToRepresentations = instances[i].subsetToRepresentations;

View file

@ -1,6 +1,3 @@
from .clockify_api import ClockifyAPI
from .widget_settings import ClockifySettings
from .widget_message import MessageWidget
from .clockify import ClockifyModule
CLASS_DEFINIION = ClockifyModule

View file

@ -3,11 +3,12 @@ import threading
from pype.api import Logger
from avalon import style
from Qt import QtWidgets
from . import ClockifySettings, ClockifyAPI, MessageWidget
from .widgets import ClockifySettings, MessageWidget
from .clockify_api import ClockifyAPI
from .constants import CLOCKIFY_FTRACK_USER_PATH
class ClockifyModule:
workspace_name = None
def __init__(self, main_parent=None, parent=None):
@ -20,7 +21,7 @@ class ClockifyModule:
self.main_parent = main_parent
self.parent = parent
self.clockapi = ClockifyAPI()
self.clockapi = ClockifyAPI(master_parent=self)
self.message_widget = None
self.widget_settings = ClockifySettings(main_parent, self)
self.widget_settings_required = None
@ -31,8 +32,6 @@ class ClockifyModule:
self.bool_api_key_set = False
self.bool_workspace_set = False
self.bool_timer_run = False
self.clockapi.set_master(self)
self.bool_api_key_set = self.clockapi.set_api()
def tray_start(self):
@ -50,14 +49,12 @@ class ClockifyModule:
def process_modules(self, modules):
if 'FtrackModule' in modules:
actions_path = os.path.sep.join([
os.path.dirname(__file__),
'ftrack_actions'
])
current = os.environ.get('FTRACK_ACTIONS_PATH', '')
if current:
current += os.pathsep
os.environ['FTRACK_ACTIONS_PATH'] = current + actions_path
os.environ['FTRACK_ACTIONS_PATH'] = (
current + CLOCKIFY_FTRACK_USER_PATH
)
if 'AvalonApps' in modules:
from launcher import lib
@ -195,9 +192,10 @@ class ClockifyModule:
).format(project_name))
msg = (
"Project <b>\"{}\"</b> is not in Clockify Workspace <b>\"{}\"</b>."
"Project <b>\"{}\"</b> is not"
" in Clockify Workspace <b>\"{}\"</b>."
"<br><br>Please inform your Project Manager."
).format(project_name, str(self.clockapi.workspace))
).format(project_name, str(self.clockapi.workspace_name))
self.message_widget = MessageWidget(
self.main_parent, msg, "Clockify - Info Message"

View file

@ -1,35 +1,39 @@
import os
import re
import time
import requests
import json
import datetime
import appdirs
from .constants import (
CLOCKIFY_ENDPOINT, ADMIN_PERMISSION_NAMES, CREDENTIALS_JSON_PATH
)
class Singleton(type):
_instances = {}
def time_check(obj):
if obj.request_counter < 10:
obj.request_counter += 1
return
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(
Singleton, cls
).__call__(*args, **kwargs)
return cls._instances[cls]
wait_time = 1 - (time.time() - obj.request_time)
if wait_time > 0:
time.sleep(wait_time)
obj.request_time = time.time()
obj.request_counter = 0
class ClockifyAPI(metaclass=Singleton):
endpoint = "https://api.clockify.me/api/"
headers = {"X-Api-Key": None}
app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
file_name = 'clockify.json'
fpath = os.path.join(app_dir, file_name)
admin_permission_names = ['WORKSPACE_OWN', 'WORKSPACE_ADMIN']
master_parent = None
workspace = None
workspace_id = None
def set_master(self, master_parent):
class ClockifyAPI:
def __init__(self, api_key=None, master_parent=None):
self.workspace_name = None
self.workspace_id = None
self.master_parent = master_parent
self.api_key = api_key
self.request_counter = 0
self.request_time = time.time()
@property
def headers(self):
return {"X-Api-Key": self.api_key}
def verify_api(self):
for key, value in self.headers.items():
@ -42,7 +46,7 @@ class ClockifyAPI(metaclass=Singleton):
api_key = self.get_api_key()
if api_key is not None and self.validate_api_key(api_key) is True:
self.headers["X-Api-Key"] = api_key
self.api_key = api_key
self.set_workspace()
if self.master_parent:
self.master_parent.signed_in()
@ -52,8 +56,9 @@ class ClockifyAPI(metaclass=Singleton):
def validate_api_key(self, api_key):
test_headers = {'X-Api-Key': api_key}
action_url = 'workspaces/'
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=test_headers
)
if response.status_code != 200:
@ -69,25 +74,27 @@ class ClockifyAPI(metaclass=Singleton):
action_url = "/workspaces/{}/users/{}/permissions".format(
workspace_id, user_id
)
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
user_permissions = response.json()
for perm in user_permissions:
if perm['name'] in self.admin_permission_names:
if perm['name'] in ADMIN_PERMISSION_NAMES:
return True
return False
def get_user_id(self):
action_url = 'v1/user/'
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
# this regex is neccessary: UNICODE strings are crashing
# during json serialization
id_regex ='\"{1}id\"{1}\:{1}\"{1}\w+\"{1}'
id_regex = '\"{1}id\"{1}\:{1}\"{1}\w+\"{1}'
result = re.findall(id_regex, str(response.content))
if len(result) != 1:
# replace with log and better message?
@ -98,9 +105,9 @@ class ClockifyAPI(metaclass=Singleton):
def set_workspace(self, name=None):
if name is None:
name = os.environ.get('CLOCKIFY_WORKSPACE', None)
self.workspace = name
self.workspace_name = name
self.workspace_id = None
if self.workspace is None:
if self.workspace_name is None:
return
try:
result = self.validate_workspace()
@ -115,7 +122,7 @@ class ClockifyAPI(metaclass=Singleton):
def validate_workspace(self, name=None):
if name is None:
name = self.workspace
name = self.workspace_name
all_workspaces = self.get_workspaces()
if name in all_workspaces:
return all_workspaces[name]
@ -124,25 +131,26 @@ class ClockifyAPI(metaclass=Singleton):
def get_api_key(self):
api_key = None
try:
file = open(self.fpath, 'r')
file = open(CREDENTIALS_JSON_PATH, 'r')
api_key = json.load(file).get('api_key', None)
if api_key == '':
api_key = None
except Exception:
file = open(self.fpath, 'w')
file = open(CREDENTIALS_JSON_PATH, 'w')
file.close()
return api_key
def save_api_key(self, api_key):
data = {'api_key': api_key}
file = open(self.fpath, 'w')
file = open(CREDENTIALS_JSON_PATH, 'w')
file.write(json.dumps(data))
file.close()
def get_workspaces(self):
action_url = 'workspaces/'
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
return {
@ -153,8 +161,9 @@ class ClockifyAPI(metaclass=Singleton):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/'.format(workspace_id)
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
@ -168,8 +177,9 @@ class ClockifyAPI(metaclass=Singleton):
action_url = 'workspaces/{}/projects/{}/'.format(
workspace_id, project_id
)
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
@ -179,8 +189,9 @@ class ClockifyAPI(metaclass=Singleton):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/tags/'.format(workspace_id)
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
@ -194,8 +205,9 @@ class ClockifyAPI(metaclass=Singleton):
action_url = 'workspaces/{}/projects/{}/tasks/'.format(
workspace_id, project_id
)
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
@ -276,8 +288,9 @@ class ClockifyAPI(metaclass=Singleton):
"taskId": task_id,
"tagIds": tag_ids
}
time_check(self)
response = requests.post(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
)
@ -293,8 +306,9 @@ class ClockifyAPI(metaclass=Singleton):
action_url = 'workspaces/{}/timeEntries/inProgress'.format(
workspace_id
)
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
try:
@ -323,8 +337,9 @@ class ClockifyAPI(metaclass=Singleton):
"tagIds": current["tagIds"],
"end": self.get_current_time()
}
time_check(self)
response = requests.put(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
)
@ -336,8 +351,9 @@ class ClockifyAPI(metaclass=Singleton):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
time_check(self)
response = requests.get(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
return response.json()[:quantity]
@ -348,8 +364,9 @@ class ClockifyAPI(metaclass=Singleton):
action_url = 'workspaces/{}/timeEntries/{}'.format(
workspace_id, tid
)
time_check(self)
response = requests.delete(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
return response.json()
@ -369,8 +386,9 @@ class ClockifyAPI(metaclass=Singleton):
"color": "#f44336",
"billable": "true"
}
time_check(self)
response = requests.post(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
)
@ -379,8 +397,9 @@ class ClockifyAPI(metaclass=Singleton):
def add_workspace(self, name):
action_url = 'workspaces/'
body = {"name": name}
time_check(self)
response = requests.post(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
)
@ -398,8 +417,9 @@ class ClockifyAPI(metaclass=Singleton):
"name": name,
"projectId": project_id
}
time_check(self)
response = requests.post(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
)
@ -412,8 +432,9 @@ class ClockifyAPI(metaclass=Singleton):
body = {
"name": name
}
time_check(self)
response = requests.post(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
)
@ -427,8 +448,9 @@ class ClockifyAPI(metaclass=Singleton):
action_url = '/workspaces/{}/projects/{}'.format(
workspace_id, project_id
)
time_check(self)
response = requests.delete(
self.endpoint + action_url,
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
)
return response.json()

View file

@ -0,0 +1,17 @@
import os
import appdirs

# Path to ftrack event handlers run by the ftrack event *server* process
# (appended to FTRACK_EVENTS_PATH during server registration).
CLOCKIFY_FTRACK_SERVER_PATH = os.path.join(
    os.path.dirname(__file__), "ftrack", "server"
)
# Path to ftrack actions run in a user's local session
# (appended to FTRACK_ACTIONS_PATH by the tray module).
CLOCKIFY_FTRACK_USER_PATH = os.path.join(
    os.path.dirname(__file__), "ftrack", "user"
)
# Local JSON file where the user's Clockify API key is cached.
CREDENTIALS_JSON_PATH = os.path.normpath(os.path.join(
    appdirs.user_data_dir("pype-app", "pype"),
    "clockify.json"
))
# Clockify permission names treated as workspace-admin rights.
ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"]
# Base URL of the Clockify REST API.
CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/"

View file

@ -0,0 +1,166 @@
import os
import json
from pype.modules.ftrack.lib import BaseAction
from pype.modules.clockify.clockify_api import ClockifyAPI
class SyncClocifyServer(BaseAction):
    '''Synchronise project name and task types from ftrack to Clockify.

    Server-side variant of the sync action: credentials come from the
    environment (CLOCKIFY_WORKSPACE / CLOCKIFY_API_KEY) instead of the
    user's local tray session.
    '''

    identifier = "clockify.sync.server"
    label = "Sync To Clockify (server)"
    description = "Synchronise data to Clockify workspace"
    # Security roles whose members may see this action.
    # NOTE(review): "project Manager" is lowercase while the other roles are
    # capitalized - confirm it matches the exact role name in ftrack.
    discover_role_list = ["Pypeclub", "Administrator", "project Manager"]

    def __init__(self, *args, **kwargs):
        """Create the Clockify API client and log a masked key for debugging.

        The middle half of the API key is replaced with "X" so the key can
        be identified in logs without being leaked.
        """
        super(SyncClocifyServer, self).__init__(*args, **kwargs)

        workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
        api_key = os.environ.get("CLOCKIFY_API_KEY")
        self.clockapi = ClockifyAPI(api_key)
        self.clockapi.set_workspace(workspace_name)
        if api_key is None:
            modified_key = "None"
        else:
            # Keep the first and last quarter of the key visible.
            str_len = int(len(api_key) / 2)
            start_replace = int(len(api_key) / 4)
            modified_key = ""
            for idx in range(len(api_key)):
                if idx >= start_replace and idx < start_replace + str_len:
                    replacement = "X"
                else:
                    replacement = api_key[idx]
                modified_key += replacement

        self.log.info(
            "Clockify info. Workspace: \"{}\" API key: \"{}\"".format(
                str(workspace_name), str(modified_key)
            )
        )

    def discover(self, session, entities, event):
        """Show the action only on a single Project entity for allowed roles."""
        if (
            len(entities) != 1
            or entities[0].entity_type.lower() != "project"
        ):
            return False

        # Get user and check his roles
        user_id = event.get("source", {}).get("user", {}).get("id")
        if not user_id:
            return False

        user = session.query("User where id is \"{}\"".format(user_id)).first()
        if not user:
            return False

        for role in user["user_security_roles"]:
            if role["security_role"]["name"] in self.discover_role_list:
                return True
        return False

    def register(self):
        """Subscribe discover/launch handlers on the session event hub.

        Launch events are filtered by this action's identifier so only
        matching actions reach ``_launch``.
        """
        self.session.event_hub.subscribe(
            "topic=ftrack.action.discover",
            self._discover,
            priority=self.priority
        )
        launch_subscription = (
            "topic=ftrack.action.launch and data.actionIdentifier={}"
        ).format(self.identifier)
        self.session.event_hub.subscribe(launch_subscription, self._launch)

    def launch(self, session, entities, event):
        """Create the Clockify project and one tag per ftrack task type.

        Returns a ``{"success": False, "message": ...}`` dict when
        preconditions fail, otherwise True. Progress is tracked with an
        ftrack Job entity that ends as "done" or "failed".
        """
        if self.clockapi.workspace_id is None:
            return {
                "success": False,
                "message": "Clockify Workspace or API key are not set!"
            }

        if self.clockapi.validate_workspace_perm() is False:
            return {
                "success": False,
                "message": "Missing permissions for this action!"
            }

        # JOB SETTINGS
        user_id = event["source"]["user"]["id"]
        # NOTE(review): the id is unquoted here, unlike in ``discover`` -
        # confirm the ftrack query accepts an unquoted id value.
        user = session.query("User where id is " + user_id).one()

        job = session.create("Job", {
            "user": user,
            "status": "running",
            "data": json.dumps({"description": "Sync Ftrack to Clockify"})
        })
        session.commit()

        project_entity = entities[0]
        if project_entity.entity_type.lower() != "project":
            project_entity = self.get_project_from_entity(project_entity)

        project_name = project_entity["full_name"]
        self.log.info(
            "Synchronization of project \"{}\" to clockify begins.".format(
                project_name
            )
        )
        task_types = (
            project_entity["project_schema"]["_task_type_schema"]["types"]
        )
        task_type_names = [
            task_type["name"] for task_type in task_types
        ]
        try:
            # Ensure the project exists in the Clockify workspace.
            clockify_projects = self.clockapi.get_projects()
            if project_name not in clockify_projects:
                response = self.clockapi.add_project(project_name)
                if "id" not in response:
                    self.log.warning(
                        "Project \"{}\" can't be created. Response: {}".format(
                            project_name, response
                        )
                    )
                    return {
                        "success": False,
                        "message": (
                            "Can't create clockify project \"{}\"."
                            " Unexpected error."
                        ).format(project_name)
                    }

            # Mirror each ftrack task type as a Clockify tag.
            clockify_workspace_tags = self.clockapi.get_tags()
            for task_type_name in task_type_names:
                if task_type_name in clockify_workspace_tags:
                    self.log.debug(
                        "Task \"{}\" already exist".format(task_type_name)
                    )
                    continue

                response = self.clockapi.add_tag(task_type_name)
                if "id" not in response:
                    self.log.warning(
                        "Task \"{}\" can't be created. Response: {}".format(
                            task_type_name, response
                        )
                    )

            job["status"] = "done"

        except Exception:
            self.log.warning(
                "Synchronization to clockify failed.",
                exc_info=True
            )

        finally:
            # Any early return or exception leaves the job marked "failed";
            # the final commit persists the job status either way.
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()

        return True
def register(session, **kw):
    """Entry point used by the ftrack event server to hook up the action."""
    action = SyncClocifyServer(session)
    action.register()

View file

@ -1,15 +1,15 @@
import json
from pype.modules.ftrack.lib import BaseAction, statics_icon
from pype.modules.clockify import ClockifyAPI
from pype.modules.clockify.clockify_api import ClockifyAPI
class SyncClocify(BaseAction):
class SyncClocifyLocal(BaseAction):
'''Synchronise project names and task types.'''
#: Action identifier.
identifier = 'clockify.sync'
identifier = 'clockify.sync.local'
#: Action label.
label = 'Sync To Clockify'
label = 'Sync To Clockify (local)'
#: Action description.
description = 'Synchronise data to Clockify workspace'
#: roles that are allowed to register this action
@ -119,4 +119,4 @@ class SyncClocify(BaseAction):
def register(session, **kw):
SyncClocify(session).register()
SyncClocifyLocal(session).register()

View file

@ -1,6 +1,6 @@
from avalon import api, io
from pype.api import Logger
from pype.modules.clockify import ClockifyAPI
from pype.modules.clockify.clockify_api import ClockifyAPI
log = Logger().get_logger(__name__, "clockify_start")

View file

@ -1,5 +1,5 @@
from avalon import api, io
from pype.modules.clockify import ClockifyAPI
from pype.modules.clockify.clockify_api import ClockifyAPI
from pype.api import Logger
log = Logger().get_logger(__name__, "clockify_sync")

View file

@ -1,92 +0,0 @@
from Qt import QtCore, QtGui, QtWidgets
from avalon import style
from pype.api import resources
class MessageWidget(QtWidgets.QWidget):
    """Small window showing one or more rich-text messages with a Close button.

    Emits ``closed`` when the window is closed so the owner can clean up.
    """

    SIZE_W = 300
    SIZE_H = 130

    closed = QtCore.Signal()

    def __init__(self, parent=None, messages=[], title="Message"):
        # NOTE(review): mutable default argument ``messages=[]`` is shared
        # across calls; it is only read here, but ``messages=None`` is safer.
        super(MessageWidget, self).__init__()

        self._parent = parent

        # Icon - reuse the parent's icon when available, else pype's default.
        if parent and hasattr(parent, 'icon'):
            self.setWindowIcon(parent.icon)
        else:
            icon = QtGui.QIcon(resources.pype_icon_filepath())
            self.setWindowIcon(icon)

        self.setWindowFlags(
            QtCore.Qt.WindowCloseButtonHint |
            QtCore.Qt.WindowMinimizeButtonHint
        )

        # Font used for every message label.
        self.font = QtGui.QFont()
        self.font.setFamily("DejaVu Sans Condensed")
        self.font.setPointSize(9)
        self.font.setBold(True)
        self.font.setWeight(50)
        self.font.setKerning(True)

        # Size setting - fixed minimum, slightly stretchable maximum.
        self.resize(self.SIZE_W, self.SIZE_H)
        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
        self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))

        # Style
        self.setStyleSheet(style.load_stylesheet())

        self.setLayout(self._ui_layout(messages))
        self.setWindowTitle(title)

    def _ui_layout(self, messages):
        """Build and return the main layout: one label per message + Close."""
        if not messages:
            messages = ["*Misssing messages (This is a bug)*", ]
        elif not isinstance(messages, (tuple, list)):
            # A single message may be passed directly; wrap it in a list.
            messages = [messages, ]

        main_layout = QtWidgets.QVBoxLayout(self)

        labels = []
        for message in messages:
            label = QtWidgets.QLabel(message)
            label.setFont(self.font)
            label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
            label.setTextFormat(QtCore.Qt.RichText)
            label.setWordWrap(True)

            labels.append(label)
            main_layout.addWidget(label)

        btn_close = QtWidgets.QPushButton("Close")
        btn_close.setToolTip('Close this window')
        btn_close.clicked.connect(self.on_close_clicked)

        btn_group = QtWidgets.QHBoxLayout()
        btn_group.addStretch(1)
        btn_group.addWidget(btn_close)

        main_layout.addLayout(btn_group)

        # Keep references so the widgets stay reachable after construction.
        self.labels = labels
        self.btn_group = btn_group
        self.btn_close = btn_close
        self.main_layout = main_layout

        return main_layout

    def on_close_clicked(self):
        """Handler for the Close button."""
        self.close()

    def close(self, *args, **kwargs):
        """Emit ``closed`` before delegating to the Qt close."""
        self.closed.emit()
        super(MessageWidget, self).close(*args, **kwargs)

View file

@ -1,9 +1,97 @@
import os
from Qt import QtCore, QtGui, QtWidgets
from avalon import style
from pype.api import resources
class MessageWidget(QtWidgets.QWidget):
    """Small window showing one or more rich-text messages with a Close button.

    Emits ``closed`` when the window is closed so the owner can clean up.
    """

    SIZE_W = 300
    SIZE_H = 130

    closed = QtCore.Signal()

    def __init__(self, parent=None, messages=[], title="Message"):
        # NOTE(review): mutable default argument ``messages=[]`` is shared
        # across calls; it is only read here, but ``messages=None`` is safer.
        super(MessageWidget, self).__init__()

        self._parent = parent

        # Icon - reuse the parent's icon when available, else pype's default.
        if parent and hasattr(parent, 'icon'):
            self.setWindowIcon(parent.icon)
        else:
            icon = QtGui.QIcon(resources.pype_icon_filepath())
            self.setWindowIcon(icon)

        self.setWindowFlags(
            QtCore.Qt.WindowCloseButtonHint |
            QtCore.Qt.WindowMinimizeButtonHint
        )

        # Font used for every message label.
        self.font = QtGui.QFont()
        self.font.setFamily("DejaVu Sans Condensed")
        self.font.setPointSize(9)
        self.font.setBold(True)
        self.font.setWeight(50)
        self.font.setKerning(True)

        # Size setting - fixed minimum, slightly stretchable maximum.
        self.resize(self.SIZE_W, self.SIZE_H)
        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
        self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))

        # Style
        self.setStyleSheet(style.load_stylesheet())

        self.setLayout(self._ui_layout(messages))
        self.setWindowTitle(title)

    def _ui_layout(self, messages):
        """Build and return the main layout: one label per message + Close."""
        if not messages:
            messages = ["*Misssing messages (This is a bug)*", ]
        elif not isinstance(messages, (tuple, list)):
            # A single message may be passed directly; wrap it in a list.
            messages = [messages, ]

        main_layout = QtWidgets.QVBoxLayout(self)

        labels = []
        for message in messages:
            label = QtWidgets.QLabel(message)
            label.setFont(self.font)
            label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
            label.setTextFormat(QtCore.Qt.RichText)
            label.setWordWrap(True)

            labels.append(label)
            main_layout.addWidget(label)

        btn_close = QtWidgets.QPushButton("Close")
        btn_close.setToolTip('Close this window')
        btn_close.clicked.connect(self.on_close_clicked)

        btn_group = QtWidgets.QHBoxLayout()
        btn_group.addStretch(1)
        btn_group.addWidget(btn_close)

        main_layout.addLayout(btn_group)

        # Keep references so the widgets stay reachable after construction.
        self.labels = labels
        self.btn_group = btn_group
        self.btn_close = btn_close
        self.main_layout = main_layout

        return main_layout

    def on_close_clicked(self):
        """Handler for the Close button."""
        self.close()

    def close(self, *args, **kwargs):
        """Emit ``closed`` before delegating to the Qt close."""
        self.closed.emit()
        super(MessageWidget, self).close(*args, **kwargs)
class ClockifySettings(QtWidgets.QWidget):
SIZE_W = 300

View file

@ -1,10 +1,8 @@
import os
import time
import traceback
from pype.modules.ftrack import BaseAction
from pype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory
from pype.api import config
class SyncToAvalonServer(BaseAction):
@ -38,17 +36,6 @@ class SyncToAvalonServer(BaseAction):
variant = "- Sync To Avalon (Server)"
#: Action description.
description = "Send data from Ftrack to Avalon"
#: Action icon.
icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
os.environ.get(
"PYPE_STATICS_SERVER",
"http://localhost:{}".format(
config.get_presets().get("services", {}).get(
"rest_api", {}
).get("default_port", 8021)
)
)
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

View file

@ -84,6 +84,9 @@ class VersionToTaskStatus(BaseEvent):
if not task:
continue
if version["asset"]["type"]["short"].lower() == "scene":
continue
project_schema = task["project"]["project_schema"]
# Get all available statuses for Task
statuses = project_schema.get_statuses("Task", task["type_id"])

View file

@ -522,6 +522,21 @@ def main(argv):
help="Load creadentials from apps dir",
action="store_true"
)
parser.add_argument(
"-clockifyapikey", type=str,
help=(
"Enter API key for Clockify actions."
" (default from environment: $CLOCKIFY_API_KEY)"
)
)
parser.add_argument(
"-clockifyworkspace", type=str,
help=(
"Enter workspace for Clockify."
" (default from module presets or "
"environment: $CLOCKIFY_WORKSPACE)"
)
)
ftrack_url = os.environ.get('FTRACK_SERVER')
username = os.environ.get('FTRACK_API_USER')
api_key = os.environ.get('FTRACK_API_KEY')
@ -546,6 +561,12 @@ def main(argv):
if kwargs.ftrackapikey:
api_key = kwargs.ftrackapikey
if kwargs.clockifyworkspace:
os.environ["CLOCKIFY_WORKSPACE"] = kwargs.clockifyworkspace
if kwargs.clockifyapikey:
os.environ["CLOCKIFY_API_KEY"] = kwargs.clockifyapikey
legacy = kwargs.legacy
# Check url regex and accessibility
ftrack_url = check_ftrack_url(ftrack_url)

View file

@ -9,7 +9,7 @@ from pype.modules.ftrack.ftrack_server.lib import (
SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER
)
import ftrack_api
from pype.api import Logger
from pype.api import Logger, config
log = Logger().get_logger("Event processor")
@ -55,6 +55,42 @@ def register(session):
)
def clockify_module_registration():
    """Enable Clockify ftrack event handlers when the tray module is on.

    Requires the Clockify module enabled in presets and CLOCKIFY_API_KEY set;
    the workspace name comes from CLOCKIFY_WORKSPACE or module presets.
    On success appends the Clockify server handler path to
    FTRACK_EVENTS_PATH and returns True, otherwise returns None.
    """
    module_name = "Clockify"
    menu_items = config.get_presets()["tray"]["menu_items"]
    # NOTE(review): raises KeyError when the module is missing from the
    # "item_usage" presets - the caller wraps this call in a broad
    # try/except, so that surfaces only as a logged failure.
    if not menu_items["item_usage"][module_name]:
        return

    api_key = os.environ.get("CLOCKIFY_API_KEY")
    if not api_key:
        log.warning("Clockify API key is not set.")
        return

    workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
    if not workspace_name:
        # Fall back to the workspace configured in module presets.
        workspace_name = (
            menu_items
            .get("attributes", {})
            .get(module_name, {})
            .get("workspace_name", {})
        )

    if not workspace_name:
        log.warning("Clockify Workspace is not set.")
        return

    os.environ["CLOCKIFY_WORKSPACE"] = workspace_name

    # Imported lazily so the Clockify module is only required when enabled.
    from pype.modules.clockify.constants import CLOCKIFY_FTRACK_SERVER_PATH

    # Append the Clockify server handler path to any existing event paths.
    current = os.environ.get("FTRACK_EVENTS_PATH") or ""
    if current:
        current += os.pathsep

    os.environ["FTRACK_EVENTS_PATH"] = current + CLOCKIFY_FTRACK_SERVER_PATH
    return True
def main(args):
port = int(args[-1])
# Create a TCP/IP socket
@ -66,6 +102,11 @@ def main(args):
sock.connect(server_address)
sock.sendall(b"CreatedProcess")
try:
clockify_module_registration()
except Exception:
log.info("Clockify registration failed.", exc_info=True)
try:
session = SocketSession(
auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub

View file

@ -8,7 +8,9 @@ import getpass
from pype import lib as pypelib
from pype.api import config, Anatomy
from .ftrack_action_handler import BaseAction
from avalon.api import last_workfile, HOST_WORKFILE_EXTENSIONS
from avalon.api import (
last_workfile, HOST_WORKFILE_EXTENSIONS, should_start_last_workfile
)
class AppAction(BaseAction):
@ -84,7 +86,7 @@ class AppAction(BaseAction):
if (
len(entities) != 1
or entities[0].entity_type.lower() != 'task'
or entities[0].entity_type.lower() != "task"
):
return False
@ -92,21 +94,31 @@ class AppAction(BaseAction):
if entity["parent"].entity_type.lower() == "project":
return False
ft_project = self.get_project_from_entity(entity)
database = pypelib.get_avalon_database()
project_name = ft_project["full_name"]
avalon_project = database[project_name].find_one({
"type": "project"
})
avalon_project_apps = event["data"].get("avalon_project_apps", None)
avalon_project_doc = event["data"].get("avalon_project_doc", None)
if avalon_project_apps is None:
if avalon_project_doc is None:
ft_project = self.get_project_from_entity(entity)
database = pypelib.get_avalon_database()
project_name = ft_project["full_name"]
avalon_project_doc = database[project_name].find_one({
"type": "project"
}) or False
event["data"]["avalon_project_doc"] = avalon_project_doc
if not avalon_project:
if not avalon_project_doc:
return False
project_apps_config = avalon_project_doc["config"].get("apps", [])
avalon_project_apps = [
app["name"] for app in project_apps_config
] or False
event["data"]["avalon_project_apps"] = avalon_project_apps
if not avalon_project_apps:
return False
project_apps = avalon_project["config"].get("apps", [])
apps = [app["name"] for app in project_apps]
if self.identifier in apps:
return True
return False
return self.identifier in avalon_project_apps
def _launch(self, event):
entities = self._translate_event(event)
@ -142,6 +154,9 @@ class AppAction(BaseAction):
"""
entity = entities[0]
task_name = entity["name"]
project_name = entity["project"]["full_name"]
database = pypelib.get_avalon_database()
@ -164,7 +179,7 @@ class AppAction(BaseAction):
"name": entity["project"]["full_name"],
"code": entity["project"]["name"]
},
"task": entity["name"],
"task": task_name,
"asset": asset_name,
"app": host_name,
"hierarchy": hierarchy
@ -210,14 +225,28 @@ class AppAction(BaseAction):
prep_env.update({
"AVALON_PROJECT": project_name,
"AVALON_ASSET": asset_name,
"AVALON_TASK": entity["name"],
"AVALON_APP": self.identifier.split("_")[0],
"AVALON_TASK": task_name,
"AVALON_APP": host_name,
"AVALON_APP_NAME": self.identifier,
"AVALON_HIERARCHY": hierarchy,
"AVALON_WORKDIR": workdir
})
if last_workfile_path and os.path.exists(last_workfile_path):
start_last_workfile = should_start_last_workfile(
project_name, host_name, task_name
)
# Store boolean as "0"(False) or "1"(True)
prep_env["AVALON_OPEN_LAST_WORKFILE"] = (
str(int(bool(start_last_workfile)))
)
if (
start_last_workfile
and last_workfile_path
and os.path.exists(last_workfile_path)
):
prep_env["AVALON_LAST_WORKFILE"] = last_workfile_path
prep_env.update(anatomy.roots_obj.root_environments())
# collect all parents from the task

View file

@ -10,9 +10,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.495
families = ["render.farm"]
# Presets
anatomy_render_key = None
anatomy_publish_render_key = None
def process(self, instance):
anatomy = instance.context.data["anatomy"]
anatomy_data = copy.deepcopy(instance.data["anatomyData"])
anatomy_data["family"] = "render"
padding = anatomy.templates.get("frame_padding", 4)
anatomy_data.update({
"frame": f"%0{padding}d",
@ -21,12 +26,28 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
anatomy_filled = anatomy.format(anatomy_data)
render_dir = anatomy_filled["render_tmp"]["folder"]
render_path = anatomy_filled["render_tmp"]["path"]
# get anatomy rendering keys
anatomy_render_key = self.anatomy_render_key or "render"
anatomy_publish_render_key = self.anatomy_publish_render_key or "render"
# get folder and path for rendering images from celaction
render_dir = anatomy_filled[anatomy_render_key]["folder"]
render_path = anatomy_filled[anatomy_render_key]["path"]
# create dir if it doesn't exist
os.makedirs(render_dir, exist_ok=True)
try:
if not os.path.isdir(render_dir):
os.makedirs(render_dir, exist_ok=True)
except OSError:
# directory is not available
self.log.warning("Path is unreachable: `{}`".format(render_dir))
# add rendering path to instance data
instance.data["path"] = render_path
# get anatomy for published renders folder path
if anatomy_filled.get(anatomy_publish_render_key):
instance.data["publishRenderFolder"] = anatomy_filled[
anatomy_publish_render_key]["folder"]
self.log.info(f"Render output path set to: `{render_path}`")

View file

@ -74,6 +74,7 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
resolution_width = instance.data["resolutionWidth"]
resolution_height = instance.data["resolutionHeight"]
render_dir = os.path.normpath(os.path.dirname(render_path))
render_path = os.path.normpath(render_path)
script_name = os.path.basename(script_path)
jobname = "%s - %s" % (script_name, instance.name)
@ -98,6 +99,7 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
args = [
f"<QUOTE>{script_path}<QUOTE>",
"-a",
"-16",
"-s <STARTFRAME>",
"-e <ENDFRAME>",
f"-d <QUOTE>{render_dir}<QUOTE>",
@ -135,8 +137,10 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
# Optional, enable double-click to preview rendered
# frames from Deadline Monitor
"OutputFilename0": output_filename_0.replace("\\", "/")
"OutputFilename0": output_filename_0.replace("\\", "/"),
# # Asset dependency to wait for at least the scene file to sync.
# "AssetDependency0": script_path
},
"PluginInfo": {
# Input

View file

@ -96,6 +96,6 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
task_entity = None
self.log.warning("Task name is not set.")
context.data["ftrackProject"] = asset_entity
context.data["ftrackProject"] = project_entity
context.data["ftrackEntity"] = asset_entity
context.data["ftrackTask"] = task_entity

View file

@ -54,8 +54,52 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
self.log.debug(query)
return query
def process(self, instance):
def _set_task_status(self, instance, task_entity, session):
    """Set the ftrack task's status by name stored in context data.

    Reads "ftrackStatus" from the instance's context data and, when the
    project, the task and the status name are all known, assigns the
    matching status (matched case-insensitively against the statuses
    allowed by the project schema) to the task entity and commits.

    Args:
        instance: Pyblish instance whose context holds ftrack data.
        task_entity: Ftrack task entity to update (may be None).
        session: Ftrack session used to commit the change.
    """
    project_entity = instance.context.data.get("ftrackProject")
    if not project_entity:
        self.log.info("Task status won't be set, project is not known.")
        return

    if not task_entity:
        self.log.info("Task status won't be set, task is not known.")
        return

    status_name = instance.context.data.get("ftrackStatus")
    if not status_name:
        self.log.info("Ftrack status name is not set.")
        return

    self.log.debug(
        "Ftrack status name will be (maybe) set to \"{}\"".format(
            status_name
        )
    )

    # Only statuses allowed by the project schema for this task type
    # may be assigned.
    project_schema = project_entity["project_schema"]
    task_statuses = project_schema.get_statuses(
        "Task", task_entity["type_id"]
    )
    task_statuses_by_low_name = {
        status["name"].lower(): status for status in task_statuses
    }
    status = task_statuses_by_low_name.get(status_name.lower())
    if not status:
        self.log.warning((
            "Task status \"{}\" won't be set,"
            " status is not allowed on task type \"{}\"."
        ).format(status_name, task_entity["type"]["name"]))
        return

    self.log.info("Setting task status to \"{}\"".format(status_name))
    task_entity["status"] = status
    try:
        session.commit()
    except Exception:
        # Roll back so the session stays usable, then re-raise with the
        # original traceback (six keeps this py2/py3 compatible).
        tp, value, tb = sys.exc_info()
        session.rollback()
        six.reraise(tp, value, tb)
def process(self, instance):
session = instance.context.data["ftrackSession"]
if instance.data.get("ftrackTask"):
task = instance.data["ftrackTask"]
@ -78,9 +122,11 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
info_msg += ", metadata: {metadata}."
used_asset_versions = []
self._set_task_status(instance, task, session)
# Iterate over components and publish
for data in instance.data.get("ftrackComponentsList", []):
# AssetType
# Get existing entity.
assettype_data = {"short": "upload"}
@ -94,9 +140,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
# Create a new entity if none exits.
if not assettype_entity:
assettype_entity = session.create("AssetType", assettype_data)
self.log.debug(
"Created new AssetType with data: ".format(assettype_data)
)
self.log.debug("Created new AssetType with data: {}".format(
assettype_data
))
# Asset
# Get existing entity.

View file

@ -1,9 +1,13 @@
import sys
import six
import pyblish.api
from avalon import io
try:
from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_AUTO_SYNC
except Exception:
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
@ -39,15 +43,32 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
if "hierarchyContext" not in context.data:
return
self.session = self.context.data["ftrackSession"]
project_name = self.context.data["projectEntity"]["name"]
query = 'Project where full_name is "{}"'.format(project_name)
project = self.session.query(query).one()
auto_sync_state = project[
"custom_attributes"][CUST_ATTR_AUTO_SYNC]
if not io.Session:
io.install()
self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"]
self.import_to_ftrack(input_data)
# temporarily disable the ftrack project's auto-sync
if auto_sync_state:
self.auto_sync_off(project)
try:
# import ftrack hierarchy
self.import_to_ftrack(input_data)
except Exception:
raise
finally:
if auto_sync_state:
self.auto_sync_on(project)
def import_to_ftrack(self, input_data, parent=None):
for entity_name in input_data:
@ -217,3 +238,28 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
six.reraise(tp, value, tb)
return entity
def auto_sync_off(self, project):
    """Disable the ftrack project's auto-sync custom attribute.

    Args:
        project: Ftrack project entity whose custom attributes are
            modified; the change is committed immediately.
    """
    project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False
    self.log.info("Ftrack autosync switched off")

    try:
        self.session.commit()
    except Exception:
        # Roll back so the session stays usable, then re-raise.
        self.session.rollback()
        raise
def auto_sync_on(self, project):
    """Enable the ftrack project's auto-sync custom attribute.

    Args:
        project: Ftrack project entity whose custom attributes are
            modified; the change is committed immediately.
    """
    project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True
    self.log.info("Ftrack autosync switched on")

    try:
        self.session.commit()
    except Exception:
        # Roll back so the session stays usable, then re-raise.
        self.session.rollback()
        raise

View file

@ -551,12 +551,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# copy file with speedcopy and check if size of files are simetrical
while True:
import shutil
try:
copyfile(src, dst)
except (OSError, AttributeError) as e:
self.log.warning(e)
# try it again with shutil
import shutil
except shutil.SameFileError as sfe:
self.log.critical("files are the same {} to {}".format(src, dst))
os.remove(dst)
try:
shutil.copyfile(src, dst)
self.log.debug("Copying files with shutil...")
@ -740,6 +740,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
value += 1
if value > highest_value:
matching_profiles = {}
highest_value = value
if value == highest_value:

View file

@ -12,7 +12,15 @@ from avalon.vendor import requests, clique
import pyblish.api
def _get_script():
def _get_script(path):
# pass input path if exists
if path:
if os.path.exists(path):
return str(path)
else:
raise
"""Get path to the image sequence script."""
try:
from pathlib import Path
@ -192,6 +200,38 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
families_transfer = ["render3d", "render2d", "ftrack", "slate"]
plugin_python_version = "3.7"
# script path for publish_filesequence.py
publishing_script = None
def _create_metadata_path(self, instance):
    """Build absolute and rootless paths for the publish metadata json.

    The output directory prefers "publishRenderFolder" over "outputDir"
    in instance data and is created if missing. The rootless variant
    ("{root}/rest/of/path/...") is meant for farm machines where local
    root mounts may differ.

    Args:
        instance: Pyblish instance carrying "subset" and output dir data.

    Returns:
        tuple: (metadata_path, rootless_metadata_path)
    """
    ins_data = instance.data
    # Ensure output dir exists
    output_dir = ins_data.get("publishRenderFolder", ins_data["outputDir"])

    try:
        if not os.path.isdir(output_dir):
            os.makedirs(output_dir)
    except OSError:
        # Directory is not available; keep going so the path can still
        # be reported, but warn that writing will likely fail.
        self.log.warning("Path is unreachable: `{}`".format(output_dir))

    metadata_filename = "{}_metadata.json".format(ins_data["subset"])
    metadata_path = os.path.join(output_dir, metadata_filename)

    # Convert output dir to `{root}/rest/of/path/...` with Anatomy
    success, rootless_metadata_path = (
        self.anatomy.find_root_template_from_path(metadata_path)
    )
    if not success:
        # Fall back to the absolute path when none of the roots match.
        self.log.warning((
            "Could not find root path for remapping \"{}\"."
            " This may cause issues on farm."
        ).format(output_dir))
        rootless_metadata_path = metadata_path

    return (metadata_path, rootless_metadata_path)
def _submit_deadline_post_job(self, instance, job):
"""Submit publish job to Deadline.
@ -205,17 +245,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
job_name = "Publish - {subset}".format(subset=subset)
output_dir = instance.data["outputDir"]
# Convert output dir to `{root}/rest/of/path/...` with Anatomy
success, rootless_path = (
self.anatomy.find_root_template_from_path(output_dir)
)
if not success:
# `rootless_path` is not set to `output_dir` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(output_dir))
rootless_path = output_dir
# Generate the payload for Deadline submission
payload = {
@ -239,7 +268,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
},
"PluginInfo": {
"Version": self.plugin_python_version,
"ScriptFile": _get_script(),
"ScriptFile": _get_script(self.publishing_script),
"Arguments": "",
"SingleFrameOnly": "True",
},
@ -249,11 +278,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Transfer the environment from the original job to this dependent
# job so they use the same environment
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(rootless_path, metadata_filename)
metadata_path, roothless_metadata_path = self._create_metadata_path(
instance)
environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = metadata_path
environment["PYPE_METADATA_FILE"] = roothless_metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1"
try:
@ -854,14 +883,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
}
publish_job.update({"ftrack": ftrack})
# Ensure output dir exists
output_dir = instance.data["outputDir"]
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
metadata_path, roothless_metadata_path = self._create_metadata_path(
instance)
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(output_dir, metadata_filename)
self.log.info("Writing json file: {}".format(metadata_path))
with open(metadata_path, "w") as f:
json.dump(publish_job, f, indent=4, sort_keys=True)

View file

@ -9,7 +9,7 @@ from avalon import api, harmony
class ImportTemplateLoader(api.Loader):
"""Import templates."""
families = ["harmony.template"]
families = ["harmony.template", "workfile"]
representations = ["*"]
label = "Import Template"

View file

@ -111,13 +111,22 @@ class ExtractRender(pyblish.api.InstancePlugin):
# Generate mov.
mov_path = os.path.join(path, instance.data["name"] + ".mov")
args = [
"ffmpeg", "-y",
"-i", audio_path,
"-i",
os.path.join(path, collection.head + "%04d" + collection.tail),
mov_path
]
if os.path.isfile(audio_path):
args = [
"ffmpeg", "-y",
"-i", audio_path,
"-i",
os.path.join(path, collection.head + "%04d" + collection.tail),
mov_path
]
else:
args = [
"ffmpeg", "-y",
"-i",
os.path.join(path, collection.head + "%04d" + collection.tail),
mov_path
]
process = subprocess.Popen(
args,
stdout=subprocess.PIPE,

View file

@ -0,0 +1,37 @@
import json
import os
import pyblish.api
import avalon.harmony
import pype.hosts.harmony
class ValidateAudio(pyblish.api.InstancePlugin):
    """Ensure the audio file referenced by the scene exists on disk.

    If you are sure you want to send the render without audio, disable
    this validator before clicking "publish".
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Audio"
    families = ["render"]
    hosts = ["harmony"]
    optional = True

    def process(self, instance):
        """Fail when the scene's soundtrack path is not an existing file."""
        # Collect scene data.
        # Harmony-side script returning the soundtrack file path; it is
        # executed inside Harmony via avalon's remote-call bridge.
        func = """function func(write_node)
{
    return [
        sound.getSoundtrackAll().path()
    ]
}
func
"""
        result = avalon.harmony.send(
            {"function": func, "args": [instance[0]]}
        )["result"]

        # First (and only) element is the soundtrack path string.
        audio_path = result[0]

        msg = "You are missing audio file:\n{}".format(audio_path)
        assert os.path.isfile(audio_path), msg

View file

@ -49,6 +49,24 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
render_path = instance.data['path']
script_path = context.data["currentFile"]
for item in context:
if "workfile" in item.data["families"]:
msg = "Workfile (scene) must be published along"
assert item.data["publish"] is True, msg
template_data = item.data.get("anatomyData")
rep = item.data.get("representations")[0].get("name")
template_data["representation"] = rep
template_data["ext"] = rep
template_data["comment"] = None
anatomy_filled = context.data["anatomy"].format(template_data)
template_filled = anatomy_filled["publish"]["path"]
script_path = os.path.normpath(template_filled)
self.log.info(
"Using published scene for render {}".format(script_path)
)
# exception for slate workflow
if "slate" in instance.data["families"]:
self._frame_start -= 1

View file

@ -1,55 +0,0 @@
import sys
import pyblish.api
import pype.api
import avalon.api
try:
from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_AUTO_SYNC
except Exception:
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
    """Ensure the ftrack project's "Auto sync" attribute is set to False.

    If it is set to True while the event server running the
    sync-to-avalon event is active, the integration to avalon may be
    overridden.
    """

    order = pyblish.api.ValidatorOrder
    families = ['clip']
    label = 'Ftrack project\'s auto sync off'
    actions = [pype.api.RepairContextAction]

    def process(self, context):
        """Raise when the project still has auto-sync enabled."""
        invalid = self.get_invalid(context)
        assert not invalid, (
            "Ftrack Project has 'Auto sync' set to On."
            " That may cause issues during integration."
        )

    @staticmethod
    def get_invalid(context):
        """Return the ftrack project entity if auto-sync is enabled.

        Returns None (implicitly) when the project is valid.
        """
        session = context.data["ftrackSession"]
        project_name = avalon.api.Session["AVALON_PROJECT"]
        query = 'Project where full_name is "{}"'.format(project_name)
        project = session.query(query).one()

        if project["custom_attributes"][CUST_ATTR_AUTO_SYNC]:
            return project

    @classmethod
    def repair(cls, context):
        """Switch the project's auto-sync custom attribute off."""
        session = context.data["ftrackSession"]
        invalid = cls.get_invalid(context)
        if not invalid:
            cls.log.info("Project 'Auto sync' already fixed.")
            return

        invalid["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False
        try:
            session.commit()
        except Exception:
            # Roll back so the session stays usable, then re-raise.
            session.rollback()
            raise

View file

@ -183,7 +183,18 @@ class Controller(QtCore.QObject):
plugins = pyblish.api.discover()
targets = pyblish.logic.registered_targets() or ["default"]
self.plugins = pyblish.logic.plugins_by_targets(plugins, targets)
plugins_by_targets = pyblish.logic.plugins_by_targets(plugins, targets)
_plugins = []
for plugin in plugins_by_targets:
# Skip plugin if is not optional and not active
if (
not getattr(plugin, "optional", False)
and not getattr(plugin, "active", True)
):
continue
_plugins.append(plugin)
self.plugins = _plugins
def on_published(self):
if self.is_running:

View file

@ -4,6 +4,11 @@ import platform
from avalon import style
from Qt import QtCore, QtGui, QtWidgets, QtSvg
from pype.api import config, Logger, resources
import pype.version
try:
import configparser
except Exception:
import ConfigParser as configparser
class TrayManager:
@ -100,6 +105,8 @@ class TrayManager:
if items and self.services_submenu is not None:
self.add_separator(self.tray_widget.menu)
self._add_version_item()
# Add Exit action to menu
aExit = QtWidgets.QAction("&Exit", self.tray_widget)
aExit.triggered.connect(self.tray_widget.exit)
@ -109,6 +116,34 @@ class TrayManager:
self.connect_modules()
self.start_modules()
def _add_version_item(self):
    """Append a version label entry (plus separator) to the tray menu.

    The label is pype's version, optionally extended with "subversion"
    and "client_name" values read from the [CLIENT] section of
    $PYPE_SETUP_PATH/pypeapp/config.ini when that file exists.
    """
    config_file_path = os.path.join(
        os.environ["PYPE_SETUP_PATH"], "pypeapp", "config.ini"
    )
    default_config = {}
    if os.path.exists(config_file_path):
        # NOTE: named `parser` so it does not shadow `config` imported
        # from pype.api at module level.
        parser = configparser.ConfigParser()
        parser.read(config_file_path)
        try:
            default_config = parser["CLIENT"]
        except Exception:
            # Missing [CLIENT] section is fine; keep empty defaults.
            pass

    subversion = default_config.get("subversion")
    client_name = default_config.get("client_name")

    version_string = pype.version.__version__
    if subversion:
        version_string += " ({})".format(subversion)

    if client_name:
        version_string += ", {}".format(client_name)

    version_action = QtWidgets.QAction(version_string, self.tray_widget)
    self.tray_widget.menu.addAction(version_action)
    self.add_separator(self.tray_widget.menu)
def process_items(self, items, parent_menu):
""" Loop through items and add them to parent_menu.