Merge branch 'develop' into feature/241-config_gui

Author: iLLiCiTiT, 2020-07-27 17:14:06 +02:00
Commit: e867c88ac2
56 changed files with 1096 additions and 613 deletions

.gitignore vendored
View file

@@ -5,6 +5,31 @@ __pycache__/
 *.py[cod]
 *$py.class
 
+# Mac Stuff
+###########
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+# Icon must end with two \r
+Icon
+# Thumbnails
+._*
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
 # Documentation
 ###############
 /docs/build

View file

@@ -106,7 +106,7 @@ class CelactionPrelaunchHook(PypeHook):
             f"--project {project}",
             f"--asset {asset}",
             f"--task {task}",
-            "--currentFile \"*SCENE*\"",
+            "--currentFile \\\"\"*SCENE*\"\\\"",
             "--chunk *CHUNK*",
             "--frameStart *START*",
             "--frameEnd *END*",

View file

@@ -45,8 +45,9 @@ def get_unique_number(
 def prepare_data(data, container_name):
     name = data.name
-    data = data.make_local()
-    data.name = f"{name}:{container_name}"
+    local_data = data.make_local()
+    local_data.name = f"{name}:{container_name}"
+    return local_data
 
 
 def create_blender_context(active: Optional[bpy.types.Object] = None,

View file

@@ -46,9 +46,6 @@ def cli():
     parser.add_argument("--resolutionHeight",
                         help=("Height of resolution"))
 
-    # parser.add_argument("--programDir",
-    #                     help=("Directory with celaction program installation"))
-
     celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__
 
@@ -78,7 +75,7 @@ def _prepare_publish_environments():
     env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR")
     env["AVALON_HIERARCHY"] = hierarchy
     env["AVALON_PROJECTCODE"] = project_doc["data"].get("code", "")
-    env["AVALON_APP"] = publish_host
+    env["AVALON_APP"] = f"hosts.{publish_host}"
     env["AVALON_APP_NAME"] = "celaction_local"
     env["PYBLISH_HOSTS"] = publish_host

View file

@@ -69,17 +69,38 @@ def override_component_mask_commands():
 def override_toolbox_ui():
     """Add custom buttons in Toolbox as replacement for Maya web help icon."""
-    import pype
-    res = os.path.join(os.path.dirname(os.path.dirname(pype.__file__)),
-                       "res")
-    icons = os.path.join(res, "icons")
-    import avalon.tools.sceneinventory as inventory
-    import avalon.tools.loader as loader
-    from avalon.maya.pipeline import launch_workfiles_app
-    import mayalookassigner
+    inventory = None
+    loader = None
+    launch_workfiles_app = None
+    mayalookassigner = None
+    try:
+        import avalon.tools.sceneinventory as inventory
+    except Exception:
+        log.warning("Could not import SceneInventory tool")
+
+    try:
+        import avalon.tools.loader as loader
+    except Exception:
+        log.warning("Could not import Loader tool")
+
+    try:
+        from avalon.maya.pipeline import launch_workfiles_app
+    except Exception:
+        log.warning("Could not import Workfiles tool")
+
+    try:
+        import mayalookassigner
+    except Exception:
+        log.warning("Could not import Maya Look assigner tool")
+
+    from pype.api import resources
+    icons = resources.get_resource("icons")
+
+    if not any((
+        mayalookassigner, launch_workfiles_app, loader, inventory
+    )):
+        return
 
     # Ensure the maya web icon on toolbox exists
     web_button = "ToolBox|MainToolboxLayout|mayaWebButton"
 
@@ -99,65 +120,65 @@ def override_toolbox_ui():
     # Create our controls
     background_color = (0.267, 0.267, 0.267)
     controls = []
 
-    control = mc.iconTextButton(
-        "pype_toolbox_lookmanager",
-        annotation="Look Manager",
-        label="Look Manager",
-        image=os.path.join(icons, "lookmanager.png"),
-        command=lambda: mayalookassigner.show(),
-        bgc=background_color,
-        width=icon_size,
-        height=icon_size,
-        parent=parent)
-    controls.append(control)
-
-    control = mc.iconTextButton(
-        "pype_toolbox_workfiles",
-        annotation="Work Files",
-        label="Work Files",
-        image=os.path.join(icons, "workfiles.png"),
-        command=lambda: launch_workfiles_app(),
-        bgc=background_color,
-        width=icon_size,
-        height=icon_size,
-        parent=parent)
-    controls.append(control)
-
-    control = mc.iconTextButton(
-        "pype_toolbox_loader",
-        annotation="Loader",
-        label="Loader",
-        image=os.path.join(icons, "loader.png"),
-        command=lambda: loader.show(use_context=True),
-        bgc=background_color,
-        width=icon_size,
-        height=icon_size,
-        parent=parent)
-    controls.append(control)
-
-    control = mc.iconTextButton(
-        "pype_toolbox_manager",
-        annotation="Inventory",
-        label="Inventory",
-        image=os.path.join(icons, "inventory.png"),
-        command=lambda: inventory.show(),
-        bgc=background_color,
-        width=icon_size,
-        height=icon_size,
-        parent=parent)
-    controls.append(control)
-
-    # control = mc.iconTextButton(
-    #     "pype_toolbox",
-    #     annotation="Kredenc",
-    #     label="Kredenc",
-    #     image=os.path.join(icons, "kredenc_logo.png"),
-    #     bgc=background_color,
-    #     width=icon_size,
-    #     height=icon_size,
-    #     parent=parent)
-    # controls.append(control)
+    if mayalookassigner:
+        controls.append(
+            mc.iconTextButton(
+                "pype_toolbox_lookmanager",
+                annotation="Look Manager",
+                label="Look Manager",
+                image=os.path.join(icons, "lookmanager.png"),
+                command=lambda: mayalookassigner.show(),
+                bgc=background_color,
+                width=icon_size,
+                height=icon_size,
+                parent=parent
+            )
+        )
+
+    if launch_workfiles_app:
+        controls.append(
+            mc.iconTextButton(
+                "pype_toolbox_workfiles",
+                annotation="Work Files",
+                label="Work Files",
+                image=os.path.join(icons, "workfiles.png"),
+                command=lambda: launch_workfiles_app(),
+                bgc=background_color,
+                width=icon_size,
+                height=icon_size,
+                parent=parent
+            )
+        )
+
+    if loader:
+        controls.append(
+            mc.iconTextButton(
+                "pype_toolbox_loader",
+                annotation="Loader",
+                label="Loader",
+                image=os.path.join(icons, "loader.png"),
+                command=lambda: loader.show(use_context=True),
+                bgc=background_color,
+                width=icon_size,
+                height=icon_size,
+                parent=parent
+            )
+        )
+
+    if inventory:
+        controls.append(
+            mc.iconTextButton(
+                "pype_toolbox_manager",
+                annotation="Inventory",
+                label="Inventory",
+                image=os.path.join(icons, "inventory.png"),
+                command=lambda: inventory.show(),
+                bgc=background_color,
+                width=icon_size,
+                height=icon_size,
+                parent=parent
+            )
+        )
 
     # Add the buttons on the bottom and stack
     # them above each other with side padding
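Note: the rewritten override_toolbox_ui() wraps each Avalon tool import in try/except and only builds buttons for tools that actually imported. A minimal sketch of that guarded-import pattern outside Maya; the module name "some_optional_tool" is a placeholder, not a real dependency.

import importlib
import logging

log = logging.getLogger(__name__)


def import_optional(module_name):
    """Return the module if it can be imported, otherwise None."""
    try:
        return importlib.import_module(module_name)
    except Exception:
        log.warning("Could not import %s", module_name)
        return None


tool = import_optional("some_optional_tool")
if tool is None:
    # Skip building UI for tools that are unavailable.
    pass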

View file

@@ -1,6 +1,7 @@
 import os
 import nuke
 from avalon.nuke import lib as anlib
+from pype.api import resources
 
 
 def set_context_favorites(favorites={}):
 
@@ -9,9 +10,7 @@ def set_context_favorites(favorites={}):
     Argumets:
         favorites (dict): couples of {name:path}
     """
-    dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
-    icon_path = os.path.join(dir, 'res', 'icons', 'folder-favorite3.png')
+    icon_path = resources.get_resource("icons", "folder-favorite3.png")
     for name, path in favorites.items():
         nuke.addFavoriteDir(
             name,

View file

@@ -534,7 +534,9 @@ $.pype = {
     if (instances === null) {
       return null;
     }
-    if (audioOnly === true) {
+
+    // make only audio representations
+    if (audioOnly === 'true') {
       $.pype.log('? looping if audio True');
       for (var i = 0; i < instances.length; i++) {
         var subsetToRepresentations = instances[i].subsetToRepresentations;

View file

@@ -1,14 +1,7 @@
-from .clockify_api import ClockifyAPI
-from .widget_settings import ClockifySettings
-from .widget_message import MessageWidget
 from .clockify import ClockifyModule
 
-__all__ = [
-    "ClockifyAPI",
-    "ClockifySettings",
-    "ClockifyModule",
-    "MessageWidget"
-]
+CLASS_DEFINIION = ClockifyModule
 
 
 def tray_init(tray_widget, main_widget):
     return ClockifyModule(main_widget, tray_widget)

View file

@@ -3,17 +3,25 @@ import threading
 from pype.api import Logger
 from avalon import style
 from Qt import QtWidgets
-from . import ClockifySettings, ClockifyAPI, MessageWidget
+from .widgets import ClockifySettings, MessageWidget
+from .clockify_api import ClockifyAPI
+from .constants import CLOCKIFY_FTRACK_USER_PATH
 
 
 class ClockifyModule:
+    workspace_name = None
+
     def __init__(self, main_parent=None, parent=None):
+        if not self.workspace_name:
+            raise Exception("Clockify Workspace is not set in config.")
+
+        os.environ["CLOCKIFY_WORKSPACE"] = self.workspace_name
+
         self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")
         self.main_parent = main_parent
         self.parent = parent
-        self.clockapi = ClockifyAPI()
+        self.clockapi = ClockifyAPI(master_parent=self)
         self.message_widget = None
         self.widget_settings = ClockifySettings(main_parent, self)
         self.widget_settings_required = None
 
@@ -24,8 +32,6 @@ class ClockifyModule:
         self.bool_api_key_set = False
         self.bool_workspace_set = False
         self.bool_timer_run = False
-
-        self.clockapi.set_master(self)
         self.bool_api_key_set = self.clockapi.set_api()
 
     def tray_start(self):
 
@@ -43,14 +49,12 @@ class ClockifyModule:
     def process_modules(self, modules):
         if 'FtrackModule' in modules:
-            actions_path = os.path.sep.join([
-                os.path.dirname(__file__),
-                'ftrack_actions'
-            ])
             current = os.environ.get('FTRACK_ACTIONS_PATH', '')
             if current:
                 current += os.pathsep
-            os.environ['FTRACK_ACTIONS_PATH'] = current + actions_path
+            os.environ['FTRACK_ACTIONS_PATH'] = (
+                current + CLOCKIFY_FTRACK_USER_PATH
+            )
 
         if 'AvalonApps' in modules:
             from launcher import lib
 
@@ -188,9 +192,10 @@ class ClockifyModule:
             ).format(project_name))
             msg = (
-                "Project <b>\"{}\"</b> is not in Clockify Workspace <b>\"{}\"</b>."
+                "Project <b>\"{}\"</b> is not"
+                " in Clockify Workspace <b>\"{}\"</b>."
                 "<br><br>Please inform your Project Manager."
-            ).format(project_name, str(self.clockapi.workspace))
+            ).format(project_name, str(self.clockapi.workspace_name))
             self.message_widget = MessageWidget(
                 self.main_parent, msg, "Clockify - Info Message"

View file

@@ -1,35 +1,39 @@
 import os
 import re
+import time
 import requests
 import json
 import datetime
-import appdirs
+from .constants import (
+    CLOCKIFY_ENDPOINT, ADMIN_PERMISSION_NAMES, CREDENTIALS_JSON_PATH
+)
 
 
-class Singleton(type):
-    _instances = {}
+def time_check(obj):
+    if obj.request_counter < 10:
+        obj.request_counter += 1
+        return
 
-    def __call__(cls, *args, **kwargs):
-        if cls not in cls._instances:
-            cls._instances[cls] = super(
-                Singleton, cls
-            ).__call__(*args, **kwargs)
-        return cls._instances[cls]
+    wait_time = 1 - (time.time() - obj.request_time)
+    if wait_time > 0:
+        time.sleep(wait_time)
 
+    obj.request_time = time.time()
+    obj.request_counter = 0
 
-class ClockifyAPI(metaclass=Singleton):
-    endpoint = "https://api.clockify.me/api/"
-    headers = {"X-Api-Key": None}
-    app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
-    file_name = 'clockify.json'
-    fpath = os.path.join(app_dir, file_name)
-    admin_permission_names = ['WORKSPACE_OWN', 'WORKSPACE_ADMIN']
-    master_parent = None
-    workspace = None
-    workspace_id = None
 
-    def set_master(self, master_parent):
+class ClockifyAPI:
+    def __init__(self, api_key=None, master_parent=None):
+        self.workspace_name = None
+        self.workspace_id = None
         self.master_parent = master_parent
+        self.api_key = api_key
+        self.request_counter = 0
+        self.request_time = time.time()
+
+    @property
+    def headers(self):
+        return {"X-Api-Key": self.api_key}
 
     def verify_api(self):
         for key, value in self.headers.items():
 
@@ -42,7 +46,7 @@ class ClockifyAPI:
         api_key = self.get_api_key()
         if api_key is not None and self.validate_api_key(api_key) is True:
-            self.headers["X-Api-Key"] = api_key
+            self.api_key = api_key
             self.set_workspace()
             if self.master_parent:
                 self.master_parent.signed_in()
 
@@ -52,8 +56,9 @@ class ClockifyAPI:
     def validate_api_key(self, api_key):
         test_headers = {'X-Api-Key': api_key}
         action_url = 'workspaces/'
+        time_check(self)
         response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
             headers=test_headers
         )
         if response.status_code != 200:
 
@@ -69,25 +74,27 @@ class ClockifyAPI:
         action_url = "/workspaces/{}/users/{}/permissions".format(
             workspace_id, user_id
         )
+        time_check(self)
         response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
         )
        user_permissions = response.json()
        for perm in user_permissions:
-            if perm['name'] in self.admin_permission_names:
+            if perm['name'] in ADMIN_PERMISSION_NAMES:
                return True
        return False
 
    def get_user_id(self):
        action_url = 'v1/user/'
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
        # this regex is neccessary: UNICODE strings are crashing
        # during json serialization
-        id_regex ='\"{1}id\"{1}\:{1}\"{1}\w+\"{1}'
+        id_regex = '\"{1}id\"{1}\:{1}\"{1}\w+\"{1}'
        result = re.findall(id_regex, str(response.content))
        if len(result) != 1:
            # replace with log and better message?
 
@@ -98,9 +105,9 @@ class ClockifyAPI:
    def set_workspace(self, name=None):
        if name is None:
            name = os.environ.get('CLOCKIFY_WORKSPACE', None)
-        self.workspace = name
+        self.workspace_name = name
        self.workspace_id = None
-        if self.workspace is None:
+        if self.workspace_name is None:
            return
        try:
            result = self.validate_workspace()
 
@@ -115,7 +122,7 @@ class ClockifyAPI:
    def validate_workspace(self, name=None):
        if name is None:
-            name = self.workspace
+            name = self.workspace_name
        all_workspaces = self.get_workspaces()
        if name in all_workspaces:
            return all_workspaces[name]
 
@@ -124,25 +131,26 @@ class ClockifyAPI:
    def get_api_key(self):
        api_key = None
        try:
-            file = open(self.fpath, 'r')
+            file = open(CREDENTIALS_JSON_PATH, 'r')
            api_key = json.load(file).get('api_key', None)
            if api_key == '':
                api_key = None
        except Exception:
-            file = open(self.fpath, 'w')
+            file = open(CREDENTIALS_JSON_PATH, 'w')
            file.close()
        return api_key
 
    def save_api_key(self, api_key):
        data = {'api_key': api_key}
-        file = open(self.fpath, 'w')
+        file = open(CREDENTIALS_JSON_PATH, 'w')
        file.write(json.dumps(data))
        file.close()
 
    def get_workspaces(self):
        action_url = 'workspaces/'
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
        return {
 
@@ -153,8 +161,9 @@ class ClockifyAPI:
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/'.format(workspace_id)
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
 
@@ -168,8 +177,9 @@ class ClockifyAPI:
        action_url = 'workspaces/{}/projects/{}/'.format(
            workspace_id, project_id
        )
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
 
@@ -179,8 +189,9 @@ class ClockifyAPI:
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/tags/'.format(workspace_id)
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
 
@@ -194,8 +205,9 @@ class ClockifyAPI:
        action_url = 'workspaces/{}/projects/{}/tasks/'.format(
            workspace_id, project_id
        )
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
 
@@ -276,8 +288,9 @@ class ClockifyAPI:
            "taskId": task_id,
            "tagIds": tag_ids
        }
+        time_check(self)
        response = requests.post(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
        )
 
@@ -293,8 +306,9 @@ class ClockifyAPI:
        action_url = 'workspaces/{}/timeEntries/inProgress'.format(
            workspace_id
        )
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
        try:
 
@@ -323,8 +337,9 @@ class ClockifyAPI:
            "tagIds": current["tagIds"],
            "end": self.get_current_time()
        }
+        time_check(self)
        response = requests.put(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
        )
 
@@ -336,8 +351,9 @@ class ClockifyAPI:
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
+        time_check(self)
        response = requests.get(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
        return response.json()[:quantity]
 
@@ -348,8 +364,9 @@ class ClockifyAPI:
        action_url = 'workspaces/{}/timeEntries/{}'.format(
            workspace_id, tid
        )
+        time_check(self)
        response = requests.delete(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
        return response.json()
 
@@ -363,14 +380,15 @@ class ClockifyAPI:
            "clientId": "",
            "isPublic": "false",
            "estimate": {
-                # "estimate": "3600",
+                "estimate": 0,
                "type": "AUTO"
            },
            "color": "#f44336",
            "billable": "true"
        }
+        time_check(self)
        response = requests.post(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
        )
 
@@ -379,8 +397,9 @@ class ClockifyAPI:
    def add_workspace(self, name):
        action_url = 'workspaces/'
        body = {"name": name}
+        time_check(self)
        response = requests.post(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
        )
 
@@ -398,8 +417,9 @@ class ClockifyAPI:
            "name": name,
            "projectId": project_id
        }
+        time_check(self)
        response = requests.post(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
        )
 
@@ -412,8 +432,9 @@ class ClockifyAPI:
        body = {
            "name": name
        }
+        time_check(self)
        response = requests.post(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
        )
 
@@ -427,8 +448,9 @@ class ClockifyAPI:
        action_url = '/workspaces/{}/projects/{}'.format(
            workspace_id, project_id
        )
+        time_check(self)
        response = requests.delete(
-            self.endpoint + action_url,
+            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
        )
        return response.json()
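Note: time_check() above is a simple client-side throttle: it lets ten requests through, then sleeps out the remainder of a one-second window before the counter resets. A standalone sketch of the same idea, reconstructed from the diff (the Throttled class name is illustrative):

import time


class Throttled:
    """Holds the two counters time_check() expects on its argument."""

    def __init__(self):
        self.request_counter = 0
        self.request_time = time.time()


def time_check(obj):
    if obj.request_counter < 10:
        obj.request_counter += 1
        return

    # Wait out whatever is left of the 1 second window, then reset.
    wait_time = 1 - (time.time() - obj.request_time)
    if wait_time > 0:
        time.sleep(wait_time)

    obj.request_time = time.time()
    obj.request_counter = 0


client = Throttled()
for _ in range(25):
    time_check(client)  # would be called before each HTTP request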

View file

@@ -0,0 +1,17 @@
+import os
+import appdirs
+
+CLOCKIFY_FTRACK_SERVER_PATH = os.path.join(
+    os.path.dirname(__file__), "ftrack", "server"
+)
+CLOCKIFY_FTRACK_USER_PATH = os.path.join(
+    os.path.dirname(__file__), "ftrack", "user"
+)
+
+CREDENTIALS_JSON_PATH = os.path.normpath(os.path.join(
+    appdirs.user_data_dir("pype-app", "pype"),
+    "clockify.json"
+))
+
+ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"]
+CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/"
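Note: the new constants module centralises the endpoint, the admin permission names and the credentials path built with appdirs. A hedged usage sketch (assumes the appdirs package is installed; the helper name load_api_key is illustrative and not part of the module):

import json
import os

import appdirs

CREDENTIALS_JSON_PATH = os.path.normpath(os.path.join(
    appdirs.user_data_dir("pype-app", "pype"),
    "clockify.json"
))


def load_api_key():
    """Read the stored API key, returning None when nothing is saved yet."""
    if not os.path.exists(CREDENTIALS_JSON_PATH):
        return None
    with open(CREDENTIALS_JSON_PATH, "r") as stream:
        return json.load(stream).get("api_key") or None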

View file

@@ -0,0 +1,166 @@
+import os
+import json
+from pype.modules.ftrack.lib import BaseAction
+from pype.modules.clockify.clockify_api import ClockifyAPI
+
+
+class SyncClocifyServer(BaseAction):
+    '''Synchronise project names and task types.'''
+
+    identifier = "clockify.sync.server"
+    label = "Sync To Clockify (server)"
+    description = "Synchronise data to Clockify workspace"
+
+    discover_role_list = ["Pypeclub", "Administrator", "project Manager"]
+
+    def __init__(self, *args, **kwargs):
+        super(SyncClocifyServer, self).__init__(*args, **kwargs)
+
+        workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
+        api_key = os.environ.get("CLOCKIFY_API_KEY")
+        self.clockapi = ClockifyAPI(api_key)
+        self.clockapi.set_workspace(workspace_name)
+        if api_key is None:
+            modified_key = "None"
+        else:
+            str_len = int(len(api_key) / 2)
+            start_replace = int(len(api_key) / 4)
+            modified_key = ""
+            for idx in range(len(api_key)):
+                if idx >= start_replace and idx < start_replace + str_len:
+                    replacement = "X"
+                else:
+                    replacement = api_key[idx]
+                modified_key += replacement
+
+        self.log.info(
+            "Clockify info. Workspace: \"{}\" API key: \"{}\"".format(
+                str(workspace_name), str(modified_key)
+            )
+        )
+
+    def discover(self, session, entities, event):
+        if (
+            len(entities) != 1
+            or entities[0].entity_type.lower() != "project"
+        ):
+            return False
+
+        # Get user and check his roles
+        user_id = event.get("source", {}).get("user", {}).get("id")
+        if not user_id:
+            return False
+
+        user = session.query("User where id is \"{}\"".format(user_id)).first()
+        if not user:
+            return False
+
+        for role in user["user_security_roles"]:
+            if role["security_role"]["name"] in self.discover_role_list:
+                return True
+        return False
+
+    def register(self):
+        self.session.event_hub.subscribe(
+            "topic=ftrack.action.discover",
+            self._discover,
+            priority=self.priority
+        )
+
+        launch_subscription = (
+            "topic=ftrack.action.launch and data.actionIdentifier={}"
+        ).format(self.identifier)
+        self.session.event_hub.subscribe(launch_subscription, self._launch)
+
+    def launch(self, session, entities, event):
+        if self.clockapi.workspace_id is None:
+            return {
+                "success": False,
+                "message": "Clockify Workspace or API key are not set!"
+            }
+
+        if self.clockapi.validate_workspace_perm() is False:
+            return {
+                "success": False,
+                "message": "Missing permissions for this action!"
+            }
+
+        # JOB SETTINGS
+        user_id = event["source"]["user"]["id"]
+        user = session.query("User where id is " + user_id).one()
+
+        job = session.create("Job", {
+            "user": user,
+            "status": "running",
+            "data": json.dumps({"description": "Sync Ftrack to Clockify"})
+        })
+        session.commit()
+
+        project_entity = entities[0]
+        if project_entity.entity_type.lower() != "project":
+            project_entity = self.get_project_from_entity(project_entity)
+
+        project_name = project_entity["full_name"]
+        self.log.info(
+            "Synchronization of project \"{}\" to clockify begins.".format(
+                project_name
+            )
+        )
+        task_types = (
+            project_entity["project_schema"]["_task_type_schema"]["types"]
+        )
+        task_type_names = [
+            task_type["name"] for task_type in task_types
+        ]
+        try:
+            clockify_projects = self.clockapi.get_projects()
+            if project_name not in clockify_projects:
+                response = self.clockapi.add_project(project_name)
+                if "id" not in response:
+                    self.log.warning(
+                        "Project \"{}\" can't be created. Response: {}".format(
+                            project_name, response
+                        )
+                    )
+                    return {
+                        "success": False,
+                        "message": (
+                            "Can't create clockify project \"{}\"."
+                            " Unexpected error."
+                        ).format(project_name)
+                    }
+
+            clockify_workspace_tags = self.clockapi.get_tags()
+            for task_type_name in task_type_names:
+                if task_type_name in clockify_workspace_tags:
+                    self.log.debug(
+                        "Task \"{}\" already exist".format(task_type_name)
+                    )
+                    continue
+
+                response = self.clockapi.add_tag(task_type_name)
+                if "id" not in response:
+                    self.log.warning(
+                        "Task \"{}\" can't be created. Response: {}".format(
+                            task_type_name, response
+                        )
+                    )
+
+            job["status"] = "done"
+
+        except Exception:
+            self.log.warning(
+                "Synchronization to clockify failed.",
+                exc_info=True
+            )
+
+        finally:
+            if job["status"] != "done":
+                job["status"] = "failed"
+            session.commit()
+
+        return True
+
+
+def register(session, **kw):
+    SyncClocifyServer(session).register()
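Note: SyncClocifyServer logs the configured API key with its middle half replaced by "X" so the value stays identifiable but is not leaked. The same masking, extracted into a small sketch; the function name mask_secret is illustrative.

def mask_secret(api_key):
    """Replace the middle half of a secret with 'X' characters."""
    if api_key is None:
        return "None"

    str_len = int(len(api_key) / 2)
    start_replace = int(len(api_key) / 4)
    return "".join(
        "X" if start_replace <= idx < start_replace + str_len else char
        for idx, char in enumerate(api_key)
    )


print(mask_secret("0123456789abcdef"))  # 0123XXXXXXXXcdef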

View file

@@ -0,0 +1,122 @@
+import json
+from pype.modules.ftrack.lib import BaseAction, statics_icon
+from pype.modules.clockify.clockify_api import ClockifyAPI
+
+
+class SyncClocifyLocal(BaseAction):
+    '''Synchronise project names and task types.'''
+    #: Action identifier.
+    identifier = 'clockify.sync.local'
+    #: Action label.
+    label = 'Sync To Clockify (local)'
+    #: Action description.
+    description = 'Synchronise data to Clockify workspace'
+    #: roles that are allowed to register this action
+    role_list = ["Pypeclub", "Administrator", "project Manager"]
+    #: icon
+    icon = statics_icon("app_icons", "clockify-white.png")
+    #: CLockifyApi
+    clockapi = ClockifyAPI()
+
+    def discover(self, session, entities, event):
+        if (
+            len(entities) == 1
+            and entities[0].entity_type.lower() == "project"
+        ):
+            return True
+        return False
+
+    def launch(self, session, entities, event):
+        self.clockapi.set_api()
+        if self.clockapi.workspace_id is None:
+            return {
+                "success": False,
+                "message": "Clockify Workspace or API key are not set!"
+            }
+
+        if self.clockapi.validate_workspace_perm() is False:
+            return {
+                "success": False,
+                "message": "Missing permissions for this action!"
+            }
+
+        # JOB SETTINGS
+        userId = event['source']['user']['id']
+        user = session.query('User where id is ' + userId).one()
+
+        job = session.create('Job', {
+            'user': user,
+            'status': 'running',
+            'data': json.dumps({
+                'description': 'Sync Ftrack to Clockify'
+            })
+        })
+        session.commit()
+
+        project_entity = entities[0]
+        if project_entity.entity_type.lower() != "project":
+            project_entity = self.get_project_from_entity(project_entity)
+
+        project_name = project_entity["full_name"]
+        self.log.info(
+            "Synchronization of project \"{}\" to clockify begins.".format(
+                project_name
+            )
+        )
+        task_types = (
+            project_entity["project_schema"]["_task_type_schema"]["types"]
+        )
+        task_type_names = [
+            task_type["name"] for task_type in task_types
+        ]
+        try:
+            clockify_projects = self.clockapi.get_projects()
+            if project_name not in clockify_projects:
+                response = self.clockapi.add_project(project_name)
+                if "id" not in response:
+                    self.log.warning(
+                        "Project \"{}\" can't be created. Response: {}".format(
+                            project_name, response
+                        )
+                    )
+                    return {
+                        "success": False,
+                        "message": (
+                            "Can't create clockify project \"{}\"."
+                            " Unexpected error."
+                        ).format(project_name)
+                    }
+
+            clockify_workspace_tags = self.clockapi.get_tags()
+            for task_type_name in task_type_names:
+                if task_type_name in clockify_workspace_tags:
+                    self.log.debug(
+                        "Task \"{}\" already exist".format(task_type_name)
+                    )
+                    continue
+
+                response = self.clockapi.add_tag(task_type_name)
+                if "id" not in response:
+                    self.log.warning(
+                        "Task \"{}\" can't be created. Response: {}".format(
+                            task_type_name, response
+                        )
+                    )
+
+            job["status"] = "done"
+
+        except Exception:
+            pass
+
+        finally:
+            if job["status"] != "done":
+                job["status"] = "failed"
+            session.commit()
+
+        return True
+
+
+def register(session, **kw):
+    SyncClocifyLocal(session).register()

View file

@@ -1,155 +0,0 @@
-import os
-import sys
-import argparse
-import logging
-import json
-import ftrack_api
-from pype.modules.ftrack import BaseAction, MissingPermision
-from pype.modules.clockify import ClockifyAPI
-
-
-class SyncClocify(BaseAction):
-    '''Synchronise project names and task types.'''
-    #: Action identifier.
-    identifier = 'clockify.sync'
-    #: Action label.
-    label = 'Sync To Clockify'
-    #: Action description.
-    description = 'Synchronise data to Clockify workspace'
-    #: roles that are allowed to register this action
-    role_list = ["Pypeclub", "Administrator", "project Manager"]
-    #: icon
-    icon = '{}/app_icons/clockify-white.png'.format(
-        os.environ.get('PYPE_STATICS_SERVER', '')
-    )
-    #: CLockifyApi
-    clockapi = ClockifyAPI()
-
-    def preregister(self):
-        if self.clockapi.workspace_id is None:
-            return "Clockify Workspace or API key are not set!"
-
-        if self.clockapi.validate_workspace_perm() is False:
-            raise MissingPermision('Clockify')
-
-        return True
-
-    def discover(self, session, entities, event):
-        ''' Validation '''
-        if len(entities) != 1:
-            return False
-
-        if entities[0].entity_type.lower() != "project":
-            return False
-
-        return True
-
-    def launch(self, session, entities, event):
-        # JOB SETTINGS
-        userId = event['source']['user']['id']
-        user = session.query('User where id is ' + userId).one()
-
-        job = session.create('Job', {
-            'user': user,
-            'status': 'running',
-            'data': json.dumps({
-                'description': 'Sync Ftrack to Clockify'
-            })
-        })
-        session.commit()
-        try:
-            entity = entities[0]
-            if entity.entity_type.lower() == 'project':
-                project = entity
-            else:
-                project = entity['project']
-            project_name = project['full_name']
-
-            task_types = []
-            for task_type in project['project_schema']['_task_type_schema'][
-                'types'
-            ]:
-                task_types.append(task_type['name'])
-
-            clockify_projects = self.clockapi.get_projects()
-            if project_name not in clockify_projects:
-                response = self.clockapi.add_project(project_name)
-                if 'id' not in response:
-                    self.log.error('Project {} can\'t be created'.format(
-                        project_name
-                    ))
-                    return {
-                        'success': False,
-                        'message': 'Can\'t create project, unexpected error'
-                    }
-                project_id = response['id']
-            else:
-                project_id = clockify_projects[project_name]
-
-            clockify_workspace_tags = self.clockapi.get_tags()
-            for task_type in task_types:
-                if task_type not in clockify_workspace_tags:
-                    response = self.clockapi.add_tag(task_type)
-                    if 'id' not in response:
-                        self.log.error('Task {} can\'t be created'.format(
-                            task_type
-                        ))
-                        continue
-
-        except Exception:
-            job['status'] = 'failed'
-            session.commit()
-            return False
-
-        job['status'] = 'done'
-        session.commit()
-
-        return True
-
-
-def register(session, **kw):
-    '''Register plugin. Called when used as an plugin.'''
-
-    if not isinstance(session, ftrack_api.session.Session):
-        return
-
-    SyncClocify(session).register()
-
-
-def main(arguments=None):
-    '''Set up logging and register action.'''
-    if arguments is None:
-        arguments = []
-
-    parser = argparse.ArgumentParser()
-    # Allow setting of logging level from arguments.
-    loggingLevels = {}
-    for level in (
-        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
-        logging.ERROR, logging.CRITICAL
-    ):
-        loggingLevels[logging.getLevelName(level).lower()] = level
-
-    parser.add_argument(
-        '-v', '--verbosity',
-        help='Set the logging output verbosity.',
-        choices=loggingLevels.keys(),
-        default='info'
-    )
-    namespace = parser.parse_args(arguments)
-
-    # Set up basic logging
-    logging.basicConfig(level=loggingLevels[namespace.verbosity])
-
-    session = ftrack_api.Session()
-    register(session)
-
-    # Wait for events
-    logging.info(
-        'Registered actions and listening for events. Use Ctrl-C to abort.'
-    )
-    session.event_hub.wait()
-
-
-if __name__ == '__main__':
-    raise SystemExit(main(sys.argv[1:]))

View file

@@ -1,6 +1,6 @@
 from avalon import api, io
 from pype.api import Logger
-from pype.modules.clockify import ClockifyAPI
+from pype.modules.clockify.clockify_api import ClockifyAPI
 
 log = Logger().get_logger(__name__, "clockify_start")

View file

@@ -1,5 +1,5 @@
 from avalon import api, io
-from pype.modules.clockify import ClockifyAPI
+from pype.modules.clockify.clockify_api import ClockifyAPI
 from pype.api import Logger
 
 log = Logger().get_logger(__name__, "clockify_sync")

View file

@@ -1,92 +0,0 @@
-from Qt import QtCore, QtGui, QtWidgets
-from avalon import style
-from pype.api import resources
-
-
-class MessageWidget(QtWidgets.QWidget):
-
-    SIZE_W = 300
-    SIZE_H = 130
-
-    closed = QtCore.Signal()
-
-    def __init__(self, parent=None, messages=[], title="Message"):
-        super(MessageWidget, self).__init__()
-
-        self._parent = parent
-
-        # Icon
-        if parent and hasattr(parent, 'icon'):
-            self.setWindowIcon(parent.icon)
-        else:
-            icon = QtGui.QIcon(resources.pype_icon_filepath())
-            self.setWindowIcon(icon)
-
-        self.setWindowFlags(
-            QtCore.Qt.WindowCloseButtonHint |
-            QtCore.Qt.WindowMinimizeButtonHint
-        )
-
-        # Font
-        self.font = QtGui.QFont()
-        self.font.setFamily("DejaVu Sans Condensed")
-        self.font.setPointSize(9)
-        self.font.setBold(True)
-        self.font.setWeight(50)
-        self.font.setKerning(True)
-
-        # Size setting
-        self.resize(self.SIZE_W, self.SIZE_H)
-        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
-        self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
-
-        # Style
-        self.setStyleSheet(style.load_stylesheet())
-
-        self.setLayout(self._ui_layout(messages))
-        self.setWindowTitle(title)
-
-    def _ui_layout(self, messages):
-        if not messages:
-            messages = ["*Misssing messages (This is a bug)*", ]
-
-        elif not isinstance(messages, (tuple, list)):
-            messages = [messages, ]
-
-        main_layout = QtWidgets.QVBoxLayout(self)
-
-        labels = []
-        for message in messages:
-            label = QtWidgets.QLabel(message)
-            label.setFont(self.font)
-            label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
-            label.setTextFormat(QtCore.Qt.RichText)
-            label.setWordWrap(True)
-
-            labels.append(label)
-            main_layout.addWidget(label)
-
-        btn_close = QtWidgets.QPushButton("Close")
-        btn_close.setToolTip('Close this window')
-        btn_close.clicked.connect(self.on_close_clicked)
-
-        btn_group = QtWidgets.QHBoxLayout()
-        btn_group.addStretch(1)
-        btn_group.addWidget(btn_close)
-
-        main_layout.addLayout(btn_group)
-
-        self.labels = labels
-        self.btn_group = btn_group
-        self.btn_close = btn_close
-        self.main_layout = main_layout
-
-        return main_layout
-
-    def on_close_clicked(self):
-        self.close()
-
-    def close(self, *args, **kwargs):
-        self.closed.emit()
-        super(MessageWidget, self).close(*args, **kwargs)

View file

@@ -1,9 +1,97 @@
-import os
 from Qt import QtCore, QtGui, QtWidgets
 from avalon import style
 from pype.api import resources
 
 
+class MessageWidget(QtWidgets.QWidget):
+
+    SIZE_W = 300
+    SIZE_H = 130
+
+    closed = QtCore.Signal()
+
+    def __init__(self, parent=None, messages=[], title="Message"):
+        super(MessageWidget, self).__init__()
+
+        self._parent = parent
+
+        # Icon
+        if parent and hasattr(parent, 'icon'):
+            self.setWindowIcon(parent.icon)
+        else:
+            icon = QtGui.QIcon(resources.pype_icon_filepath())
+            self.setWindowIcon(icon)
+
+        self.setWindowFlags(
+            QtCore.Qt.WindowCloseButtonHint |
+            QtCore.Qt.WindowMinimizeButtonHint
+        )
+
+        # Font
+        self.font = QtGui.QFont()
+        self.font.setFamily("DejaVu Sans Condensed")
+        self.font.setPointSize(9)
+        self.font.setBold(True)
+        self.font.setWeight(50)
+        self.font.setKerning(True)
+
+        # Size setting
+        self.resize(self.SIZE_W, self.SIZE_H)
+        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
+        self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
+
+        # Style
+        self.setStyleSheet(style.load_stylesheet())
+
+        self.setLayout(self._ui_layout(messages))
+        self.setWindowTitle(title)
+
+    def _ui_layout(self, messages):
+        if not messages:
+            messages = ["*Misssing messages (This is a bug)*", ]
+
+        elif not isinstance(messages, (tuple, list)):
+            messages = [messages, ]
+
+        main_layout = QtWidgets.QVBoxLayout(self)
+
+        labels = []
+        for message in messages:
+            label = QtWidgets.QLabel(message)
+            label.setFont(self.font)
+            label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
+            label.setTextFormat(QtCore.Qt.RichText)
+            label.setWordWrap(True)
+
+            labels.append(label)
+            main_layout.addWidget(label)
+
+        btn_close = QtWidgets.QPushButton("Close")
+        btn_close.setToolTip('Close this window')
+        btn_close.clicked.connect(self.on_close_clicked)
+
+        btn_group = QtWidgets.QHBoxLayout()
+        btn_group.addStretch(1)
+        btn_group.addWidget(btn_close)
+
+        main_layout.addLayout(btn_group)
+
+        self.labels = labels
+        self.btn_group = btn_group
+        self.btn_close = btn_close
+        self.main_layout = main_layout
+
+        return main_layout
+
+    def on_close_clicked(self):
+        self.close()
+
+    def close(self, *args, **kwargs):
+        self.closed.emit()
+        super(MessageWidget, self).close(*args, **kwargs)
+
+
 class ClockifySettings(QtWidgets.QWidget):
     SIZE_W = 300

View file

@@ -1,10 +1,8 @@
-import os
 import time
 import traceback
 
 from pype.modules.ftrack import BaseAction
 from pype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory
-from pype.api import config
 
 
 class SyncToAvalonServer(BaseAction):
 
@@ -38,17 +36,6 @@ class SyncToAvalonServer(BaseAction):
     variant = "- Sync To Avalon (Server)"
     #: Action description.
     description = "Send data from Ftrack to Avalon"
-    #: Action icon.
-    icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
-        os.environ.get(
-            "PYPE_STATICS_SERVER",
-            "http://localhost:{}".format(
-                config.get_presets().get("services", {}).get(
-                    "rest_api", {}
-                ).get("default_port", 8021)
-            )
-        )
-    )
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

View file

@@ -84,6 +84,9 @@ class VersionToTaskStatus(BaseEvent):
             if not task:
                 continue
 
+            if version["asset"]["type"]["short"].lower() == "scene":
+                continue
+
             project_schema = task["project"]["project_schema"]
             # Get all available statuses for Task
             statuses = project_schema.get_statuses("Task", task["type_id"])

View file

@@ -522,6 +522,21 @@ def main(argv):
         help="Load creadentials from apps dir",
         action="store_true"
     )
+    parser.add_argument(
+        "-clockifyapikey", type=str,
+        help=(
+            "Enter API key for Clockify actions."
+            " (default from environment: $CLOCKIFY_API_KEY)"
+        )
+    )
+    parser.add_argument(
+        "-clockifyworkspace", type=str,
+        help=(
+            "Enter workspace for Clockify."
+            " (default from module presets or "
+            "environment: $CLOCKIFY_WORKSPACE)"
+        )
+    )
     ftrack_url = os.environ.get('FTRACK_SERVER')
     username = os.environ.get('FTRACK_API_USER')
     api_key = os.environ.get('FTRACK_API_KEY')
 
@@ -546,6 +561,12 @@ def main(argv):
     if kwargs.ftrackapikey:
         api_key = kwargs.ftrackapikey
 
+    if kwargs.clockifyworkspace:
+        os.environ["CLOCKIFY_WORKSPACE"] = kwargs.clockifyworkspace
+
+    if kwargs.clockifyapikey:
+        os.environ["CLOCKIFY_API_KEY"] = kwargs.clockifyapikey
+
     legacy = kwargs.legacy
 
     # Check url regex and accessibility
     ftrack_url = check_ftrack_url(ftrack_url)

View file

@@ -11,7 +11,7 @@ from pype.api import Logger
 
 class SocketThread(threading.Thread):
     """Thread that checks suprocess of storer of processor of events"""
-    MAX_TIMEOUT = 35
+    MAX_TIMEOUT = int(os.environ.get("PYPE_FTRACK_SOCKET_TIMEOUT", 45))
 
     def __init__(self, name, port, filepath, additional_args=[]):
         super(SocketThread, self).__init__()
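Note: the MAX_TIMEOUT change reads the socket timeout from an environment variable with a fallback of 45 seconds. A minimal sketch of that pattern (the variable name is reused from the diff; int() will still raise on a non-numeric value, which the original does not guard against either):

import os

# Falls back to 45 when PYPE_FTRACK_SOCKET_TIMEOUT is unset.
MAX_TIMEOUT = int(os.environ.get("PYPE_FTRACK_SOCKET_TIMEOUT", 45))
print(MAX_TIMEOUT)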

View file

@@ -9,7 +9,7 @@ from pype.modules.ftrack.ftrack_server.lib import (
     SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER
 )
 import ftrack_api
-from pype.api import Logger
+from pype.api import Logger, config
 
 log = Logger().get_logger("Event processor")
 
@@ -55,6 +55,42 @@ def register(session):
     )
 
 
+def clockify_module_registration():
+    module_name = "Clockify"
+    menu_items = config.get_presets()["tray"]["menu_items"]
+    if not menu_items["item_usage"][module_name]:
+        return
+
+    api_key = os.environ.get("CLOCKIFY_API_KEY")
+    if not api_key:
+        log.warning("Clockify API key is not set.")
+        return
+
+    workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
+    if not workspace_name:
+        workspace_name = (
+            menu_items
+            .get("attributes", {})
+            .get(module_name, {})
+            .get("workspace_name", {})
+        )
+
+    if not workspace_name:
+        log.warning("Clockify Workspace is not set.")
+        return
+
+    os.environ["CLOCKIFY_WORKSPACE"] = workspace_name
+
+    from pype.modules.clockify.constants import CLOCKIFY_FTRACK_SERVER_PATH
+
+    current = os.environ.get("FTRACK_EVENTS_PATH") or ""
+    if current:
+        current += os.pathsep
+    os.environ["FTRACK_EVENTS_PATH"] = current + CLOCKIFY_FTRACK_SERVER_PATH
+    return True
+
+
 def main(args):
     port = int(args[-1])
     # Create a TCP/IP socket
 
@@ -66,6 +102,11 @@ def main(args):
     sock.connect(server_address)
     sock.sendall(b"CreatedProcess")
 
+    try:
+        clockify_module_registration()
+    except Exception:
+        log.info("Clockify registration failed.", exc_info=True)
+
     try:
         session = SocketSession(
             auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub
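Note: clockify_module_registration() appends the Clockify event handler directory to FTRACK_EVENTS_PATH using os.pathsep, the usual way PATH-like variables are extended. A small generic sketch; the variable and directory values below are placeholders.

import os


def append_to_path_env(var_name, new_dir):
    """Append new_dir to an os.pathsep separated environment variable."""
    current = os.environ.get(var_name) or ""
    if current:
        current += os.pathsep
    os.environ[var_name] = current + new_dir


append_to_path_env("FTRACK_EVENTS_PATH", "/tmp/clockify/ftrack/server")
print(os.environ["FTRACK_EVENTS_PATH"])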

View file

@@ -8,7 +8,9 @@ import getpass
 from pype import lib as pypelib
 from pype.api import config, Anatomy
 from .ftrack_action_handler import BaseAction
-from avalon.api import last_workfile, HOST_WORKFILE_EXTENSIONS
+from avalon.api import (
+    last_workfile, HOST_WORKFILE_EXTENSIONS, should_start_last_workfile
+)
 
 
 class AppAction(BaseAction):
 
@@ -84,7 +86,7 @@
         if (
             len(entities) != 1
-            or entities[0].entity_type.lower() != 'task'
+            or entities[0].entity_type.lower() != "task"
         ):
             return False
 
@@ -92,21 +94,31 @@
         if entity["parent"].entity_type.lower() == "project":
             return False
 
-        ft_project = self.get_project_from_entity(entity)
-        database = pypelib.get_avalon_database()
-        project_name = ft_project["full_name"]
-        avalon_project = database[project_name].find_one({
-            "type": "project"
-        })
-
-        if not avalon_project:
-            return False
-
-        project_apps = avalon_project["config"].get("apps", [])
-        apps = [app["name"] for app in project_apps]
-        if self.identifier in apps:
-            return True
-        return False
+        avalon_project_apps = event["data"].get("avalon_project_apps", None)
+        avalon_project_doc = event["data"].get("avalon_project_doc", None)
+        if avalon_project_apps is None:
+            if avalon_project_doc is None:
+                ft_project = self.get_project_from_entity(entity)
+                database = pypelib.get_avalon_database()
+                project_name = ft_project["full_name"]
+                avalon_project_doc = database[project_name].find_one({
+                    "type": "project"
+                }) or False
+                event["data"]["avalon_project_doc"] = avalon_project_doc
+
+            if not avalon_project_doc:
+                return False
+
+            project_apps_config = avalon_project_doc["config"].get("apps", [])
+            avalon_project_apps = [
+                app["name"] for app in project_apps_config
+            ] or False
+            event["data"]["avalon_project_apps"] = avalon_project_apps
+
+        if not avalon_project_apps:
+            return False
+
+        return self.identifier in avalon_project_apps
 
     def _launch(self, event):
         entities = self._translate_event(event)
 
@@ -142,6 +154,9 @@
         """
         entity = entities[0]
+
+        task_name = entity["name"]
+
         project_name = entity["project"]["full_name"]
 
         database = pypelib.get_avalon_database()
 
@@ -164,7 +179,7 @@
                 "name": entity["project"]["full_name"],
                 "code": entity["project"]["name"]
             },
-            "task": entity["name"],
+            "task": task_name,
             "asset": asset_name,
             "app": host_name,
             "hierarchy": hierarchy
 
@@ -210,14 +225,28 @@
         prep_env.update({
             "AVALON_PROJECT": project_name,
             "AVALON_ASSET": asset_name,
-            "AVALON_TASK": entity["name"],
-            "AVALON_APP": self.identifier.split("_")[0],
+            "AVALON_TASK": task_name,
+            "AVALON_APP": host_name,
             "AVALON_APP_NAME": self.identifier,
            "AVALON_HIERARCHY": hierarchy,
            "AVALON_WORKDIR": workdir
        })
 
-        if last_workfile_path:
+        start_last_workfile = should_start_last_workfile(
+            project_name, host_name, task_name
+        )
+        # Store boolean as "0"(False) or "1"(True)
+        prep_env["AVALON_OPEN_LAST_WORKFILE"] = (
+            str(int(bool(start_last_workfile)))
+        )
+
+        if (
+            start_last_workfile
+            and last_workfile_path
+            and os.path.exists(last_workfile_path)
+        ):
            prep_env["AVALON_LAST_WORKFILE"] = last_workfile_path
 
        prep_env.update(anatomy.roots_obj.root_environments())
 
        # collect all parents from the task
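Note: the discover() rewrite above caches the Avalon project document and its app list on the ftrack event payload, so repeated discover calls for the same event skip the database round trip. A hedged sketch of that memoisation pattern, with a plain dict standing in for event["data"] and a stand-in fetch function:

def get_project_apps(event_data, fetch_project_doc):
    """Return cached apps from event_data, querying only on first use."""
    apps = event_data.get("avalon_project_apps")
    if apps is None:
        doc = event_data.get("avalon_project_doc")
        if doc is None:
            # False marks "already looked up, nothing found".
            doc = fetch_project_doc() or False
            event_data["avalon_project_doc"] = doc
        if not doc:
            return False
        apps = [app["name"] for app in doc["config"].get("apps", [])] or False
        event_data["avalon_project_apps"] = apps
    return apps


event_data = {}
fetch = lambda: {"config": {"apps": [{"name": "maya_2020"}]}}
print(get_project_apps(event_data, fetch))   # queries once
print(get_project_apps(event_data, fetch))   # served from the cache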

View file

@@ -174,22 +174,16 @@ class BlendActionLoader(pype.hosts.blender.plugin.AssetLoader):
 
         strips = []
 
-        for obj in collection_metadata["objects"]:
+        for obj in list(collection_metadata["objects"]):
 
             # Get all the strips that use the action
             arm_objs = [
                 arm for arm in bpy.data.objects if arm.type == 'ARMATURE']
 
             for armature_obj in arm_objs:
                 if armature_obj.animation_data is not None:
                     for track in armature_obj.animation_data.nla_tracks:
                         for strip in track.strips:
                             if strip.action == obj.animation_data.action:
                                 strips.append(strip)
 
             bpy.data.actions.remove(obj.animation_data.action)
 
@@ -277,22 +271,16 @@ class BlendActionLoader(pype.hosts.blender.plugin.AssetLoader):
 
         objects = collection_metadata["objects"]
         lib_container = collection_metadata["lib_container"]
 
-        for obj in objects:
+        for obj in list(objects):
 
             # Get all the strips that use the action
             arm_objs = [
                 arm for arm in bpy.data.objects if arm.type == 'ARMATURE']
 
             for armature_obj in arm_objs:
                 if armature_obj.animation_data is not None:
                     for track in armature_obj.animation_data.nla_tracks:
                         for strip in track.strips:
                             if strip.action == obj.animation_data.action:
                                 track.strips.remove(strip)
 
             bpy.data.actions.remove(obj.animation_data.action)
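Note: several Blender loaders in this commit switch from iterating a collection directly to iterating a list(...) copy. Removing datablocks mutates the collection being looped over, which silently skips items; snapshotting first avoids that. A plain-Python sketch of the difference:

items = [1, 2, 3, 4]

# Mutating while iterating over the same list skips elements.
for value in items:
    items.remove(value)
print(items)  # [2, 4] - half the elements survived

items = [1, 2, 3, 4]
# Iterating over a snapshot removes everything as intended.
for value in list(items):
    items.remove(value)
print(items)  # []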

View file

@@ -30,9 +30,7 @@ class BlendAnimationLoader(pype.hosts.blender.plugin.AssetLoader):
     color = "orange"
 
     def _remove(self, objects, lib_container):
-
-        for obj in objects:
+        for obj in list(objects):
             if obj.type == 'ARMATURE':
                 bpy.data.armatures.remove(obj.data)
             elif obj.type == 'MESH':

View file

@@ -28,8 +28,7 @@ class BlendCameraLoader(pype.hosts.blender.plugin.AssetLoader):
     color = "orange"
 
     def _remove(self, objects, lib_container):
-
-        for obj in objects:
+        for obj in list(objects):
             bpy.data.cameras.remove(obj.data)
 
         bpy.data.collections.remove(bpy.data.collections[lib_container])
 
@@ -51,26 +50,26 @@ class BlendCameraLoader(pype.hosts.blender.plugin.AssetLoader):
         objects_list = []
 
         for obj in camera_container.objects:
-            obj = obj.make_local()
-            obj.data.make_local()
+            local_obj = obj.make_local()
+            local_obj.data.make_local()
 
-            if not obj.get(blender.pipeline.AVALON_PROPERTY):
-                obj[blender.pipeline.AVALON_PROPERTY] = dict()
+            if not local_obj.get(blender.pipeline.AVALON_PROPERTY):
+                local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
 
-            avalon_info = obj[blender.pipeline.AVALON_PROPERTY]
+            avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
             avalon_info.update({"container_name": container_name})
 
             if actions[0] is not None:
-                if obj.animation_data is None:
-                    obj.animation_data_create()
-                obj.animation_data.action = actions[0]
+                if local_obj.animation_data is None:
+                    local_obj.animation_data_create()
+                local_obj.animation_data.action = actions[0]
 
             if actions[1] is not None:
-                if obj.data.animation_data is None:
-                    obj.data.animation_data_create()
-                obj.data.animation_data.action = actions[1]
+                if local_obj.data.animation_data is None:
+                    local_obj.data.animation_data_create()
+                local_obj.data.animation_data.action = actions[1]
 
-            objects_list.append(obj)
+            objects_list.append(local_obj)
 
         camera_container.pop(blender.pipeline.AVALON_PROPERTY)
 
@@ -190,7 +189,16 @@ class BlendCameraLoader(pype.hosts.blender.plugin.AssetLoader):
         camera = objects[0]
 
-        actions = (camera.animation_data.action, camera.data.animation_data.action)
+        camera_action = None
+        camera_data_action = None
+
+        if camera.animation_data and camera.animation_data.action:
+            camera_action = camera.animation_data.action
+
+        if camera.data.animation_data and camera.data.animation_data.action:
+            camera_data_action = camera.data.animation_data.action
+
+        actions = (camera_action, camera_data_action)
 
         self._remove(objects, lib_container)

View file

@@ -21,7 +21,7 @@ class BlendLayoutLoader(plugin.AssetLoader):
     color = "orange"
 
     def _remove(self, objects, obj_container):
-        for obj in objects:
+        for obj in list(objects):
             if obj.type == 'ARMATURE':
                 bpy.data.armatures.remove(obj.data)
             elif obj.type == 'MESH':
 
@@ -79,21 +79,21 @@ class BlendLayoutLoader(plugin.AssetLoader):
         # The armature is unparented for all the non-local meshes,
         # when it is made local.
         for obj in objects + armatures:
-            obj.make_local()
+            local_obj = obj.make_local()
             if obj.data:
                 obj.data.make_local()
 
-            if not obj.get(blender.pipeline.AVALON_PROPERTY):
-                obj[blender.pipeline.AVALON_PROPERTY] = dict()
+            if not local_obj.get(blender.pipeline.AVALON_PROPERTY):
+                local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
 
-            avalon_info = obj[blender.pipeline.AVALON_PROPERTY]
+            avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
             avalon_info.update({"container_name": container_name})
 
-            action = actions.get(obj.name, None)
+            action = actions.get(local_obj.name, None)
 
-            if obj.type == 'ARMATURE' and action is not None:
-                obj.animation_data.action = action
+            if local_obj.type == 'ARMATURE' and action is not None:
+                local_obj.animation_data.action = action
 
         layout_container.pop(blender.pipeline.AVALON_PROPERTY)
 
         bpy.ops.object.select_all(action='DESELECT')
 
@@ -222,7 +222,8 @@ class BlendLayoutLoader(plugin.AssetLoader):
         for obj in objects:
             if obj.type == 'ARMATURE':
-                actions[obj.name] = obj.animation_data.action
+                if obj.animation_data and obj.animation_data.action:
+                    actions[obj.name] = obj.animation_data.action
 
         self._remove(objects, obj_container)

View file

@@ -25,8 +25,8 @@ class BlendModelLoader(plugin.AssetLoader):
     color = "orange"
 
     def _remove(self, objects, container):
-        for obj in objects:
-            for material_slot in obj.material_slots:
+        for obj in list(objects):
+            for material_slot in list(obj.material_slots):
                 bpy.data.materials.remove(material_slot.material)
             bpy.data.meshes.remove(obj.data)
 
@@ -53,16 +53,16 @@ class BlendModelLoader(plugin.AssetLoader):
         model_container.name = container_name
 
         for obj in model_container.objects:
-            plugin.prepare_data(obj, container_name)
-            plugin.prepare_data(obj.data, container_name)
+            local_obj = plugin.prepare_data(obj, container_name)
+            plugin.prepare_data(local_obj.data, container_name)
 
-            for material_slot in obj.material_slots:
+            for material_slot in local_obj.material_slots:
                 plugin.prepare_data(material_slot.material, container_name)
 
             if not obj.get(blender.pipeline.AVALON_PROPERTY):
-                obj[blender.pipeline.AVALON_PROPERTY] = dict()
+                local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
 
-            avalon_info = obj[blender.pipeline.AVALON_PROPERTY]
+            avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
             avalon_info.update({"container_name": container_name})
 
         model_container.pop(blender.pipeline.AVALON_PROPERTY)

View file

@ -25,7 +25,7 @@ class BlendRigLoader(plugin.AssetLoader):
color = "orange" color = "orange"
def _remove(self, objects, obj_container): def _remove(self, objects, obj_container):
for obj in objects: for obj in list(objects):
if obj.type == 'ARMATURE': if obj.type == 'ARMATURE':
bpy.data.armatures.remove(obj.data) bpy.data.armatures.remove(obj.data)
elif obj.type == 'MESH': elif obj.type == 'MESH':
@ -63,25 +63,25 @@ class BlendRigLoader(plugin.AssetLoader):
] ]
for child in rig_container.children: for child in rig_container.children:
plugin.prepare_data(child, container_name) local_child = plugin.prepare_data(child, container_name)
meshes.extend(child.objects) meshes.extend(local_child.objects)
# Link meshes first, then armatures. # Link meshes first, then armatures.
# The armature is unparented for all the non-local meshes, # The armature is unparented for all the non-local meshes,
# when it is made local. # when it is made local.
for obj in meshes + armatures: for obj in meshes + armatures:
plugin.prepare_data(obj, container_name) local_obj = plugin.prepare_data(obj, container_name)
plugin.prepare_data(obj.data, container_name) plugin.prepare_data(local_obj.data, container_name)
if not obj.get(blender.pipeline.AVALON_PROPERTY): if not local_obj.get(blender.pipeline.AVALON_PROPERTY):
obj[blender.pipeline.AVALON_PROPERTY] = dict() local_obj[blender.pipeline.AVALON_PROPERTY] = dict()
avalon_info = obj[blender.pipeline.AVALON_PROPERTY] avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY]
avalon_info.update({"container_name": container_name}) avalon_info.update({"container_name": container_name})
if obj.type == 'ARMATURE' and action is not None: if local_obj.type == 'ARMATURE' and action is not None:
obj.animation_data.action = action local_obj.animation_data.action = action
rig_container.pop(blender.pipeline.AVALON_PROPERTY) rig_container.pop(blender.pipeline.AVALON_PROPERTY)
bpy.ops.object.select_all(action='DESELECT') bpy.ops.object.select_all(action='DESELECT')
@ -214,7 +214,9 @@ class BlendRigLoader(plugin.AssetLoader):
armatures = [obj for obj in objects if obj.type == 'ARMATURE'] armatures = [obj for obj in objects if obj.type == 'ARMATURE']
assert(len(armatures) == 1) assert(len(armatures) == 1)
action = armatures[0].animation_data.action action = None
if armatures[0].animation_data and armatures[0].animation_data.action:
action = armatures[0].animation_data.action
parent = plugin.get_parent_collection(obj_container) parent = plugin.get_parent_collection(obj_container)

View file

@ -10,9 +10,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.495 order = pyblish.api.CollectorOrder + 0.495
families = ["render.farm"] families = ["render.farm"]
# Presets
anatomy_render_key = None
anatomy_publish_render_key = None
def process(self, instance): def process(self, instance):
anatomy = instance.context.data["anatomy"] anatomy = instance.context.data["anatomy"]
anatomy_data = copy.deepcopy(instance.data["anatomyData"]) anatomy_data = copy.deepcopy(instance.data["anatomyData"])
anatomy_data["family"] = "render"
padding = anatomy.templates.get("frame_padding", 4) padding = anatomy.templates.get("frame_padding", 4)
anatomy_data.update({ anatomy_data.update({
"frame": f"%0{padding}d", "frame": f"%0{padding}d",
@ -21,12 +26,28 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
anatomy_filled = anatomy.format(anatomy_data) anatomy_filled = anatomy.format(anatomy_data)
render_dir = anatomy_filled["render_tmp"]["folder"] # get anatomy rendering keys
render_path = anatomy_filled["render_tmp"]["path"] anatomy_render_key = self.anatomy_render_key or "render"
anatomy_publish_render_key = self.anatomy_publish_render_key or "render"
# get folder and path for rendering images from celaction
render_dir = anatomy_filled[anatomy_render_key]["folder"]
render_path = anatomy_filled[anatomy_render_key]["path"]
# create dir if it doesn't exist # create dir if it doesn't exist
os.makedirs(render_dir, exist_ok=True) try:
if not os.path.isdir(render_dir):
os.makedirs(render_dir, exist_ok=True)
except OSError:
# directory is not available
self.log.warning("Path is unreachable: `{}`".format(render_dir))
# add rendering path to instance data
instance.data["path"] = render_path instance.data["path"] = render_path
# get anatomy for published renders folder path
if anatomy_filled.get(anatomy_publish_render_key):
instance.data["publishRenderFolder"] = anatomy_filled[
anatomy_publish_render_key]["folder"]
self.log.info(f"Render output path set to: `{render_path}`") self.log.info(f"Render output path set to: `{render_path}`")

View file

@ -4,9 +4,9 @@ import pyblish.api
class VersionUpScene(pyblish.api.ContextPlugin): class VersionUpScene(pyblish.api.ContextPlugin):
order = pyblish.api.IntegratorOrder order = pyblish.api.IntegratorOrder + 0.5
label = 'Version Up Scene' label = 'Version Up Scene'
families = ['scene'] families = ['workfile']
optional = True optional = True
active = True active = True

View file

@ -74,6 +74,7 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
resolution_width = instance.data["resolutionWidth"] resolution_width = instance.data["resolutionWidth"]
resolution_height = instance.data["resolutionHeight"] resolution_height = instance.data["resolutionHeight"]
render_dir = os.path.normpath(os.path.dirname(render_path)) render_dir = os.path.normpath(os.path.dirname(render_path))
render_path = os.path.normpath(render_path)
script_name = os.path.basename(script_path) script_name = os.path.basename(script_path)
jobname = "%s - %s" % (script_name, instance.name) jobname = "%s - %s" % (script_name, instance.name)
@ -98,6 +99,7 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
args = [ args = [
f"<QUOTE>{script_path}<QUOTE>", f"<QUOTE>{script_path}<QUOTE>",
"-a", "-a",
"-16",
"-s <STARTFRAME>", "-s <STARTFRAME>",
"-e <ENDFRAME>", "-e <ENDFRAME>",
f"-d <QUOTE>{render_dir}<QUOTE>", f"-d <QUOTE>{render_dir}<QUOTE>",
@ -135,8 +137,10 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin):
# Optional, enable double-click to preview rendered # Optional, enable double-click to preview rendered
# frames from Deadline Monitor # frames from Deadline Monitor
"OutputFilename0": output_filename_0.replace("\\", "/") "OutputFilename0": output_filename_0.replace("\\", "/"),
# # Asset dependency to wait for at least the scene file to sync.
# "AssetDependency0": script_path
}, },
"PluginInfo": { "PluginInfo": {
# Input # Input

View file

@ -96,6 +96,6 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
task_entity = None task_entity = None
self.log.warning("Task name is not set.") self.log.warning("Task name is not set.")
context.data["ftrackProject"] = asset_entity context.data["ftrackProject"] = project_entity
context.data["ftrackEntity"] = asset_entity context.data["ftrackEntity"] = asset_entity
context.data["ftrackTask"] = task_entity context.data["ftrackTask"] = task_entity

View file

@ -54,8 +54,52 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
self.log.debug(query) self.log.debug(query)
return query return query
def process(self, instance): def _set_task_status(self, instance, task_entity, session):
project_entity = instance.context.data.get("ftrackProject")
if not project_entity:
self.log.info("Task status won't be set, project is not known.")
return
if not task_entity:
self.log.info("Task status won't be set, task is not known.")
return
status_name = instance.context.data.get("ftrackStatus")
if not status_name:
self.log.info("Ftrack status name is not set.")
return
self.log.debug(
"Ftrack status name will be (maybe) set to \"{}\"".format(
status_name
)
)
project_schema = project_entity["project_schema"]
task_statuses = project_schema.get_statuses(
"Task", task_entity["type_id"]
)
task_statuses_by_low_name = {
status["name"].lower(): status for status in task_statuses
}
status = task_statuses_by_low_name.get(status_name.lower())
if not status:
self.log.warning((
"Task status \"{}\" won't be set,"
" status is now allowed on task type \"{}\"."
).format(status_name, task_entity["type"]["name"]))
return
self.log.info("Setting task status to \"{}\"".format(status_name))
task_entity["status"] = status
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)
def process(self, instance):
session = instance.context.data["ftrackSession"] session = instance.context.data["ftrackSession"]
if instance.data.get("ftrackTask"): if instance.data.get("ftrackTask"):
task = instance.data["ftrackTask"] task = instance.data["ftrackTask"]
@ -78,9 +122,11 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
info_msg += ", metadata: {metadata}." info_msg += ", metadata: {metadata}."
used_asset_versions = [] used_asset_versions = []
self._set_task_status(instance, task, session)
# Iterate over components and publish # Iterate over components and publish
for data in instance.data.get("ftrackComponentsList", []): for data in instance.data.get("ftrackComponentsList", []):
# AssetType # AssetType
# Get existing entity. # Get existing entity.
assettype_data = {"short": "upload"} assettype_data = {"short": "upload"}
@ -94,9 +140,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
# Create a new entity if none exits. # Create a new entity if none exits.
if not assettype_entity: if not assettype_entity:
assettype_entity = session.create("AssetType", assettype_data) assettype_entity = session.create("AssetType", assettype_data)
self.log.debug( self.log.debug("Created new AssetType with data: {}".format(
"Created new AssetType with data: ".format(assettype_data) assettype_data
) ))
# Asset # Asset
# Get existing entity. # Get existing entity.
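The new _set_task_status method above resolves the target status case-insensitively against the project schema and commits with a rollback on failure. A condensed sketch of those two steps (find_task_status and commit_or_rollback are hypothetical helper names; session is assumed to be an ftrack_api session as in the plugin):

import sys
import six

def find_task_status(project_schema, task_entity, status_name):
    """Return the matching status for the task's type, ignoring case."""
    statuses = project_schema.get_statuses("Task", task_entity["type_id"])
    by_low_name = {status["name"].lower(): status for status in statuses}
    return by_low_name.get(status_name.lower())

def commit_or_rollback(session):
    """Commit the session; on failure roll back and re-raise the error."""
    try:
        session.commit()
    except Exception:
        tp, value, tb = sys.exc_info()
        session.rollback()
        six.reraise(tp, value, tb)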

View file

@ -1,9 +1,13 @@
import sys import sys
import six import six
import pyblish.api import pyblish.api
from avalon import io from avalon import io
try:
from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_AUTO_SYNC
except Exception:
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
""" """
@ -39,15 +43,32 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
if "hierarchyContext" not in context.data: if "hierarchyContext" not in context.data:
return return
self.session = self.context.data["ftrackSession"]
project_name = self.context.data["projectEntity"]["name"]
query = 'Project where full_name is "{}"'.format(project_name)
project = self.session.query(query).one()
auto_sync_state = project[
"custom_attributes"][CUST_ATTR_AUTO_SYNC]
if not io.Session: if not io.Session:
io.install() io.install()
self.ft_project = None self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"] input_data = context.data["hierarchyContext"]
self.import_to_ftrack(input_data) # temporarily disable ftrack project's auto-syncing
if auto_sync_state:
self.auto_sync_off(project)
try:
# import ftrack hierarchy
self.import_to_ftrack(input_data)
except Exception:
raise
finally:
if auto_sync_state:
self.auto_sync_on(project)
def import_to_ftrack(self, input_data, parent=None): def import_to_ftrack(self, input_data, parent=None):
for entity_name in input_data: for entity_name in input_data:
@ -217,3 +238,28 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
six.reraise(tp, value, tb) six.reraise(tp, value, tb)
return entity return entity
def auto_sync_off(self, project):
project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False
self.log.info("Ftrack autosync swithed off")
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
raise
def auto_sync_on(self, project):
project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True
self.log.info("Ftrack autosync swithed on")
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
raise
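The plugin now wraps the hierarchy import in try/finally so the project's auto-sync custom attribute is always restored. The same idea can be expressed as a context manager; a sketch under the assumption that project behaves like an ftrack entity and the session supports commit()/rollback() (auto_sync_paused is a hypothetical name):

from contextlib import contextmanager

CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"  # fallback key used above

@contextmanager
def auto_sync_paused(project, session, log=None):
    """Switch the project's auto-sync off for the duration of the block
    and switch it back on afterwards, even if the block raises."""
    was_on = project["custom_attributes"][CUST_ATTR_AUTO_SYNC]
    if was_on:
        project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False
        session.commit()
        if log:
            log.info("Ftrack autosync switched off")
    try:
        yield
    finally:
        if was_on:
            project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True
            session.commit()
            if log:
                log.info("Ftrack autosync switched on")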

View file

@ -516,12 +516,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
instance: the instance to integrate instance: the instance to integrate
""" """
transfers = instance.data.get("transfers", list()) transfers = instance.data.get("transfers", list())
for src, dest in transfers:
if os.path.normpath(src) != os.path.normpath(dest):
self.copy_file(src, dest)
transfers = instance.data.get("transfers", list())
for src, dest in transfers: for src, dest in transfers:
self.copy_file(src, dest) self.copy_file(src, dest)
@ -559,12 +553,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# copy file with speedcopy and check if sizes of files are symmetrical # copy file with speedcopy and check if sizes of files are symmetrical
while True: while True:
import shutil
try: try:
copyfile(src, dst) copyfile(src, dst)
except (OSError, AttributeError) as e: except shutil.SameFileError as sfe:
self.log.warning(e) self.log.critical("files are the same {} to {}".format(src, dst))
# try it again with shutil os.remove(dst)
import shutil
try: try:
shutil.copyfile(src, dst) shutil.copyfile(src, dst)
self.log.debug("Copying files with shutil...") self.log.debug("Copying files with shutil...")
@ -748,6 +742,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
value += 1 value += 1
if value > highest_value: if value > highest_value:
matching_profiles = {}
highest_value = value highest_value = value
if value == highest_value: if value == highest_value:

View file

@ -12,7 +12,15 @@ from avalon.vendor import requests, clique
import pyblish.api import pyblish.api
def _get_script(): def _get_script(path):
# pass input path if exists
if path:
if os.path.exists(path):
return str(path)
else:
raise FileNotFoundError(path)
"""Get path to the image sequence script.""" """Get path to the image sequence script."""
try: try:
from pathlib import Path from pathlib import Path
@ -192,6 +200,38 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
families_transfer = ["render3d", "render2d", "ftrack", "slate"] families_transfer = ["render3d", "render2d", "ftrack", "slate"]
plugin_python_version = "3.7" plugin_python_version = "3.7"
# script path for publish_filesequence.py
publishing_script = None
def _create_metadata_path(self, instance):
ins_data = instance.data
# Ensure output dir exists
output_dir = ins_data.get("publishRenderFolder", ins_data["outputDir"])
try:
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
except OSError:
# directory is not available
self.log.warning("Path is unreachable: `{}`".format(output_dir))
metadata_filename = "{}_metadata.json".format(ins_data["subset"])
metadata_path = os.path.join(output_dir, metadata_filename)
# Convert output dir to `{root}/rest/of/path/...` with Anatomy
success, roothless_mtdt_p = self.anatomy.find_root_template_from_path(
metadata_path)
if not success:
# `rootless_path` is not set to `output_dir` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(output_dir))
roothless_mtdt_p = metadata_path
return (metadata_path, roothless_mtdt_p)
def _submit_deadline_post_job(self, instance, job): def _submit_deadline_post_job(self, instance, job):
"""Submit publish job to Deadline. """Submit publish job to Deadline.
@ -205,17 +245,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
job_name = "Publish - {subset}".format(subset=subset) job_name = "Publish - {subset}".format(subset=subset)
output_dir = instance.data["outputDir"] output_dir = instance.data["outputDir"]
# Convert output dir to `{root}/rest/of/path/...` with Anatomy
success, rootless_path = (
self.anatomy.find_root_template_from_path(output_dir)
)
if not success:
# `rootless_path` is not set to `output_dir` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(output_dir))
rootless_path = output_dir
# Generate the payload for Deadline submission # Generate the payload for Deadline submission
payload = { payload = {
@ -239,7 +268,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
}, },
"PluginInfo": { "PluginInfo": {
"Version": self.plugin_python_version, "Version": self.plugin_python_version,
"ScriptFile": _get_script(), "ScriptFile": _get_script(self.publishing_script),
"Arguments": "", "Arguments": "",
"SingleFrameOnly": "True", "SingleFrameOnly": "True",
}, },
@ -249,11 +278,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Transfer the environment from the original job to this dependent # Transfer the environment from the original job to this dependent
# job so they use the same environment # job so they use the same environment
metadata_filename = "{}_metadata.json".format(subset) metadata_path, roothless_metadata_path = self._create_metadata_path(
metadata_path = os.path.join(rootless_path, metadata_filename) instance)
environment = job["Props"].get("Env", {}) environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = metadata_path environment["PYPE_METADATA_FILE"] = roothless_metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"] environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1" environment["PYPE_LOG_NO_COLORS"] = "1"
try: try:
@ -488,7 +517,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if bake_render_path: if bake_render_path:
preview = False preview = False
if "celaction" in self.hosts: if "celaction" in pyblish.api.registered_hosts():
preview = True preview = True
staging = os.path.dirname(list(collection)[0]) staging = os.path.dirname(list(collection)[0])
@ -847,14 +876,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
} }
publish_job.update({"ftrack": ftrack}) publish_job.update({"ftrack": ftrack})
# Ensure output dir exists metadata_path, roothless_metadata_path = self._create_metadata_path(
output_dir = instance.data["outputDir"] instance)
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(output_dir, metadata_filename)
self.log.info("Writing json file: {}".format(metadata_path)) self.log.info("Writing json file: {}".format(metadata_path))
with open(metadata_path, "w") as f: with open(metadata_path, "w") as f:
json.dump(publish_job, f, indent=4, sort_keys=True) json.dump(publish_job, f, indent=4, sort_keys=True)

View file

@ -9,7 +9,7 @@ from avalon import api, harmony
class ImportTemplateLoader(api.Loader): class ImportTemplateLoader(api.Loader):
"""Import templates.""" """Import templates."""
families = ["harmony.template"] families = ["harmony.template", "workfile"]
representations = ["*"] representations = ["*"]
label = "Import Template" label = "Import Template"

View file

@ -111,13 +111,22 @@ class ExtractRender(pyblish.api.InstancePlugin):
# Generate mov. # Generate mov.
mov_path = os.path.join(path, instance.data["name"] + ".mov") mov_path = os.path.join(path, instance.data["name"] + ".mov")
args = [ if os.path.isfile(audio_path):
"ffmpeg", "-y", args = [
"-i", audio_path, "ffmpeg", "-y",
"-i", "-i", audio_path,
os.path.join(path, collection.head + "%04d" + collection.tail), "-i",
mov_path os.path.join(path, collection.head + "%04d" + collection.tail),
] mov_path
]
else:
args = [
"ffmpeg", "-y",
"-i",
os.path.join(path, collection.head + "%04d" + collection.tail),
mov_path
]
process = subprocess.Popen( process = subprocess.Popen(
args, args,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
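The extractor above now builds the ffmpeg argument list in two branches so a missing soundtrack no longer produces an invalid command. A minimal sketch of the same branching (build_mov_args is a hypothetical helper; the frame pattern and output path are assumed to be prepared as in the plugin):

import os

def build_mov_args(frames_pattern, mov_path, audio_path=None):
    """Assemble ffmpeg arguments, adding the audio input only when it exists."""
    args = ["ffmpeg", "-y"]
    if audio_path and os.path.isfile(audio_path):
        args += ["-i", audio_path]
    args += ["-i", frames_pattern, mov_path]
    return args

The resulting list is then passed to subprocess.Popen exactly as in the hunk above.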

View file

@ -0,0 +1,37 @@
import json
import os
import pyblish.api
import avalon.harmony
import pype.hosts.harmony
class ValidateAudio(pyblish.api.InstancePlugin):
"""Ensures that there is an audio file in the scene. If you are sure that you want to send render without audio, you can disable this validator before clicking on "publish" """
order = pyblish.api.ValidatorOrder
label = "Validate Audio"
families = ["render"]
hosts = ["harmony"]
optional = True
def process(self, instance):
# Collect scene data.
func = """function func(write_node)
{
return [
sound.getSoundtrackAll().path()
]
}
func
"""
result = avalon.harmony.send(
{"function": func, "args": [instance[0]]}
)["result"]
audio_path = result[0]
msg = "You are missing audio file:\n{}".format(audio_path)
assert os.path.isfile(audio_path), msg

View file

@ -103,9 +103,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
instance (:class:`pyblish.api.Instance`): published instance. instance (:class:`pyblish.api.Instance`): published instance.
""" """
invalid = self.get_invalid(instance, invalid = self.get_invalid(instance)
self.SUFFIX_NAMING_TABLE,
self.ALLOW_IF_NOT_IN_SUFFIX_TABLE)
if invalid: if invalid:
raise ValueError("Incorrectly named geometry " raise ValueError("Incorrectly named geometry "
"transforms: {0}".format(invalid)) "transforms: {0}".format(invalid))

View file

@ -49,6 +49,24 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
render_path = instance.data['path'] render_path = instance.data['path']
script_path = context.data["currentFile"] script_path = context.data["currentFile"]
for item in context:
if "workfile" in item.data["families"]:
msg = "Workfile (scene) must be published along"
assert item.data["publish"] is True, msg
template_data = item.data.get("anatomyData")
rep = item.data.get("representations")[0].get("name")
template_data["representation"] = rep
template_data["ext"] = rep
template_data["comment"] = None
anatomy_filled = context.data["anatomy"].format(template_data)
template_filled = anatomy_filled["publish"]["path"]
script_path = os.path.normpath(template_filled)
self.log.info(
"Using published scene for render {}".format(script_path)
)
# exception for slate workflow # exception for slate workflow
if "slate" in instance.data["families"]: if "slate" in instance.data["families"]:
self._frame_start -= 1 self._frame_start -= 1

View file

@ -42,6 +42,8 @@ class ExtractReview(pype.api.Extractor):
staging_dir, photoshop.com_objects.JPEGSaveOptions(), True staging_dir, photoshop.com_objects.JPEGSaveOptions(), True
) )
ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
instance.data["representations"].append({ instance.data["representations"].append({
"name": "jpg", "name": "jpg",
"ext": "jpg", "ext": "jpg",
@ -53,13 +55,13 @@ class ExtractReview(pype.api.Extractor):
# Generate thumbnail. # Generate thumbnail.
thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg") thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
args = [ args = [
"ffmpeg", "-y", ffmpeg_path, "-y",
"-i", os.path.join(staging_dir, output_image), "-i", os.path.join(staging_dir, output_image),
"-vf", "scale=300:-1", "-vf", "scale=300:-1",
"-vframes", "1", "-vframes", "1",
thumbnail_path thumbnail_path
] ]
output = pype.lib._subprocess(args, cwd=os.environ["FFMPEG_PATH"]) output = pype.lib._subprocess(args)
self.log.debug(output) self.log.debug(output)
@ -74,12 +76,12 @@ class ExtractReview(pype.api.Extractor):
# Generate mov. # Generate mov.
mov_path = os.path.join(staging_dir, "review.mov") mov_path = os.path.join(staging_dir, "review.mov")
args = [ args = [
"ffmpeg", "-y", ffmpeg_path, "-y",
"-i", os.path.join(staging_dir, output_image), "-i", os.path.join(staging_dir, output_image),
"-vframes", "1", "-vframes", "1",
mov_path mov_path
] ]
output = pype.lib._subprocess(args, cwd=os.environ["FFMPEG_PATH"]) output = pype.lib._subprocess(args)
self.log.debug(output) self.log.debug(output)
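Both ffmpeg invocations above now use the executable resolved by pype.lib.get_ffmpeg_tool_path instead of relying on PATH or a working-directory override. A minimal sketch of the thumbnail step under that assumption (make_thumbnail is a hypothetical helper; plain subprocess is used here instead of pype.lib._subprocess):

import subprocess

def make_thumbnail(ffmpeg_path, source_image, thumbnail_path, width=300):
    """Render a single-frame thumbnail scaled to `width` px wide."""
    args = [
        ffmpeg_path, "-y",
        "-i", source_image,
        "-vf", "scale={}:-1".format(width),
        "-vframes", "1",
        thumbnail_path,
    ]
    return subprocess.check_output(args)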

View file

@ -1,52 +0,0 @@
import sys
import pyblish.api
import pype.api
import avalon.api
import six
class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
"""Ensure that autosync value in ftrack project is set to False.
If it is set to True while the event server runs the Sync to Avalon
event, the integration to Avalon would be overridden.
"""
order = pyblish.api.ValidatorOrder
families = ['clip']
label = 'Ftrack project\'s auto sync off'
actions = [pype.api.RepairAction]
def process(self, context):
session = context.data["ftrackSession"]
project_name = avalon.api.Session["AVALON_PROJECT"]
query = 'Project where full_name is "{}"'.format(project_name)
project = session.query(query).one()
invalid = self.get_invalid(context)
assert not invalid, (
"Ftrack Project has 'Auto sync' set to On."
" That may cause issues during integration."
)
@staticmethod
def get_invalid(context):
session = context.data["ftrackSession"]
project_name = avalon.api.Session["AVALON_PROJECT"]
query = 'Project where full_name is "{}"'.format(project_name)
project = session.query(query).one()
return project
@classmethod
def repair(cls, context):
session = context.data["ftrackSession"]
invalid = cls.get_invalid(context)
invalid['custom_attributes']['avalon_auto_sync'] = False
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
raise

View file

Image file changed (binary): 6.8 KiB before and after

View file

Image file changed (binary): 22 KiB before and after

View file

Image file changed (binary): 7.8 KiB before and after

View file

Image file changed (binary): 15 KiB before and after

View file

Image file changed (binary): 408 B before and after

View file

Image file changed (binary): 2.4 KiB before and after

View file

Image file changed (binary): 205 B before and after

View file

@ -183,7 +183,18 @@ class Controller(QtCore.QObject):
plugins = pyblish.api.discover() plugins = pyblish.api.discover()
targets = pyblish.logic.registered_targets() or ["default"] targets = pyblish.logic.registered_targets() or ["default"]
self.plugins = pyblish.logic.plugins_by_targets(plugins, targets) plugins_by_targets = pyblish.logic.plugins_by_targets(plugins, targets)
_plugins = []
for plugin in plugins_by_targets:
# Skip plugin if it is not optional and not active
if (
not getattr(plugin, "optional", False)
and not getattr(plugin, "active", True)
):
continue
_plugins.append(plugin)
self.plugins = _plugins
def on_published(self): def on_published(self):
if self.is_running: if self.is_running:
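The controller now filters out plugins that are neither optional nor active, since the UI cannot re-enable them and they would only appear as permanently skipped. The same filter as a standalone function (filter_publish_plugins is a hypothetical name; it relies only on the conventional optional/active attributes of pyblish plugins):

def filter_publish_plugins(plugins):
    """Drop plugins that are not optional and not active."""
    kept = []
    for plugin in plugins:
        if (
            not getattr(plugin, "optional", False)
            and not getattr(plugin, "active", True)
        ):
            continue
        kept.append(plugin)
    return kept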

View file

@ -4,6 +4,11 @@ import platform
from avalon import style from avalon import style
from Qt import QtCore, QtGui, QtWidgets, QtSvg from Qt import QtCore, QtGui, QtWidgets, QtSvg
from pype.api import config, Logger, resources from pype.api import config, Logger, resources
import pype.version
try:
import configparser
except Exception:
import ConfigParser as configparser
class TrayManager: class TrayManager:
@ -100,6 +105,8 @@ class TrayManager:
if items and self.services_submenu is not None: if items and self.services_submenu is not None:
self.add_separator(self.tray_widget.menu) self.add_separator(self.tray_widget.menu)
self._add_version_item()
# Add Exit action to menu # Add Exit action to menu
aExit = QtWidgets.QAction("&Exit", self.tray_widget) aExit = QtWidgets.QAction("&Exit", self.tray_widget)
aExit.triggered.connect(self.tray_widget.exit) aExit.triggered.connect(self.tray_widget.exit)
@ -109,6 +116,34 @@ class TrayManager:
self.connect_modules() self.connect_modules()
self.start_modules() self.start_modules()
def _add_version_item(self):
config_file_path = os.path.join(
os.environ["PYPE_SETUP_PATH"], "pypeapp", "config.ini"
)
default_config = {}
if os.path.exists(config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
try:
default_config = config["CLIENT"]
except Exception:
pass
subversion = default_config.get("subversion")
client_name = default_config.get("client_name")
version_string = pype.version.__version__
if subversion:
version_string += " ({})".format(subversion)
if client_name:
version_string += ", {}".format(client_name)
version_action = QtWidgets.QAction(version_string, self.tray_widget)
self.tray_widget.menu.addAction(version_action)
self.add_separator(self.tray_widget.menu)
def process_items(self, items, parent_menu): def process_items(self, items, parent_menu):
""" Loop through items and add them to parent_menu. """ Loop through items and add them to parent_menu.
@ -203,7 +238,7 @@ class TrayManager:
obj.set_qaction(action, self.icon_failed) obj.set_qaction(action, self.icon_failed)
self.modules[name] = obj self.modules[name] = obj
self.log.info("{} - Module imported".format(title)) self.log.info("{} - Module imported".format(title))
except ImportError as ie: except Exception as exc:
if self.services_submenu is None: if self.services_submenu is None:
self.services_submenu = QtWidgets.QMenu( self.services_submenu = QtWidgets.QMenu(
'Services', self.tray_widget.menu 'Services', self.tray_widget.menu
@ -212,7 +247,7 @@ class TrayManager:
action.setIcon(self.icon_failed) action.setIcon(self.icon_failed)
self.services_submenu.addAction(action) self.services_submenu.addAction(action)
self.log.warning( self.log.warning(
"{} - Module import Error: {}".format(title, str(ie)), "{} - Module import Error: {}".format(title, str(exc)),
exc_info=True exc_info=True
) )
return False return False
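The _add_version_item hunk above composes the tray's version label from pype.version plus optional subversion and client_name values read from pypeapp/config.ini. A condensed sketch of that string assembly (compose_version_label is a hypothetical helper; the [CLIENT] section and its keys are taken from the hunk):

import os
import configparser

def compose_version_label(version, config_file_path):
    """Append optional subversion / client name info to the version string."""
    client_info = {}
    if os.path.exists(config_file_path):
        parser = configparser.ConfigParser()
        parser.read(config_file_path)
        if parser.has_section("CLIENT"):
            client_info = dict(parser["CLIENT"])
    if client_info.get("subversion"):
        version += " ({})".format(client_info["subversion"])
    if client_info.get("client_name"):
        version += ", {}".format(client_info["client_name"])
    return version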

View file

@ -1 +1 @@
__version__ = "2.10.0" __version__ = "2.11.0"

Binary file not shown.