Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-26 05:42:15 +01:00

Merge develop

This commit is contained in:
parent 6405700ed9
commit 669a2256ef

248 changed files with 13531 additions and 8058 deletions
@@ -6,34 +6,22 @@ import datetime
import requests
from .constants import (
    CLOCKIFY_ENDPOINT,
    ADMIN_PERMISSION_NAMES
    ADMIN_PERMISSION_NAMES,
)

from openpype.lib.local_settings import OpenPypeSecureRegistry


def time_check(obj):
    if obj.request_counter < 10:
        obj.request_counter += 1
        return

    wait_time = 1 - (time.time() - obj.request_time)
    if wait_time > 0:
        time.sleep(wait_time)

    obj.request_time = time.time()
    obj.request_counter = 0
from openpype.lib import Logger


class ClockifyAPI:
    log = Logger.get_logger(__name__)

    def __init__(self, api_key=None, master_parent=None):
        self.workspace_name = None
        self.workspace_id = None
        self.master_parent = master_parent
        self.api_key = api_key
        self.request_counter = 0
        self.request_time = time.time()

        self._workspace_id = None
        self._user_id = None
        self._secure_registry = None

    @property
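The removed time_check helper is a small client-side rate limiter: it lets ten requests through, then sleeps out whatever remains of a one-second window before resetting the counter and timestamp. A minimal standalone sketch of the same idea, under the ten-requests-per-window budget used above (the class name is illustrative):

import time


class RateLimitedClient:
    """Allow bursts of 10 calls, then wait out the rest of a 1s window."""

    def __init__(self):
        self.request_counter = 0
        self.request_time = time.time()

    def throttle(self):
        # Same logic as the removed time_check(obj) helper above.
        if self.request_counter < 10:
            self.request_counter += 1
            return

        wait_time = 1 - (time.time() - self.request_time)
        if wait_time > 0:
            time.sleep(wait_time)

        self.request_time = time.time()
        self.request_counter = 0


client = RateLimitedClient()
for _ in range(25):
    client.throttle()  # would precede each HTTP request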
@@ -44,11 +32,19 @@ class ClockifyAPI:
    @property
    def headers(self):
        return {"X-Api-Key": self.api_key}
        return {"x-api-key": self.api_key}

    @property
    def workspace_id(self):
        return self._workspace_id

    @property
    def user_id(self):
        return self._user_id

    def verify_api(self):
        for key, value in self.headers.items():
            if value is None or value.strip() == '':
            if value is None or value.strip() == "":
                return False
        return True
@@ -59,65 +55,55 @@ class ClockifyAPI:
        if api_key is not None and self.validate_api_key(api_key) is True:
            self.api_key = api_key
            self.set_workspace()
            self.set_user_id()
            if self.master_parent:
                self.master_parent.signed_in()
            return True
        return False

    def validate_api_key(self, api_key):
        test_headers = {'X-Api-Key': api_key}
        action_url = 'workspaces/'
        time_check(self)
        test_headers = {"x-api-key": api_key}
        action_url = "user"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=test_headers
            CLOCKIFY_ENDPOINT + action_url, headers=test_headers
        )
        if response.status_code != 200:
            return False
        return True

    def validate_workspace_perm(self, workspace_id=None):
        user_id = self.get_user_id()
    def validate_workspace_permissions(self, workspace_id=None, user_id=None):
        if user_id is None:
            self.log.info("No user_id found during validation")
            return False
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "/workspaces/{}/users/{}/permissions".format(
            workspace_id, user_id
        )
        time_check(self)
        action_url = f"workspaces/{workspace_id}/users?includeRoles=1"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        user_permissions = response.json()
        for perm in user_permissions:
            if perm['name'] in ADMIN_PERMISSION_NAMES:
        data = response.json()
        for user in data:
            if user.get("id") == user_id:
                roles_data = user.get("roles")
                for entities in roles_data:
                    if entities.get("role") in ADMIN_PERMISSION_NAMES:
                        return True
        return False

    def get_user_id(self):
        action_url = 'v1/user/'
        time_check(self)
        action_url = "user"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        # this regex is necessary: UNICODE strings are crashing
        # during json serialization
        id_regex = '\"{1}id\"{1}\:{1}\"{1}\w+\"{1}'
        result = re.findall(id_regex, str(response.content))
        if len(result) != 1:
            # replace with log and better message?
            print('User ID was not found (this is a BUG!!!)')
            return None
        return json.loads('{'+result[0]+'}')['id']
        result = response.json()
        user_id = result.get("id", None)

        return user_id

    def set_workspace(self, name=None):
        if name is None:
            name = os.environ.get('CLOCKIFY_WORKSPACE', None)
            name = os.environ.get("CLOCKIFY_WORKSPACE", None)
        self.workspace_name = name
        self.workspace_id = None
        if self.workspace_name is None:
            return
        try:
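The validate_api_key and get_user_id rewrites both rely on the v1 user endpoint returning the caller's profile as plain JSON, which is what makes the old regex-over-bytes workaround unnecessary. A condensed sketch of that round trip, assuming the endpoint constant defined later in this commit (error handling kept minimal on purpose):

import requests

CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"


def fetch_user_id(api_key):
    """Return the Clockify user id for api_key, or None if it is invalid."""
    response = requests.get(
        CLOCKIFY_ENDPOINT + "user",
        headers={"x-api-key": api_key},
    )
    if response.status_code != 200:
        return None
    return response.json().get("id")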
@@ -125,7 +111,7 @@ class ClockifyAPI:
        except Exception:
            result = False
        if result is not False:
            self.workspace_id = result
            self._workspace_id = result
            if self.master_parent is not None:
                self.master_parent.start_timer_check()
            return True

@@ -139,6 +125,14 @@ class ClockifyAPI:
            return all_workspaces[name]
        return False

    def set_user_id(self):
        try:
            user_id = self.get_user_id()
        except Exception:
            user_id = None
        if user_id is not None:
            self._user_id = user_id

    def get_api_key(self):
        return self.secure_registry.get_item("api_key", None)

@@ -146,11 +140,9 @@ class ClockifyAPI:
        self.secure_registry.set_item("api_key", api_key)

    def get_workspaces(self):
        action_url = 'workspaces/'
        time_check(self)
        action_url = "workspaces/"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return {
            workspace["name"]: workspace["id"] for workspace in response.json()
@@ -159,27 +151,22 @@ class ClockifyAPI:
    def get_projects(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/'.format(workspace_id)
        time_check(self)
        action_url = f"workspaces/{workspace_id}/projects"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )

        return {
            project["name"]: project["id"] for project in response.json()
        }
        if response.status_code != 403:
            result = response.json()
            return {project["name"]: project["id"] for project in result}

    def get_project_by_id(self, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/{}/'.format(
        action_url = "workspaces/{}/projects/{}".format(
            workspace_id, project_id
        )
        time_check(self)
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )

        return response.json()
@@ -187,32 +174,24 @@ class ClockifyAPI:
    def get_tags(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/tags/'.format(workspace_id)
        time_check(self)
        action_url = "workspaces/{}/tags".format(workspace_id)
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )

        return {
            tag["name"]: tag["id"] for tag in response.json()
        }
        return {tag["name"]: tag["id"] for tag in response.json()}

    def get_tasks(self, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/{}/tasks/'.format(
        action_url = "workspaces/{}/projects/{}/tasks".format(
            workspace_id, project_id
        )
        time_check(self)
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )

        return {
            task["name"]: task["id"] for task in response.json()
        }
        return {task["name"]: task["id"] for task in response.json()}

    def get_workspace_id(self, workspace_name):
        all_workspaces = self.get_workspaces()
@@ -236,48 +215,64 @@ class ClockifyAPI:
            return None
        return all_tasks[tag_name]

    def get_task_id(
        self, task_name, project_id, workspace_id=None
    ):
    def get_task_id(self, task_name, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        all_tasks = self.get_tasks(
            project_id, workspace_id
        )
        all_tasks = self.get_tasks(project_id, workspace_id)
        if task_name not in all_tasks:
            return None
        return all_tasks[task_name]

    def get_current_time(self):
        return str(datetime.datetime.utcnow().isoformat())+'Z'
        return str(datetime.datetime.utcnow().isoformat()) + "Z"

    def start_time_entry(
        self, description, project_id, task_id=None, tag_ids=[],
        workspace_id=None, billable=True
        self,
        description,
        project_id,
        task_id=None,
        tag_ids=None,
        workspace_id=None,
        user_id=None,
        billable=True,
    ):
        # Workspace
        if workspace_id is None:
            workspace_id = self.workspace_id
        # User ID
        if user_id is None:
            user_id = self._user_id

        # get running timer to check if we need to start it
        current_timer = self.get_in_progress()

        # Check if another timer is currently running with the same values
        current = self.get_in_progress(workspace_id)
        if current is not None:
        # Do not restart the timer if it is already running for the current task
        if current_timer:
            current_timer_hierarchy = current_timer.get("description")
            current_project_id = current_timer.get("projectId")
            current_task_id = current_timer.get("taskId")
            if (
                current.get("description", None) == description and
                current.get("projectId", None) == project_id and
                current.get("taskId", None) == task_id
                description == current_timer_hierarchy
                and project_id == current_project_id
                and task_id == current_task_id
            ):
                self.log.info(
                    "Timer for the current project is already running"
                )
                self.bool_timer_run = True
                return self.bool_timer_run
            self.finish_time_entry(workspace_id)
            self.finish_time_entry()

        # Convert billable to strings
        if billable:
            billable = 'true'
            billable = "true"
        else:
            billable = 'false'
            billable = "false"
        # Rest API Action
        action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
        action_url = "workspaces/{}/user/{}/time-entries".format(
            workspace_id, user_id
        )
        start = self.get_current_time()
        body = {
            "start": start,
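The reworked start_time_entry posts to the user-scoped v1 route instead of the old workspace-wide timeEntries path. A trimmed sketch of just the HTTP call, with placeholder ids standing in for values normally resolved through get_workspaces() and get_user_id() (field names mirror the body assembled above):

import requests

CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"
headers = {"x-api-key": "<api-key>"}        # placeholder
workspace_id, user_id = "ws123", "user456"  # placeholders

body = {
    "start": "2023-01-01T12:00:00Z",  # UTC ISO timestamp with "Z" suffix
    "billable": "true",
    "description": "MyProject/shot010/compositing",
    "projectId": "proj789",           # placeholder
    "taskId": None,
    "tagIds": [],
}
response = requests.post(
    CLOCKIFY_ENDPOINT
    + "workspaces/{}/user/{}/time-entries".format(workspace_id, user_id),
    headers=headers,
    json=body,
)
print(response.status_code < 300)  # True on success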
@@ -285,169 +280,135 @@ class ClockifyAPI:
            "description": description,
            "projectId": project_id,
            "taskId": task_id,
            "tagIds": tag_ids
            "tagIds": tag_ids,
        }
        time_check(self)
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )

        success = False
        if response.status_code < 300:
            success = True
        return success
            return True
        return False

    def get_in_progress(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/timeEntries/inProgress'.format(
            workspace_id
        )
        time_check(self)
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
        )
    def _get_current_timer_values(self, response):
        if response is None:
            return
        try:
            output = response.json()
        except json.decoder.JSONDecodeError:
            output = None
        return output
            return None
        if output and isinstance(output, list):
            return output[0]
        return None

    def finish_time_entry(self, workspace_id=None):
    def get_in_progress(self, user_id=None, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        current = self.get_in_progress(workspace_id)
        if current is None:
            return
        if user_id is None:
            user_id = self.user_id

        current_id = current["id"]
        action_url = 'workspaces/{}/timeEntries/{}'.format(
            workspace_id, current_id
        action_url = (
            f"workspaces/{workspace_id}/user/"
            f"{user_id}/time-entries?in-progress=1"
        )
        body = {
            "start": current["timeInterval"]["start"],
            "billable": current["billable"],
            "description": current["description"],
            "projectId": current["projectId"],
            "taskId": current["taskId"],
            "tagIds": current["tagIds"],
            "end": self.get_current_time()
        }
        time_check(self)
        response = requests.put(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return self._get_current_timer_values(response)

    def finish_time_entry(self, workspace_id=None, user_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        if user_id is None:
            user_id = self.user_id
        current_timer = self.get_in_progress()
        if not current_timer:
            return
        action_url = "workspaces/{}/user/{}/time-entries".format(
            workspace_id, user_id
        )
        body = {"end": self.get_current_time()}
        response = requests.patch(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def get_time_entries(
        self, workspace_id=None, quantity=10
    ):
    def get_time_entries(self, workspace_id=None, user_id=None, quantity=10):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
        time_check(self)
        if user_id is None:
            user_id = self.user_id
        action_url = "workspaces/{}/user/{}/time-entries".format(
            workspace_id, user_id
        )
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return response.json()[:quantity]

    def remove_time_entry(self, tid, workspace_id=None):
    def remove_time_entry(self, tid, workspace_id=None, user_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/timeEntries/{}'.format(
            workspace_id, tid
        action_url = "workspaces/{}/user/{}/time-entries/{}".format(
            workspace_id, user_id, tid
        )
        time_check(self)
        response = requests.delete(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return response.json()

    def add_project(self, name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/'.format(workspace_id)
        action_url = "workspaces/{}/projects".format(workspace_id)
        body = {
            "name": name,
            "clientId": "",
            "isPublic": "false",
            "estimate": {
                "estimate": 0,
                "type": "AUTO"
            },
            "estimate": {"estimate": 0, "type": "AUTO"},
            "color": "#f44336",
            "billable": "true"
            "billable": "true",
        }
        time_check(self)
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def add_workspace(self, name):
        action_url = 'workspaces/'
        action_url = "workspaces/"
        body = {"name": name}
        time_check(self)
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def add_task(
        self, name, project_id, workspace_id=None
    ):
    def add_task(self, name, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/projects/{}/tasks/'.format(
        action_url = "workspaces/{}/projects/{}/tasks".format(
            workspace_id, project_id
        )
        body = {
            "name": name,
            "projectId": project_id
        }
        time_check(self)
        body = {"name": name, "projectId": project_id}
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def add_tag(self, name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = 'workspaces/{}/tags'.format(workspace_id)
        body = {
            "name": name
        }
        time_check(self)
        action_url = "workspaces/{}/tags".format(workspace_id)
        body = {"name": name}
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
            json=body
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def delete_project(
        self, project_id, workspace_id=None
    ):
    def delete_project(self, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = '/workspaces/{}/projects/{}'.format(
        action_url = "/workspaces/{}/projects/{}".format(
            workspace_id, project_id
        )
        time_check(self)
        response = requests.delete(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
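In this shape, get_in_progress filters the user's time entries with the in-progress query flag (the helper above unwraps the one-element list), and finish_time_entry simply PATCHes an end timestamp instead of re-sending the whole entry. A hedged sketch of the stop flow under the same placeholder ids:

import datetime

import requests

CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"
headers = {"x-api-key": "<api-key>"}        # placeholder
workspace_id, user_id = "ws123", "user456"  # placeholders

entries_url = CLOCKIFY_ENDPOINT + (
    "workspaces/{}/user/{}/time-entries".format(workspace_id, user_id)
)

# Running entries come back as a list; empty means no timer in progress.
running = requests.get(entries_url + "?in-progress=1", headers=headers).json()

if running:
    end = datetime.datetime.utcnow().isoformat() + "Z"
    requests.patch(entries_url, headers=headers, json={"end": end})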
@@ -455,12 +416,12 @@ class ClockifyAPI:
        return response.json()

    def convert_input(
        self, entity_id, entity_name, mode='Workspace', project_id=None
        self, entity_id, entity_name, mode="Workspace", project_id=None
    ):
        if entity_id is None:
            error = False
            error_msg = 'Missing information "{}"'
            if mode.lower() == 'workspace':
            if mode.lower() == "workspace":
                if entity_id is None and entity_name is None:
                    if self.workspace_id is not None:
                        entity_id = self.workspace_id

@@ -471,14 +432,14 @@ class ClockifyAPI:
            else:
                if entity_id is None and entity_name is None:
                    error = True
                elif mode.lower() == 'project':
                elif mode.lower() == "project":
                    entity_id = self.get_project_id(entity_name)
                elif mode.lower() == 'task':
                elif mode.lower() == "task":
                    entity_id = self.get_task_id(
                        task_name=entity_name, project_id=project_id
                    )
                else:
                    raise TypeError('Unknown type')
                    raise TypeError("Unknown type")
            # Raise error
            if error:
                raise ValueError(error_msg.format(mode))

@@ -2,24 +2,13 @@ import os
import threading
import time

from openpype.modules import (
    OpenPypeModule,
    ITrayModule,
    IPluginPaths
)
from openpype.modules import OpenPypeModule, ITrayModule, IPluginPaths
from openpype.client import get_asset_by_name

from .clockify_api import ClockifyAPI
from .constants import (
    CLOCKIFY_FTRACK_USER_PATH,
    CLOCKIFY_FTRACK_SERVER_PATH
)
from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH


class ClockifyModule(
    OpenPypeModule,
    ITrayModule,
    IPluginPaths
):
class ClockifyModule(OpenPypeModule, ITrayModule, IPluginPaths):
    name = "clockify"

    def initialize(self, modules_settings):
@@ -33,18 +22,23 @@ class ClockifyModule(
        self.timer_manager = None
        self.MessageWidgetClass = None
        self.message_widget = None

        self.clockapi = ClockifyAPI(master_parent=self)
        self._clockify_api = None

        # TimersManager attributes
        # - set `timers_manager_connector` only in `tray_init`
        self.timers_manager_connector = None
        self._timers_manager_module = None

    @property
    def clockify_api(self):
        if self._clockify_api is None:
            from .clockify_api import ClockifyAPI

            self._clockify_api = ClockifyAPI(master_parent=self)
        return self._clockify_api

    def get_global_environments(self):
        return {
            "CLOCKIFY_WORKSPACE": self.workspace_name
        }
        return {"CLOCKIFY_WORKSPACE": self.workspace_name}

    def tray_init(self):
        from .widgets import ClockifySettings, MessageWidget
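The new clockify_api property defers both the import and the ClockifyAPI construction until first access, so simply initializing the module stays cheap when the tray is never used. The same lazy-property pattern in isolation (datetime stands in for the heavy dependency):

class Host:
    """Defer a costly import/construction until first use, then cache it."""

    def __init__(self):
        self._client = None

    @property
    def client(self):
        if self._client is None:
            # Import inside the property so module import stays cheap.
            import datetime
            self._client = datetime.datetime.now()
        return self._client


host = Host()
print(host.client)                 # constructed on first access
print(host.client is host.client)  # True: cached afterwards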
@@ -52,7 +46,7 @@ class ClockifyModule(
        self.MessageWidgetClass = MessageWidget

        self.message_widget = None
        self.widget_settings = ClockifySettings(self.clockapi)
        self.widget_settings = ClockifySettings(self.clockify_api)
        self.widget_settings_required = None

        self.thread_timer_check = None

@@ -61,7 +55,7 @@ class ClockifyModule(
        self.bool_api_key_set = False
        self.bool_workspace_set = False
        self.bool_timer_run = False
        self.bool_api_key_set = self.clockapi.set_api()
        self.bool_api_key_set = self.clockify_api.set_api()

        # Define itself as TimersManager connector
        self.timers_manager_connector = self

@@ -71,12 +65,11 @@ class ClockifyModule(
            self.show_settings()
            return

        self.bool_workspace_set = self.clockapi.workspace_id is not None
        self.bool_workspace_set = self.clockify_api.workspace_id is not None
        if self.bool_workspace_set is False:
            return

        self.start_timer_check()

        self.set_menu_visibility()

    def tray_exit(self, *_a, **_kw):

@@ -85,23 +78,19 @@ class ClockifyModule(
    def get_plugin_paths(self):
        """Implementation of IPluginPaths to get plugin paths."""
        actions_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "launcher_actions"
            os.path.dirname(os.path.abspath(__file__)), "launcher_actions"
        )
        return {
            "actions": [actions_path]
        }
        return {"actions": [actions_path]}

    def get_ftrack_event_handler_paths(self):
        """Function for Ftrack module to add ftrack event handler paths."""
        return {
            "user": [CLOCKIFY_FTRACK_USER_PATH],
            "server": [CLOCKIFY_FTRACK_SERVER_PATH]
            "server": [CLOCKIFY_FTRACK_SERVER_PATH],
        }

    def clockify_timer_stopped(self):
        self.bool_timer_run = False
        # Call `ITimersManager` method
        self.timer_stopped()

    def start_timer_check(self):
@@ -122,45 +111,44 @@ class ClockifyModule(
    def check_running(self):
        while self.bool_thread_check_running is True:
            bool_timer_run = False
            if self.clockapi.get_in_progress() is not None:
            if self.clockify_api.get_in_progress() is not None:
                bool_timer_run = True

            if self.bool_timer_run != bool_timer_run:
                if self.bool_timer_run is True:
                    self.clockify_timer_stopped()
                elif self.bool_timer_run is False:
                    actual_timer = self.clockapi.get_in_progress()
                    if not actual_timer:
                    current_timer = self.clockify_api.get_in_progress()
                    if current_timer is None:
                        continue
                    current_proj_id = current_timer.get("projectId")
                    if not current_proj_id:
                        continue

                    actual_proj_id = actual_timer["projectId"]
                    if not actual_proj_id:
                        continue

                    project = self.clockapi.get_project_by_id(actual_proj_id)
                    project = self.clockify_api.get_project_by_id(
                        current_proj_id
                    )
                    if project and project.get("code") == 501:
                        continue

                    project_name = project["name"]
                    project_name = project.get("name")

                    actual_timer_hierarchy = actual_timer["description"]
                    hierarchy_items = actual_timer_hierarchy.split("/")
                    current_timer_hierarchy = current_timer.get("description")
                    if not current_timer_hierarchy:
                        continue
                    hierarchy_items = current_timer_hierarchy.split("/")
                    # Each pype timer must have at least 2 items!
                    if len(hierarchy_items) < 2:
                        continue

                    task_name = hierarchy_items[-1]
                    hierarchy = hierarchy_items[:-1]

                    task_type = None
                    if len(actual_timer.get("tags", [])) > 0:
                        task_type = actual_timer["tags"][0].get("name")
                    data = {
                        "task_name": task_name,
                        "hierarchy": hierarchy,
                        "project_name": project_name,
                        "task_type": task_type
                    }
                    # Call `ITimersManager` method
                    self.timer_started(data)

            self.bool_timer_run = bool_timer_run
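check_running reconstructs the pipeline context from the running timer's description, which encodes the hierarchy as slash-separated segments ending with the task name. The parsing step in isolation, with an example value:

description = "MyProject/sequence01/shot010/compositing"  # example value

hierarchy_items = description.split("/")
# Each pipeline timer must carry at least an asset and a task segment.
assert len(hierarchy_items) >= 2

task_name = hierarchy_items[-1]   # "compositing"
hierarchy = hierarchy_items[:-1]  # ["MyProject", "sequence01", "shot010"]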
@@ -184,6 +172,7 @@ class ClockifyModule(
    def tray_menu(self, parent_menu):
        # Menu for Tray App
        from qtpy import QtWidgets

        menu = QtWidgets.QMenu("Clockify", parent_menu)
        menu.setProperty("submenu", "on")

@@ -204,7 +193,9 @@ class ClockifyModule(
        parent_menu.addMenu(menu)

    def show_settings(self):
        self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
        self.widget_settings.input_api_key.setText(
            self.clockify_api.get_api_key()
        )
        self.widget_settings.show()

    def set_menu_visibility(self):
@@ -218,72 +209,82 @@ class ClockifyModule(
    def timer_started(self, data):
        """Tell TimersManager that timer started."""
        if self._timers_manager_module is not None:
            self._timers_manager_module.timer_started(self._module.id, data)
            self._timers_manager_module.timer_started(self.id, data)

    def timer_stopped(self):
        """Tell TimersManager that timer stopped."""
        if self._timers_manager_module is not None:
            self._timers_manager_module.timer_stopped(self._module.id)
            self._timers_manager_module.timer_stopped(self.id)

    def stop_timer(self):
        """Called from TimersManager to stop timer."""
        self.clockapi.finish_time_entry()
        self.clockify_api.finish_time_entry()

    def start_timer(self, input_data):
        """Called from TimersManager to start timer."""
        # If the api key is not entered then skip
        if not self.clockapi.get_api_key():
            return

        actual_timer = self.clockapi.get_in_progress()
        actual_timer_hierarchy = None
        actual_project_id = None
        if actual_timer is not None:
            actual_timer_hierarchy = actual_timer.get("description")
            actual_project_id = actual_timer.get("projectId")

        # Concatenate hierarchy and task to get description
        desc_items = [val for val in input_data.get("hierarchy", [])]
        desc_items.append(input_data["task_name"])
        description = "/".join(desc_items)

        # Check project existence
        project_name = input_data["project_name"]
        project_id = self.clockapi.get_project_id(project_name)
    def _verify_project_exists(self, project_name):
        project_id = self.clockify_api.get_project_id(project_name)
        if not project_id:
            self.log.warning((
                "Project \"{}\" was not found in Clockify. Timer won't start."
            ).format(project_name))
            self.log.warning(
                'Project "{}" was not found in Clockify. Timer won\'t start.'
                .format(project_name)
            )

            if not self.MessageWidgetClass:
                return

            msg = (
                "Project <b>\"{}\"</b> is not"
                " in Clockify Workspace <b>\"{}\"</b>."
                'Project <b>"{}"</b> is not'
                ' in Clockify Workspace <b>"{}"</b>.'
                "<br><br>Please inform your Project Manager."
            ).format(project_name, str(self.clockapi.workspace_name))
            ).format(project_name, str(self.clockify_api.workspace_name))

            self.message_widget = self.MessageWidgetClass(
                msg, "Clockify - Info Message"
            )
            self.message_widget.closed.connect(self.on_message_widget_close)
            self.message_widget.show()
            return False
        return project_id

    def start_timer(self, input_data):
        """Called from TimersManager to start timer."""
        # If the api key is not entered then skip
        if not self.clockify_api.get_api_key():
            return

        if (
            actual_timer is not None and
            description == actual_timer_hierarchy and
            project_id == actual_project_id
        ):
        task_name = input_data.get("task_name")

        # Concatenate hierarchy and task to get description
        description_items = list(input_data.get("hierarchy", []))
        description_items.append(task_name)
        description = "/".join(description_items)

        # Check project existence
        project_name = input_data.get("project_name")
        project_id = self._verify_project_exists(project_name)
        if not project_id:
            return

        # Setup timer tags
        tag_ids = []
        task_tag_id = self.clockapi.get_tag_id(input_data["task_type"])
        tag_name = input_data.get("task_type")
        if not tag_name:
            # no task_type found in the input data
            # if the timer is restarted by idle time (bug?)
            asset_name = input_data["hierarchy"][-1]
            asset_doc = get_asset_by_name(project_name, asset_name)
            task_info = asset_doc["data"]["tasks"][task_name]
            tag_name = task_info.get("type", "")
            if not tag_name:
                self.log.info("No tag information found for the timer")

        task_tag_id = self.clockify_api.get_tag_id(tag_name)
        if task_tag_id is not None:
            tag_ids.append(task_tag_id)

        self.clockapi.start_time_entry(
            description, project_id, tag_ids=tag_ids
        # Start timer
        self.clockify_api.start_time_entry(
            description,
            project_id,
            tag_ids=tag_ids,
            workspace_id=self.clockify_api.workspace_id,
            user_id=self.clockify_api.user_id,
        )

@@ -9,4 +9,4 @@ CLOCKIFY_FTRACK_USER_PATH = os.path.join(
)

ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"]
CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/"
CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"
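With the version segment baked into the constant, call sites keep concatenating bare action paths. A quick check of what the change produces:

CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"

# Action paths are appended directly, so they should not start with "/".
print(CLOCKIFY_ENDPOINT + "user")
# https://api.clockify.me/api/v1/user
print(CLOCKIFY_ENDPOINT + "workspaces/ws123/projects")
# https://api.clockify.me/api/v1/workspaces/ws123/projects

Note that delete_project above still formats its action_url with a leading slash, so that one route ends up with a double slash after concatenation.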
@@ -4,7 +4,7 @@ from openpype_modules.ftrack.lib import ServerAction
from openpype_modules.clockify.clockify_api import ClockifyAPI


class SyncClocifyServer(ServerAction):
class SyncClockifyServer(ServerAction):
    '''Synchronise project names and task types.'''

    identifier = "clockify.sync.server"

@@ -14,12 +14,12 @@ class SyncClocifyServer(ServerAction):
    role_list = ["Pypeclub", "Administrator", "project Manager"]

    def __init__(self, *args, **kwargs):
        super(SyncClocifyServer, self).__init__(*args, **kwargs)
        super(SyncClockifyServer, self).__init__(*args, **kwargs)

        workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
        api_key = os.environ.get("CLOCKIFY_API_KEY")
        self.clockapi = ClockifyAPI(api_key)
        self.clockapi.set_workspace(workspace_name)
        self.clockify_api = ClockifyAPI(api_key)
        self.clockify_api.set_workspace(workspace_name)
        if api_key is None:
            modified_key = "None"
        else:

@@ -48,13 +48,16 @@ class SyncClocifyServer(ServerAction):
        return True

    def launch(self, session, entities, event):
        if self.clockapi.workspace_id is None:
        self.clockify_api.set_api()
        if self.clockify_api.workspace_id is None:
            return {
                "success": False,
                "message": "Clockify Workspace or API key are not set!"
            }

        if self.clockapi.validate_workspace_perm() is False:
        if not self.clockify_api.validate_workspace_permissions(
            self.clockify_api.workspace_id, self.clockify_api.user_id
        ):
            return {
                "success": False,
                "message": "Missing permissions for this action!"

@@ -88,9 +91,9 @@ class SyncClocifyServer(ServerAction):
            task_type["name"] for task_type in task_types
        ]
        try:
            clockify_projects = self.clockapi.get_projects()
            clockify_projects = self.clockify_api.get_projects()
            if project_name not in clockify_projects:
                response = self.clockapi.add_project(project_name)
                response = self.clockify_api.add_project(project_name)
                if "id" not in response:
                    self.log.warning(
                        "Project \"{}\" can't be created. Response: {}".format(

@@ -105,7 +108,7 @@ class SyncClocifyServer(ServerAction):
                ).format(project_name)
            }

        clockify_workspace_tags = self.clockapi.get_tags()
        clockify_workspace_tags = self.clockify_api.get_tags()
        for task_type_name in task_type_names:
            if task_type_name in clockify_workspace_tags:
                self.log.debug(

@@ -113,7 +116,7 @@ class SyncClocifyServer(ServerAction):
                )
                continue

            response = self.clockapi.add_tag(task_type_name)
            response = self.clockify_api.add_tag(task_type_name)
            if "id" not in response:
                self.log.warning(
                    "Task \"{}\" can't be created. Response: {}".format(

@@ -138,4 +141,4 @@ class SyncClocifyServer(ServerAction):


def register(session, **kw):
    SyncClocifyServer(session).register()
    SyncClockifyServer(session).register()
@@ -3,7 +3,7 @@ from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.clockify.clockify_api import ClockifyAPI


class SyncClocifyLocal(BaseAction):
class SyncClockifyLocal(BaseAction):
    '''Synchronise project names and task types.'''

    #: Action identifier.

@@ -18,9 +18,9 @@ class SyncClocifyLocal(BaseAction):
    icon = statics_icon("app_icons", "clockify-white.png")

    def __init__(self, *args, **kwargs):
        super(SyncClocifyLocal, self).__init__(*args, **kwargs)
        super(SyncClockifyLocal, self).__init__(*args, **kwargs)
        #: ClockifyAPI
        self.clockapi = ClockifyAPI()
        self.clockify_api = ClockifyAPI()

    def discover(self, session, entities, event):
        if (

@@ -31,14 +31,18 @@ class SyncClocifyLocal(BaseAction):
            return False

    def launch(self, session, entities, event):
        self.clockapi.set_api()
        if self.clockapi.workspace_id is None:
        self.clockify_api.set_api()
        if self.clockify_api.workspace_id is None:
            return {
                "success": False,
                "message": "Clockify Workspace or API key are not set!"
            }

        if self.clockapi.validate_workspace_perm() is False:
        if (
            self.clockify_api.validate_workspace_permissions(
                self.clockify_api.workspace_id, self.clockify_api.user_id)
            is False
        ):
            return {
                "success": False,
                "message": "Missing permissions for this action!"

@@ -74,9 +78,9 @@ class SyncClocifyLocal(BaseAction):
            task_type["name"] for task_type in task_types
        ]
        try:
            clockify_projects = self.clockapi.get_projects()
            clockify_projects = self.clockify_api.get_projects()
            if project_name not in clockify_projects:
                response = self.clockapi.add_project(project_name)
                response = self.clockify_api.add_project(project_name)
                if "id" not in response:
                    self.log.warning(
                        "Project \"{}\" can't be created. Response: {}".format(

@@ -91,7 +95,7 @@ class SyncClocifyLocal(BaseAction):
                ).format(project_name)
            }

        clockify_workspace_tags = self.clockapi.get_tags()
        clockify_workspace_tags = self.clockify_api.get_tags()
        for task_type_name in task_type_names:
            if task_type_name in clockify_workspace_tags:
                self.log.debug(

@@ -99,7 +103,7 @@ class SyncClocifyLocal(BaseAction):
                )
                continue

            response = self.clockapi.add_tag(task_type_name)
            response = self.clockify_api.add_tag(task_type_name)
            if "id" not in response:
                self.log.warning(
                    "Task \"{}\" can't be created. Response: {}".format(

@@ -121,4 +125,4 @@ class SyncClocifyLocal(BaseAction):


def register(session, **kw):
    SyncClocifyLocal(session).register()
    SyncClockifyLocal(session).register()
@@ -6,9 +6,9 @@ from openpype_modules.clockify.clockify_api import ClockifyAPI
class ClockifyStart(LauncherAction):
    name = "clockify_start_timer"
    label = "Clockify - Start Timer"
    icon = "clockify_icon"
    icon = "app_icons/clockify.png"
    order = 500
    clockapi = ClockifyAPI()
    clockify_api = ClockifyAPI()

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
@@ -17,23 +17,39 @@ class ClockifyStart(LauncherAction):
            return False

    def process(self, session, **kwargs):
        self.clockify_api.set_api()
        user_id = self.clockify_api.user_id
        workspace_id = self.clockify_api.workspace_id
        project_name = session["AVALON_PROJECT"]
        asset_name = session["AVALON_ASSET"]
        task_name = session["AVALON_TASK"]

        description = asset_name
        asset_doc = get_asset_by_name(
            project_name, asset_name, fields=["data.parents"]
        )
        if asset_doc is not None:
            desc_items = asset_doc.get("data", {}).get("parents", [])
            desc_items.append(asset_name)
            desc_items.append(task_name)
            description = "/".join(desc_items)

        project_id = self.clockapi.get_project_id(project_name)
        tag_ids = []
        tag_ids.append(self.clockapi.get_tag_id(task_name))
        self.clockapi.start_time_entry(
            description, project_id, tag_ids=tag_ids
        # fetch asset docs
        asset_doc = get_asset_by_name(project_name, asset_name)

        # get task type to fill the timer tag
        task_info = asset_doc["data"]["tasks"][task_name]
        task_type = task_info["type"]

        # check if the task has a hierarchy and fill the description
        parents_data = asset_doc["data"]
        if parents_data is not None:
            description_items = parents_data.get("parents", [])
            description_items.append(asset_name)
            description_items.append(task_name)
            description = "/".join(description_items)

        project_id = self.clockify_api.get_project_id(
            project_name, workspace_id
        )
        tag_ids = []
        tag_name = task_type
        tag_ids.append(self.clockify_api.get_tag_id(tag_name, workspace_id))
        self.clockify_api.start_time_entry(
            description,
            project_id,
            tag_ids=tag_ids,
            workspace_id=workspace_id,
            user_id=user_id,
        )
@@ -3,20 +3,39 @@ from openpype_modules.clockify.clockify_api import ClockifyAPI
from openpype.pipeline import LauncherAction


class ClockifySync(LauncherAction):
class ClockifyPermissionsCheckFailed(Exception):
    """Timer start failed due to user permissions check.
    Message should be self explanatory as traceback won't be shown.
    """

    pass


class ClockifySync(LauncherAction):
    name = "sync_to_clockify"
    label = "Sync to Clockify"
    icon = "clockify_white_icon"
    icon = "app_icons/clockify-white.png"
    order = 500
    clockapi = ClockifyAPI()
    have_permissions = clockapi.validate_workspace_perm()
    clockify_api = ClockifyAPI()

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        return self.have_permissions
        """Check if there's some projects to sync"""
        try:
            next(get_projects())
            return True
        except StopIteration:
            return False

    def process(self, session, **kwargs):
        self.clockify_api.set_api()
        workspace_id = self.clockify_api.workspace_id
        user_id = self.clockify_api.user_id
        if not self.clockify_api.validate_workspace_permissions(
            workspace_id, user_id
        ):
            raise ClockifyPermissionsCheckFailed(
                "Current Clockify user is missing permissions for this action!"
            )
        project_name = session.get("AVALON_PROJECT") or ""

        projects_to_sync = []
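The new is_compatible probes whether any project exists by pulling a single item from the get_projects generator rather than materializing the whole list; next() raises StopIteration on an empty iterator. The pattern in isolation:

def has_any(iterable):
    """Return True if the iterable yields at least one item."""
    try:
        next(iter(iterable))
        return True
    except StopIteration:
        return False


print(has_any(x for x in range(3)))  # True
print(has_any(x for x in range(0)))  # False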
@@ -30,24 +49,28 @@ class ClockifySync(LauncherAction):
            task_types = project["config"]["tasks"].keys()
            projects_info[project["name"]] = task_types

        clockify_projects = self.clockapi.get_projects()
        clockify_projects = self.clockify_api.get_projects(workspace_id)
        for project_name, task_types in projects_info.items():
            if project_name in clockify_projects:
                continue

            response = self.clockapi.add_project(project_name)
            response = self.clockify_api.add_project(
                project_name, workspace_id
            )
            if "id" not in response:
                self.log.error("Project {} can't be created".format(
                    project_name
                ))
                self.log.error(
                    "Project {} can't be created".format(project_name)
                )
                continue

            clockify_workspace_tags = self.clockapi.get_tags()
            clockify_workspace_tags = self.clockify_api.get_tags(workspace_id)
            for task_type in task_types:
                if task_type not in clockify_workspace_tags:
                    response = self.clockapi.add_tag(task_type)
                    response = self.clockify_api.add_tag(
                        task_type, workspace_id
                    )
                    if "id" not in response:
                        self.log.error('Task {} can\'t be created'.format(
                            task_type
                        ))
                        self.log.error(
                            "Task {} can't be created".format(task_type)
                        )
                        continue
@@ -77,15 +77,15 @@ class MessageWidget(QtWidgets.QWidget):


class ClockifySettings(QtWidgets.QWidget):
    SIZE_W = 300
    SIZE_W = 500
    SIZE_H = 130

    loginSignal = QtCore.Signal(object, object, object)

    def __init__(self, clockapi, optional=True):
    def __init__(self, clockify_api, optional=True):
        super(ClockifySettings, self).__init__()

        self.clockapi = clockapi
        self.clockify_api = clockify_api
        self.optional = optional
        self.validated = False

@@ -162,17 +162,17 @@ class ClockifySettings(QtWidgets.QWidget):
    def click_ok(self):
        api_key = self.input_api_key.text().strip()
        if self.optional is True and api_key == '':
            self.clockapi.save_api_key(None)
            self.clockapi.set_api(api_key)
            self.clockify_api.save_api_key(None)
            self.clockify_api.set_api(api_key)
            self.validated = False
            self._close_widget()
            return

        validation = self.clockapi.validate_api_key(api_key)
        validation = self.clockify_api.validate_api_key(api_key)

        if validation:
            self.clockapi.save_api_key(api_key)
            self.clockapi.set_api(api_key)
            self.clockify_api.save_api_key(api_key)
            self.clockify_api.set_api(api_key)
            self.validated = True
            self._close_widget()
        else:
@@ -3,21 +3,60 @@

"""
import pyblish.api
from openpype.lib import TextDef
from openpype.pipeline.publish import OpenPypePyblishPluginMixin


class CollectDeadlinePools(pyblish.api.InstancePlugin):
class CollectDeadlinePools(pyblish.api.InstancePlugin,
                           OpenPypePyblishPluginMixin):
    """Collect pools from instance if present, from Setting otherwise."""

    order = pyblish.api.CollectorOrder + 0.420
    label = "Collect Deadline Pools"
    families = ["rendering", "render.farm", "renderFarm", "renderlayer"]
    families = ["rendering",
                "render.farm",
                "renderFarm",
                "renderlayer",
                "maxrender"]

    primary_pool = None
    secondary_pool = None

    @classmethod
    def apply_settings(cls, project_settings, system_settings):
        # deadline.publish.CollectDeadlinePools
        settings = project_settings["deadline"]["publish"]["CollectDeadlinePools"]  # noqa
        cls.primary_pool = settings.get("primary_pool", None)
        cls.secondary_pool = settings.get("secondary_pool", None)

    def process(self, instance):

        attr_values = self.get_attr_values_from_data(instance.data)
        if not instance.data.get("primaryPool"):
            instance.data["primaryPool"] = self.primary_pool or "none"
            instance.data["primaryPool"] = (
                attr_values.get("primaryPool") or self.primary_pool or "none"
            )

        if not instance.data.get("secondaryPool"):
            instance.data["secondaryPool"] = self.secondary_pool or "none"
            instance.data["secondaryPool"] = (
                attr_values.get("secondaryPool") or self.secondary_pool or "none"  # noqa
            )

    @classmethod
    def get_attribute_defs(cls):
        # TODO: Preferably this would be an enum for the user
        # but the Deadline server URL can be dynamic and
        # can be set per render instance. Since get_attribute_defs
        # can't be dynamic unfortunately EnumDef isn't possible (yet?)
        # pool_names = self.deadline_module.get_deadline_pools(deadline_url,
        #                                                      self.log)
        # secondary_pool_names = ["-"] + pool_names

        return [
            TextDef("primaryPool",
                    label="Primary Pool",
                    default=cls.primary_pool),
            TextDef("secondaryPool",
                    label="Secondary Pool",
                    default=cls.secondary_pool)
        ]
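The collector resolves each pool through a fallback chain: a value already present on the instance wins, then the artist-facing publish attribute, then the studio setting, and finally the literal "none". The chain reduces to Python's or short-circuiting:

def resolve_pool(existing, attr_value, setting):
    """Mirror the primaryPool/secondaryPool fallback used above."""
    if existing:  # value already collected on the instance
        return existing
    return attr_value or setting or "none"


print(resolve_pool(None, "gpu_pool", "cpu_pool"))  # gpu_pool
print(resolve_pool(None, None, "cpu_pool"))        # cpu_pool
print(resolve_pool(None, None, None))              # none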
@@ -106,7 +106,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):

        # define chunk and priority
        chunk_size = instance.context.data.get("chunk")
        if chunk_size == 0:
        if not chunk_size:
            chunk_size = self.deadline_chunk_size

        # search for %02d pattern in name, and padding number
@@ -3,7 +3,15 @@ import getpass
import copy

import attr
from openpype.pipeline import legacy_io
from openpype.lib import (
    TextDef,
    BoolDef,
    NumberDef,
)
from openpype.pipeline import (
    legacy_io,
    OpenPypePyblishPluginMixin
)
from openpype.settings import get_project_settings
from openpype.hosts.max.api.lib import (
    get_current_renderer,

@@ -22,7 +30,8 @@ class MaxPluginInfo(object):
    IgnoreInputs = attr.ib(default=True)


class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
                        OpenPypePyblishPluginMixin):

    label = "Submit Render to Deadline"
    hosts = ["max"]

@@ -31,14 +40,22 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):

    use_published = True
    priority = 50
    tile_priority = 50
    chunk_size = 1
    jobInfo = {}
    pluginInfo = {}
    group = None
    deadline_pool = None
    deadline_pool_secondary = None
    framePerTask = 1

    @classmethod
    def apply_settings(cls, project_settings, system_settings):
        settings = project_settings["deadline"]["publish"]["MaxSubmitDeadline"]  # noqa

        # Take some defaults from settings
        cls.use_published = settings.get("use_published",
                                         cls.use_published)
        cls.priority = settings.get("priority",
                                    cls.priority)
        cls.chunk_size = settings.get("chunk_size", cls.chunk_size)
        cls.group = settings.get("group", cls.group)

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="3dsmax")
@@ -49,11 +66,11 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):

        instance = self._instance
        context = instance.context

        # Always use the original work file name for the Job name even when
        # rendering is done from the published Work File. The original work
        # file name is clearer because it can also have subversion strings,
        # etc. which are stripped for the published file.

        src_filepath = context.data["currentFile"]
        src_filename = os.path.basename(src_filepath)

@@ -71,13 +88,13 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")
        job_info.ChunkSize = instance.data.get("chunkSize", 1)
        job_info.Comment = context.data.get("comment")
        job_info.Priority = instance.data.get("priority", self.priority)
        job_info.FramesPerTask = instance.data.get("framesPerTask", 1)

        if self.group:
            job_info.Group = self.group
        attr_values = self.get_attr_values_from_data(instance.data)

        job_info.ChunkSize = attr_values.get("chunkSize", 1)
        job_info.Comment = context.data.get("comment")
        job_info.Priority = attr_values.get("priority", self.priority)
        job_info.Group = attr_values.get("group", self.group)

        # Add options from RenderGlobals
        render_globals = instance.data.get("renderGlobals", {})
@@ -216,3 +233,32 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
        plugin_info.update(plugin_data)

        return job_info, plugin_info

    @classmethod
    def get_attribute_defs(cls):
        defs = super(MaxSubmitDeadline, cls).get_attribute_defs()
        defs.extend([
            BoolDef("use_published",
                    default=cls.use_published,
                    label="Use Published Scene"),

            NumberDef("priority",
                      minimum=1,
                      maximum=250,
                      decimals=0,
                      default=cls.priority,
                      label="Priority"),

            NumberDef("chunkSize",
                      minimum=1,
                      maximum=50,
                      decimals=0,
                      default=cls.chunk_size,
                      label="Frame Per Task"),

            TextDef("group",
                    default=cls.group,
                    label="Group Name"),
        ])

        return defs
@@ -422,6 +422,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
        assembly_job_info.Priority = instance.data.get(
            "tile_priority", self.tile_priority
        )
        assembly_job_info.TileJob = False

        pool = instance.context.data["project_settings"]["deadline"]
        pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"]

@@ -450,15 +451,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
            frame_assembly_job_info.ExtraInfo[0] = file_hash
            frame_assembly_job_info.ExtraInfo[1] = file
            frame_assembly_job_info.JobDependencies = tile_job_id
            frame_assembly_job_info.Frames = frame

            # write assembly job config files
            now = datetime.now()

            config_file = os.path.join(
                output_dir,
                "{}_config_{}.txt".format(
                    os.path.splitext(file)[0],
                    now.strftime("%Y_%m_%d_%H_%M_%S")
                    datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
                )
            )
            try:

@@ -469,6 +469,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
                self.log.warning("Path is unreachable: "
                                 "`{}`".format(output_dir))

            assembly_plugin_info["ConfigFile"] = config_file

            with open(config_file, "w") as cf:
                print("TileCount={}".format(tiles_count), file=cf)
                print("ImageFileName={}".format(file), file=cf)

@@ -477,25 +479,30 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
                print("ImageHeight={}".format(
                    instance.data.get("resolutionHeight")), file=cf)

            with open(config_file, "a") as cf:
                # Need to reverse the order of the y tiles, because image
                # coordinates are calculated from bottom left corner.
                tiles = _format_tiles(
                    file, 0,
                    instance.data.get("tilesX"),
                    instance.data.get("tilesY"),
                    instance.data.get("resolutionWidth"),
                    instance.data.get("resolutionHeight"),
                    payload_plugin_info["OutputFilePrefix"]
                    payload_plugin_info["OutputFilePrefix"],
                    reversed_y=True
                )[1]
                for k, v in sorted(tiles.items()):
                    print("{}={}".format(k, v), file=cf)

            payload = self.assemble_payload(
                job_info=frame_assembly_job_info,
                plugin_info=assembly_plugin_info.copy(),
                # todo: aux file transfers don't work with deadline webservice
                # add config file as job auxFile
                # aux_files=[config_file]
            assembly_payloads.append(
                self.assemble_payload(
                    job_info=frame_assembly_job_info,
                    plugin_info=assembly_plugin_info.copy(),
                    # This would fail if the client machine and webservice are
                    # using different storage paths.
                    aux_files=[config_file]
                )
            )
            assembly_payloads.append(payload)

        # Submit assembly jobs
        assembly_job_ids = []

@@ -505,6 +512,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
                "submitting assembly job {} of {}".format(i + 1,
                                                          num_assemblies)
            )
            self.log.info(payload)
            assembly_job_id = self.submit(payload)
            assembly_job_ids.append(assembly_job_id)

@@ -764,8 +772,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):


def _format_tiles(
        filename, index, tiles_x, tiles_y,
        width, height, prefix):
        filename,
        index,
        tiles_x,
        tiles_y,
        width,
        height,
        prefix,
        reversed_y=False
):
    """Generate tile entries for Deadline tile job.

    Returns two dictionaries - one that can be directly used in Deadline

@@ -802,6 +817,7 @@ def _format_tiles(
        width (int): Width resolution of final image.
        height (int): Height resolution of final image.
        prefix (str): Image prefix.
        reversed_y (bool): Reverses the order of the y tiles.

    Returns:
        (dict, dict): Tuple of two dictionaries - first can be used to

@@ -824,12 +840,16 @@ def _format_tiles(
    cfg["TilesCropped"] = "False"

    tile = 0
    range_y = range(1, tiles_y + 1)
    reversed_y_range = list(reversed(range_y))
    for tile_x in range(1, tiles_x + 1):
        for tile_y in reversed(range(1, tiles_y + 1)):
        for i, tile_y in enumerate(range_y):
            tile_y_index = tile_y
            if reversed_y:
                tile_y_index = reversed_y_range[i]

            tile_prefix = "_tile_{}x{}_{}x{}_".format(
                tile_x, tile_y,
                tiles_x,
                tiles_y
                tile_x, tile_y_index, tiles_x, tiles_y
            )

            new_filename = "{}/{}{}".format(
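For a 2x2 grid the reversed_y flag swaps which row index each tile reports, because image coordinates are counted from the bottom-left corner while tiles are generated top-down. A dry run of just the index logic from the loop above:

tiles_x, tiles_y, reversed_y = 2, 2, True

range_y = range(1, tiles_y + 1)
reversed_y_range = list(reversed(range_y))  # [2, 1]

for tile_x in range(1, tiles_x + 1):
    for i, tile_y in enumerate(range_y):
        tile_y_index = reversed_y_range[i] if reversed_y else tile_y
        print("_tile_{}x{}_{}x{}_".format(
            tile_x, tile_y_index, tiles_x, tiles_y
        ))
# _tile_1x2_2x2_  _tile_1x1_2x2_  _tile_2x2_2x2_  _tile_2x1_2x2_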
@@ -844,11 +864,14 @@ def _format_tiles(
            right = (tile_x * w_space) - 1

            # Job info
            out["JobInfo"]["OutputFilename{}Tile{}".format(index, tile)] = new_filename  # noqa: E501
            key = "OutputFilename{}".format(index)
            out["JobInfo"][key] = new_filename

            # Plugin Info
            out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \
                "/{}".format(tile_prefix).join(prefix.rsplit("/", 1))
            key = "RegionPrefix{}".format(str(tile))
            out["PluginInfo"][key] = "/{}".format(
                tile_prefix
            ).join(prefix.rsplit("/", 1))
            out["PluginInfo"]["RegionTop{}".format(tile)] = top
            out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom
            out["PluginInfo"]["RegionLeft{}".format(tile)] = left
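A minimal sketch of the tile math `_format_tiles` performs, with illustrative numbers. Only the `right` edge expression is visible in the hunk above; the `top`/`bottom`/`left` expressions below are assumed by analogy, and the point is how `reversed_y=True` mirrors the row order:

```python
# Illustrative sketch of the region arithmetic; numbers are hypothetical
# and the top/bottom/left formulas are assumed by analogy with `right`.
width, height = 1920, 1080
tiles_x, tiles_y = 2, 2

w_space = width // tiles_x   # 960
h_space = height // tiles_y  # 540

range_y = range(1, tiles_y + 1)
reversed_y_range = list(reversed(range_y))

tile = 0
for tile_x in range(1, tiles_x + 1):
    for i, tile_y in enumerate(range_y):
        # Mirror the row index so rows are emitted top-down even though
        # image coordinates grow from the bottom-left corner.
        tile_y_index = reversed_y_range[i]
        left = (tile_x - 1) * w_space
        right = (tile_x * w_space) - 1
        top = (tile_y - 1) * h_space
        bottom = (tile_y * h_space) - 1
        print(tile, tile_x, tile_y_index, (left, right, top, bottom))
        tile += 1
```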
@@ -32,7 +32,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
    label = "Submit Nuke to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["nuke"]
    families = ["render", "prerender.farm"]
    families = ["render", "prerender"]
    optional = True
    targets = ["local"]
@@ -80,6 +80,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
    ]

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.info("Skipping local instance.")
            return

        instance.data["attributeValues"] = self.get_attr_values_from_data(
            instance.data)
@@ -168,10 +172,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                resp.json()["_id"])

        # redefinition of families
        if "render.farm" in families:
        if "render" in instance.data["family"]:
            instance.data['family'] = 'write'
            families.insert(0, "render2d")
        elif "prerender.farm" in families:
        elif "prerender" in instance.data["family"]:
            instance.data['family'] = 'write'
            families.insert(0, "prerender")
        instance.data["families"] = families
@@ -756,6 +756,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
            instance (pyblish.api.Instance): Instance data.

        """
        if not instance.data.get("farm"):
            self.log.info("Skipping local instance.")
            return

        data = instance.data.copy()
        context = instance.context
        self.context = context
@@ -17,10 +17,18 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,

    label = "Validate Deadline Pools"
    order = pyblish.api.ValidatorOrder
    families = ["rendering", "render.farm", "renderFarm", "renderlayer"]
    families = ["rendering",
                "render.farm",
                "renderFarm",
                "renderlayer",
                "maxrender"]
    optional = True

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.info("Skipping local instance.")
            return

        # get default deadline webservice url from deadline module
        deadline_url = instance.context.data["defaultDeadline"]
        self.log.info("deadline_url::{}".format(deadline_url))
@@ -68,8 +68,15 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
        # files to be in the folder that we might not want to use.
        missing = expected_files - existing_files
        if missing:
            raise RuntimeError("Missing expected files: {}".format(
                sorted(missing)))
            raise RuntimeError(
                "Missing expected files: {}\n"
                "Expected files: {}\n"
                "Existing files: {}".format(
                    sorted(missing),
                    sorted(expected_files),
                    sorted(existing_files)
                )
            )

    def _get_frame_list(self, original_job_id):
        """Returns list of frame ranges from all render jobs.
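The validator's report is driven by a plain set difference, as the hunk shows; a standalone sketch with hypothetical file names:

```python
# Set difference between expected and on-disk files (illustrative values).
expected_files = {"shot_0001.exr", "shot_0002.exr", "shot_0003.exr"}
existing_files = {"shot_0001.exr", "shot_0003.exr"}

missing = expected_files - existing_files
if missing:
    raise RuntimeError(
        "Missing expected files: {}\n"
        "Expected files: {}\n"
        "Existing files: {}".format(
            sorted(missing), sorted(expected_files), sorted(existing_files)
        )
    )
```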
@@ -362,11 +362,11 @@ def inject_openpype_environment(deadlinePlugin):

    args_str = subprocess.list2cmdline(args)
    print(">>> Executing: {} {}".format(exe, args_str))
    process = ProcessUtils.SpawnProcess(
        exe, args_str, os.path.dirname(exe)
    process_exitcode = deadlinePlugin.RunProcess(
        exe, args_str, os.path.dirname(exe), -1
    )
    ProcessUtils.WaitForExit(process, -1)
    if process.ExitCode != 0:

    if process_exitcode != 0:
        raise RuntimeError(
            "Failed to run OpenPype process to extract environments."
        )
@@ -16,6 +16,10 @@ from Deadline.Scripting import (
    FileUtils, RepositoryUtils, SystemUtils)


version_major = 1
version_minor = 0
version_patch = 0
version_string = "{}.{}.{}".format(version_major, version_minor, version_patch)
STRING_TAGS = {
    "format"
}
@@ -264,6 +268,7 @@ class OpenPypeTileAssembler(DeadlinePlugin):

    def initialize_process(self):
        """Initialization."""
        self.LogInfo("Plugin version: {}".format(version_string))
        self.SingleFramesOnly = True
        self.StdoutHandling = True
        self.renderer = self.GetPluginInfoEntryWithDefault(
@@ -320,12 +325,7 @@ class OpenPypeTileAssembler(DeadlinePlugin):
        output_file = data["ImageFileName"]
        output_file = RepositoryUtils.CheckPathMapping(output_file)
        output_file = self.process_path(output_file)
        """
        _, ext = os.path.splitext(output_file)
        if "exr" not in ext:
            self.FailRender(
                "[{}] Only EXR format is supported for now.".format(ext))
        """

        tile_info = []
        for tile in range(int(data["TileCount"])):
            tile_info.append({
@@ -336,11 +336,6 @@ class OpenPypeTileAssembler(DeadlinePlugin):
                "width": int(data["Tile{}Width".format(tile)])
            })

        # FFMpeg doesn't support tile coordinates at the moment.
        # arguments = self.tile_completer_ffmpeg_args(
        #     int(data["ImageWidth"]), int(data["ImageHeight"]),
        #     tile_info, output_file)

        arguments = self.tile_oiio_args(
            int(data["ImageWidth"]), int(data["ImageHeight"]),
            tile_info, output_file)
@@ -362,20 +357,20 @@ class OpenPypeTileAssembler(DeadlinePlugin):
    def pre_render_tasks(self):
        """Load config file and do remapping."""
        self.LogInfo("OpenPype Tile Assembler starting...")
        scene_filename = self.GetDataFilename()
        config_file = self.GetPluginInfoEntry("ConfigFile")

        temp_scene_directory = self.CreateTempDirectory(
            "thread" + str(self.GetThreadNumber()))
        temp_scene_filename = Path.GetFileName(scene_filename)
        temp_scene_filename = Path.GetFileName(config_file)
        self.config_file = Path.Combine(
            temp_scene_directory, temp_scene_filename)

        if SystemUtils.IsRunningOnWindows():
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                scene_filename, self.config_file, "/", "\\")
                config_file, self.config_file, "/", "\\")
        else:
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                scene_filename, self.config_file, "\\", "/")
                config_file, self.config_file, "\\", "/")
            os.chmod(self.config_file, os.stat(self.config_file).st_mode)

    def post_render_tasks(self):
@@ -459,75 +454,3 @@ class OpenPypeTileAssembler(DeadlinePlugin):
        args.append(output_path)

        return args

    def tile_completer_ffmpeg_args(
            self, output_width, output_height, tiles_info, output_path):
        """Generate ffmpeg arguments for tile assembly.

        Expected inputs are tiled images.

        Args:
            output_width (int): Width of output image.
            output_height (int): Height of output image.
            tiles_info (list): List of tile items, each item must be
                dictionary with `filepath`, `pos_x` and `pos_y` keys
                representing path to file and x, y coordinates on output
                image where top-left point of tile item should start.
            output_path (str): Path to file where should be output stored.

        Returns:
            (list): ffmpeg arguments.

        """
        previous_name = "base"
        ffmpeg_args = []
        filter_complex_strs = []

        filter_complex_strs.append("nullsrc=size={}x{}[{}]".format(
            output_width, output_height, previous_name
        ))

        new_tiles_info = {}
        for idx, tile_info in enumerate(tiles_info):
            # Add input and store input index
            filepath = tile_info["filepath"]
            ffmpeg_args.append("-i \"{}\"".format(filepath.replace("\\", "/")))

            # Prepare initial filter complex arguments
            index_name = "input{}".format(idx)
            filter_complex_strs.append(
                "[{}]setpts=PTS-STARTPTS[{}]".format(idx, index_name)
            )
            tile_info["index"] = idx
            new_tiles_info[index_name] = tile_info

        # Set frames to 1
        ffmpeg_args.append("-frames 1")

        # Concatenation filter complex arguments
        global_index = 1
        total_index = len(new_tiles_info)
        for index_name, tile_info in new_tiles_info.items():
            item_str = (
                "[{previous_name}][{index_name}]overlay={pos_x}:{pos_y}"
            ).format(
                previous_name=previous_name,
                index_name=index_name,
                pos_x=tile_info["pos_x"],
                pos_y=tile_info["pos_y"]
            )
            new_previous = "tmp{}".format(global_index)
            if global_index != total_index:
                item_str += "[{}]".format(new_previous)
            filter_complex_strs.append(item_str)
            previous_name = new_previous
            global_index += 1

        joined_parts = ";".join(filter_complex_strs)
        filter_complex_str = "-filter_complex \"{}\"".format(joined_parts)

        ffmpeg_args.append(filter_complex_str)
        ffmpeg_args.append("-y")
        ffmpeg_args.append("\"{}\"".format(output_path))

        return ffmpeg_args
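For reference, the removed `tile_completer_ffmpeg_args` chained `overlay` filters onto a `nullsrc` canvas. A standalone sketch of the same string construction with two hypothetical tiles:

```python
# Standalone sketch of the overlay chain the removed helper generated.
# Tile paths and positions are hypothetical.
tiles = [
    {"filepath": "tile0.exr", "pos_x": 0, "pos_y": 0},
    {"filepath": "tile1.exr", "pos_x": 960, "pos_y": 0},
]
width, height = 1920, 1080

parts = ["nullsrc=size={}x{}[base]".format(width, height)]
for idx, _ in enumerate(tiles):
    # Each input stream gets its timestamps reset and a named label.
    parts.append("[{}]setpts=PTS-STARTPTS[input{}]".format(idx, idx))

previous = "base"
for idx, tile in enumerate(tiles):
    part = "[{}][input{}]overlay={}:{}".format(
        previous, idx, tile["pos_x"], tile["pos_y"])
    if idx != len(tiles) - 1:
        # Intermediate results are labelled so the next overlay can
        # consume them; the final overlay feeds the output directly.
        previous = "tmp{}".format(idx + 1)
        part += "[{}]".format(previous)
    parts.append(part)

print(";".join(parts))
# nullsrc=size=1920x1080[base];[0]setpts=PTS-STARTPTS[input0];...
```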
@@ -44,7 +44,7 @@ class AddonSettingsDef(JsonFilesSettingsDef):


class ExampleAddon(OpenPypeAddOn, IPluginPaths, ITrayAction):
    """This Addon has defined it's settings and interface.
    """This Addon has defined its settings and interface.

    This example has system settings with an enabled option. And use
    few other interfaces:
@@ -9,7 +9,7 @@ from openpype_modules.ftrack.lib import (


class PushHierValuesToNonHier(ServerAction):
    """Action push hierarchical custom attribute values to non hierarchical.
    """Action push hierarchical custom attribute values to non-hierarchical.

    Hierarchical value is also pushed to their task entities.
@@ -119,17 +119,109 @@ class PushHierValuesToNonHier(ServerAction):
            self.join_query_keys(object_ids)
        )).all()

        output = {}
        attrs_by_obj_id = collections.defaultdict(list)
        hiearchical = []
        for attr in attrs:
            if attr["is_hierarchical"]:
                hiearchical.append(attr)
                continue
            obj_id = attr["object_type_id"]
            if obj_id not in output:
                output[obj_id] = []
            output[obj_id].append(attr)
        return output, hiearchical
            attrs_by_obj_id[obj_id].append(attr)
        return attrs_by_obj_id, hiearchical

    def query_attr_value(
        self,
        session,
        hier_attrs,
        attrs_by_obj_id,
        dst_object_type_ids,
        task_entity_ids,
        non_task_entity_ids,
        parent_id_by_entity_id
    ):
        all_non_task_ids_with_parents = set()
        for entity_id in non_task_entity_ids:
            all_non_task_ids_with_parents.add(entity_id)
            _entity_id = entity_id
            while True:
                parent_id = parent_id_by_entity_id.get(_entity_id)
                if (
                    parent_id is None
                    or parent_id in all_non_task_ids_with_parents
                ):
                    break
                all_non_task_ids_with_parents.add(parent_id)
                _entity_id = parent_id

        all_entity_ids = (
            set(all_non_task_ids_with_parents)
            | set(task_entity_ids)
        )
        attr_ids = {attr["id"] for attr in hier_attrs}
        for obj_id in dst_object_type_ids:
            attrs = attrs_by_obj_id.get(obj_id)
            if attrs is not None:
                for attr in attrs:
                    attr_ids.add(attr["id"])

        real_values_by_entity_id = {
            entity_id: {}
            for entity_id in all_entity_ids
        }

        attr_values = query_custom_attributes(
            session, attr_ids, all_entity_ids, True
        )
        for item in attr_values:
            entity_id = item["entity_id"]
            attr_id = item["configuration_id"]
            real_values_by_entity_id[entity_id][attr_id] = item["value"]

        # Fill hierarchical values
        hier_attrs_key_by_id = {
            hier_attr["id"]: hier_attr
            for hier_attr in hier_attrs
        }
        hier_values_per_entity_id = {}
        for entity_id in all_non_task_ids_with_parents:
            real_values = real_values_by_entity_id[entity_id]
            hier_values_per_entity_id[entity_id] = {}
            for attr_id, attr in hier_attrs_key_by_id.items():
                key = attr["key"]
                hier_values_per_entity_id[entity_id][key] = (
                    real_values.get(attr_id)
                )

        output = {}
        for entity_id in non_task_entity_ids:
            output[entity_id] = {}
            for attr in hier_attrs_key_by_id.values():
                key = attr["key"]
                value = hier_values_per_entity_id[entity_id][key]
                tried_ids = set()
                if value is None:
                    tried_ids.add(entity_id)
                    _entity_id = entity_id
                    while value is None:
                        parent_id = parent_id_by_entity_id.get(_entity_id)
                        if not parent_id:
                            break
                        value = hier_values_per_entity_id[parent_id][key]
                        if value is not None:
                            break
                        _entity_id = parent_id
                        tried_ids.add(parent_id)

                if value is None:
                    value = attr["default"]

                if value is not None:
                    for ent_id in tried_ids:
                        hier_values_per_entity_id[ent_id][key] = value

                output[entity_id][key] = value

        return real_values_by_entity_id, output

    def propagate_values(self, session, event, selected_entities):
        ftrack_settings = self.get_ftrack_settings(
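The core of `query_attr_value` is the inheritance walk: a missing hierarchical value falls back to the nearest ancestor that has one, then to the attribute default, and resolved values are cached back onto every visited entity. A minimal standalone sketch with hypothetical entity ids:

```python
# Minimal sketch of the hierarchical fallback above; ids and values
# are hypothetical.
parent_id_by_entity_id = {"shot": "sequence", "sequence": "project"}
values = {
    "project": {"fps": 25},
    "sequence": {"fps": None},
    "shot": {"fps": None},
}

def resolve(entity_id, key, default=None):
    tried = []
    value = values[entity_id][key]
    current = entity_id
    while value is None:
        tried.append(current)
        current = parent_id_by_entity_id.get(current)
        if current is None:
            break
        value = values[current][key]
    if value is None:
        value = default
    if value is not None:
        for ent_id in tried:  # cache so siblings don't re-walk the chain
            values[ent_id][key] = value
    return value

print(resolve("shot", "fps"))  # 25, inherited from "project"
```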
@@ -156,29 +248,24 @@ class PushHierValuesToNonHier(ServerAction):
        }

        task_object_type = object_types_by_low_name["task"]
        destination_object_types = [task_object_type]
        dst_object_type_ids = {task_object_type["id"]}
        for ent_type in interest_entity_types:
            obj_type = object_types_by_low_name.get(ent_type)
            if obj_type and obj_type not in destination_object_types:
                destination_object_types.append(obj_type)

        destination_object_type_ids = set(
            obj_type["id"]
            for obj_type in destination_object_types
        )
            if obj_type:
                dst_object_type_ids.add(obj_type["id"])

        interest_attributes = action_settings["interest_attributes"]
        # Find custom attributes definitions
        attrs_by_obj_id, hier_attrs = self.attrs_configurations(
            session, destination_object_type_ids, interest_attributes
            session, dst_object_type_ids, interest_attributes
        )
        # Filter destination object types if they have any object specific
        # custom attribute
        for obj_id in tuple(destination_object_type_ids):
        for obj_id in tuple(dst_object_type_ids):
            if obj_id not in attrs_by_obj_id:
                destination_object_type_ids.remove(obj_id)
                dst_object_type_ids.remove(obj_id)

        if not destination_object_type_ids:
        if not dst_object_type_ids:
            # TODO report that there are not matching custom attributes
            return {
                "success": True,
@@ -192,14 +279,14 @@ class PushHierValuesToNonHier(ServerAction):
            session,
            selected_ids,
            project_entity,
            destination_object_type_ids
            dst_object_type_ids
        )

        self.log.debug("Preparing whole project hierarchy by ids.")

        entities_by_obj_id = {
            obj_id: []
            for obj_id in destination_object_type_ids
            for obj_id in dst_object_type_ids
        }

        self.log.debug("Filtering Task entities.")
@@ -223,10 +310,16 @@ class PushHierValuesToNonHier(ServerAction):
                "message": "Nothing to do in your selection."
            }

        self.log.debug("Getting Hierarchical custom attribute values parents.")
        hier_values_by_entity_id = self.get_hier_values(
        self.log.debug("Getting Custom attribute values.")
        (
            real_values_by_entity_id,
            hier_values_by_entity_id
        ) = self.query_attr_value(
            session,
            hier_attrs,
            attrs_by_obj_id,
            dst_object_type_ids,
            task_entity_ids,
            non_task_entity_ids,
            parent_id_by_entity_id
        )
@@ -237,7 +330,8 @@ class PushHierValuesToNonHier(ServerAction):
            hier_attrs,
            task_entity_ids,
            hier_values_by_entity_id,
            parent_id_by_entity_id
            parent_id_by_entity_id,
            real_values_by_entity_id
        )

        self.log.debug("Setting values to entities themselves.")
@@ -245,7 +339,8 @@ class PushHierValuesToNonHier(ServerAction):
            session,
            entities_by_obj_id,
            attrs_by_obj_id,
            hier_values_by_entity_id
            hier_values_by_entity_id,
            real_values_by_entity_id
        )

        return True
@@ -322,112 +417,64 @@ class PushHierValuesToNonHier(ServerAction):

        return parent_id_by_entity_id, filtered_entities

    def get_hier_values(
        self,
        session,
        hier_attrs,
        focus_entity_ids,
        parent_id_by_entity_id
    ):
        all_ids_with_parents = set()
        for entity_id in focus_entity_ids:
            all_ids_with_parents.add(entity_id)
            _entity_id = entity_id
            while True:
                parent_id = parent_id_by_entity_id.get(_entity_id)
                if (
                    not parent_id
                    or parent_id in all_ids_with_parents
                ):
                    break
                all_ids_with_parents.add(parent_id)
                _entity_id = parent_id

        hier_attr_ids = tuple(hier_attr["id"] for hier_attr in hier_attrs)
        hier_attrs_key_by_id = {
            hier_attr["id"]: hier_attr["key"]
            for hier_attr in hier_attrs
        }

        values_per_entity_id = {}
        for entity_id in all_ids_with_parents:
            values_per_entity_id[entity_id] = {}
            for key in hier_attrs_key_by_id.values():
                values_per_entity_id[entity_id][key] = None

        values = query_custom_attributes(
            session, hier_attr_ids, all_ids_with_parents, True
        )
        for item in values:
            entity_id = item["entity_id"]
            key = hier_attrs_key_by_id[item["configuration_id"]]

            values_per_entity_id[entity_id][key] = item["value"]

        output = {}
        for entity_id in focus_entity_ids:
            output[entity_id] = {}
            for key in hier_attrs_key_by_id.values():
                value = values_per_entity_id[entity_id][key]
                tried_ids = set()
                if value is None:
                    tried_ids.add(entity_id)
                    _entity_id = entity_id
                    while value is None:
                        parent_id = parent_id_by_entity_id.get(_entity_id)
                        if not parent_id:
                            break
                        value = values_per_entity_id[parent_id][key]
                        if value is not None:
                            break
                        _entity_id = parent_id
                        tried_ids.add(parent_id)

                if value is not None:
                    for ent_id in tried_ids:
                        values_per_entity_id[ent_id][key] = value

                output[entity_id][key] = value
        return output

    def set_task_attr_values(
        self,
        session,
        hier_attrs,
        task_entity_ids,
        hier_values_by_entity_id,
        parent_id_by_entity_id
        parent_id_by_entity_id,
        real_values_by_entity_id
    ):
        hier_attr_id_by_key = {
            attr["key"]: attr["id"]
            for attr in hier_attrs
        }
        filtered_task_ids = set()
        for task_id in task_entity_ids:
            parent_id = parent_id_by_entity_id.get(task_id) or {}
            parent_id = parent_id_by_entity_id.get(task_id)
            parent_values = hier_values_by_entity_id.get(parent_id)
            if not parent_values:
                continue
            if parent_values:
                filtered_task_ids.add(task_id)

        if not filtered_task_ids:
            return

        for task_id in filtered_task_ids:
            parent_id = parent_id_by_entity_id[task_id]
            parent_values = hier_values_by_entity_id[parent_id]
            hier_values_by_entity_id[task_id] = {}
            real_task_attr_values = real_values_by_entity_id[task_id]
            for key, value in parent_values.items():
                hier_values_by_entity_id[task_id][key] = value
                if value is None:
                    continue

                configuration_id = hier_attr_id_by_key[key]
                _entity_key = collections.OrderedDict([
                    ("configuration_id", configuration_id),
                    ("entity_id", task_id)
                ])

                session.recorded_operations.push(
                    ftrack_api.operation.UpdateEntityOperation(
                        "ContextCustomAttributeValue",
                op = None
                if configuration_id not in real_task_attr_values:
                    op = ftrack_api.operation.CreateEntityOperation(
                        "CustomAttributeValue",
                        _entity_key,
                        {"value": value}
                    )
                elif real_task_attr_values[configuration_id] != value:
                    op = ftrack_api.operation.UpdateEntityOperation(
                        "CustomAttributeValue",
                        _entity_key,
                        "value",
                        ftrack_api.symbol.NOT_SET,
                        real_task_attr_values[configuration_id],
                        value
                    )
                )
                if len(session.recorded_operations) > 100:
                    session.commit()

                if op is not None:
                    session.recorded_operations.push(op)
                    if len(session.recorded_operations) > 100:
                        session.commit()

        session.commit()
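The new code follows one pattern in both methods: emit a Create operation when the server holds no value for that configuration, an Update only when the stored value differs, and flush the session in batches. A simplified sketch of that decision, with a dummy session standing in for `ftrack_api`:

```python
# Sketch of the create-vs-update batching above. The operation payloads
# are simplified tuples; ftrack_api pushes real operation objects.
class DummySession:
    def __init__(self):
        self.recorded_operations = []

    def commit(self):
        print("commit", len(self.recorded_operations), "ops")
        self.recorded_operations.clear()

def push_value(session, real_values, entity_key, configuration_id, value,
               batch_size=100):
    op = None
    if configuration_id not in real_values:
        op = ("create", entity_key, value)        # no server value yet
    elif real_values[configuration_id] != value:
        op = ("update", entity_key, real_values[configuration_id], value)
    if op is not None:
        session.recorded_operations.append(op)
        if len(session.recorded_operations) > batch_size:
            session.commit()                      # flush periodically

session = DummySession()
push_value(session, {}, ("attr-id", "entity-id"), "attr-id", 25)
session.commit()  # final flush, mirroring the trailing session.commit()
```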
@@ -436,39 +483,68 @@ class PushHierValuesToNonHier(ServerAction):
        session,
        entities_by_obj_id,
        attrs_by_obj_id,
        hier_values_by_entity_id
        hier_values_by_entity_id,
        real_values_by_entity_id
    ):
        """Push values from hierarchical custom attributes to non-hierarchical.

        Args:
            session (ftrack_api.Session): Session which queried entities,
                values and which is used for change propagation.
            entities_by_obj_id (dict[str, list[str]]): TypedContext
                ftrack entity ids where the attributes are propagated by their
                object ids.
            attrs_by_obj_id (dict[str, ftrack_api.Entity]): Objects of
                'CustomAttributeConfiguration' by their ids.
            hier_values_by_entity_id (dict[str, dict[str, Any]]): Attribute
                values by entity id and by their keys.
            real_values_by_entity_id (dict[str, dict[str, Any]]): Real
                attribute values of entities.
        """

        for object_id, entity_ids in entities_by_obj_id.items():
            attrs = attrs_by_obj_id.get(object_id)
            if not attrs or not entity_ids:
                continue

            for attr in attrs:
                for entity_id in entity_ids:
                    value = (
                        hier_values_by_entity_id
                        .get(entity_id, {})
                        .get(attr["key"])
                    )
            for entity_id in entity_ids:
                real_values = real_values_by_entity_id.get(entity_id)
                hier_values = hier_values_by_entity_id.get(entity_id)
                if hier_values is None:
                    continue

                for attr in attrs:
                    attr_id = attr["id"]
                    attr_key = attr["key"]
                    value = hier_values.get(attr_key)
                    if value is None:
                        continue

                    _entity_key = collections.OrderedDict([
                        ("configuration_id", attr["id"]),
                        ("configuration_id", attr_id),
                        ("entity_id", entity_id)
                    ])

                    session.recorded_operations.push(
                        ftrack_api.operation.UpdateEntityOperation(
                            "ContextCustomAttributeValue",
                    op = None
                    if attr_id not in real_values:
                        op = ftrack_api.operation.CreateEntityOperation(
                            "CustomAttributeValue",
                            _entity_key,
                            {"value": value}
                        )
                    elif real_values[attr_id] != value:
                        op = ftrack_api.operation.UpdateEntityOperation(
                            "CustomAttributeValue",
                            _entity_key,
                            "value",
                            ftrack_api.symbol.NOT_SET,
                            real_values[attr_id],
                            value
                        )
                    )
                    if len(session.recorded_operations) > 100:
                        session.commit()

                    if op is not None:
                        session.recorded_operations.push(op)
                        if len(session.recorded_operations) > 100:
                            session.commit()

        session.commit()
File diff suppressed because it is too large
@@ -124,6 +124,11 @@ class AppplicationsAction(BaseAction):
        if not avalon_project_apps:
            return False

        settings = self.get_project_settings_from_event(
            event, avalon_project_doc["name"])

        only_available = settings["applications"]["only_available"]

        items = []
        for app_name in avalon_project_apps:
            app = self.application_manager.applications.get(app_name)
@@ -133,6 +138,10 @@ class AppplicationsAction(BaseAction):
            if app.group.name in CUSTOM_LAUNCH_APP_GROUPS:
                continue

            # Skip applications without valid executables
            if only_available and not app.find_executable():
                continue

            app_icon = app.icon
            if app_icon and self.icon_url:
                try:
@@ -7,23 +7,22 @@ Provides:
"""
import pyblish.api

from openpype.pipeline import legacy_io
from openpype.lib import filter_profiles


class CollectFtrackFamily(pyblish.api.InstancePlugin):
    """Adds explicitly 'ftrack' to families to upload instance to FTrack.

    Uses selection by combination of hosts/families/tasks names via
    profiles resolution.

    Triggered everywhere, checks instance against configured.

    Checks advanced filtering which works on 'families' not on main
    'family', as some variants dynamically resolves addition of ftrack
    based on 'families' (editorial drives it by presence of 'review')
    """
    Adds explicitly 'ftrack' to families to upload instance to FTrack.

    Uses selection by combination of hosts/families/tasks names via
    profiles resolution.

    Triggered everywhere, checks instance against configured.

    Checks advanced filtering which works on 'families' not on main
    'family', as some variants dynamically resolves addition of ftrack
    based on 'families' (editorial drives it by presence of 'review')
    """
    label = "Collect Ftrack Family"
    order = pyblish.api.CollectorOrder + 0.4990
@@ -34,68 +33,64 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin):
            self.log.warning("No profiles present for adding Ftrack family")
            return

        add_ftrack_family = False
        task_name = instance.data.get("task",
                                      legacy_io.Session["AVALON_TASK"])
        host_name = legacy_io.Session["AVALON_APP"]
        host_name = instance.context.data["hostName"]
        family = instance.data["family"]
        task_name = instance.data.get("task")

        filtering_criteria = {
            "hosts": host_name,
            "families": family,
            "tasks": task_name
        }
        profile = filter_profiles(self.profiles, filtering_criteria,
                                  logger=self.log)
        profile = filter_profiles(
            self.profiles,
            filtering_criteria,
            logger=self.log
        )

        add_ftrack_family = False
        families = instance.data.setdefault("families", [])

        if profile:
            families = instance.data.get("families")
            add_ftrack_family = profile["add_ftrack_family"]

            additional_filters = profile.get("advanced_filtering")
            if additional_filters:
                self.log.info("'{}' families used for additional filtering".
                              format(families))
                families_set = set(families) | {family}
                self.log.info(
                    "'{}' families used for additional filtering".format(
                        families_set))
                add_ftrack_family = self._get_add_ftrack_f_from_addit_filters(
                    additional_filters,
                    families,
                    families_set,
                    add_ftrack_family
                )

            if add_ftrack_family:
                self.log.debug("Adding ftrack family for '{}'".
                               format(instance.data.get("family")))
        result_str = "Not adding"
        if add_ftrack_family:
            result_str = "Adding"
            if "ftrack" not in families:
                families.append("ftrack")

                if families:
                    if "ftrack" not in families:
                        instance.data["families"].append("ftrack")
                else:
                    instance.data["families"] = ["ftrack"]

        result_str = "Adding"
        if not add_ftrack_family:
            result_str = "Not adding"
        self.log.info("{} 'ftrack' family for instance with '{}'".format(
            result_str, family
        ))

    def _get_add_ftrack_f_from_addit_filters(self,
                                             additional_filters,
                                             families,
                                             add_ftrack_family):
        """
        Compares additional filters - working on instance's families.
    def _get_add_ftrack_f_from_addit_filters(
        self, additional_filters, families, add_ftrack_family
    ):
        """Compares additional filters - working on instance's families.

        Triggered for more detailed filtering when main family matches,
        but content of 'families' actually matter.
        (For example 'review' in 'families' should result in adding to
        Ftrack)

        Args:
            additional_filters (dict) - from Setting
            families (list) - subfamilies
            add_ftrack_family (bool) - add ftrack to families if True
        Args:
            additional_filters (dict) - from Setting
            families (set[str]) - subfamilies
            add_ftrack_family (bool) - add ftrack to families if True
        """

        override_filter = None
        override_filter_value = -1
        for additional_filter in additional_filters:
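The body of `_get_add_ftrack_f_from_addit_filters` is not shown in this hunk; the `override_filter_value` initialization suggests a most-specific-filter-wins resolution. A guess at its shape, under that assumption (the settings keys `families` and `add_ftrack_family` are taken from the visible call site):

```python
# A guess at the advanced-filter resolution: the filter whose family
# list is fully contained in the instance's families and matches the
# most families wins; otherwise the profile default is kept.
def resolve_add_ftrack(additional_filters, families, default):
    best_value = -1
    result = default
    for add_filter in additional_filters:
        filter_families = set(add_filter["families"])
        if not filter_families.issubset(families):
            continue
        if len(filter_families) > best_value:
            best_value = len(filter_families)
            result = add_filter["add_ftrack_family"]
    return result

print(resolve_add_ftrack(
    [{"families": ["review"], "add_ftrack_family": True}],
    {"render", "review"},
    False,
))  # True
```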
@@ -29,7 +29,7 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin):
            if not zou_asset_data:
                raise ValueError("Zou asset data not found in OpenPype!")

            task_name = instance.data.get("task")
            task_name = instance.data.get("task", context.data.get("task"))
            if not task_name:
                continue
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import gazu
import pyblish.api
import re


class IntegrateKitsuNote(pyblish.api.ContextPlugin):
@@ -9,27 +10,98 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):
    order = pyblish.api.IntegratorOrder
    label = "Kitsu Note and Status"
    families = ["render", "kitsu"]

    # status settings
    set_status_note = False
    note_status_shortname = "wfa"
    status_change_conditions = {
        "status_conditions": [],
        "family_requirements": [],
    }

    # comment settings
    custom_comment_template = {
        "enabled": False,
        "comment_template": "{comment}",
    }

    def format_publish_comment(self, instance):
        """Format the instance's publish comment.

        Formats `instance.data` against the custom template.
        """

        def replace_missing_key(match):
            """If key is not found in instance.data, use an empty string."""
            key = match.group(1)
            if key not in instance.data:
                self.log.warning(
                    "Key '{}' was not found in instance.data "
                    "and will be rendered as an empty string "
                    "in the comment".format(key)
                )
                return ""
            else:
                return str(instance.data[key])

        template = self.custom_comment_template["comment_template"]
        pattern = r"\{([^}]*)\}"
        return re.sub(pattern, replace_missing_key, template)

    def process(self, context):
        # Get comment text body
        publish_comment = context.data.get("comment")
        if not publish_comment:
            self.log.info("Comment is not set.")

        self.log.debug("Comment is `{}`".format(publish_comment))

        for instance in context:
            # Check if instance is a review by checking its family
            # Allow a match to primary family or any of families
            families = set([instance.data["family"]] +
                           instance.data.get("families", []))
            if "review" not in families:
                continue

            kitsu_task = instance.data.get("kitsu_task")
            if kitsu_task is None:
            if not kitsu_task:
                continue

            # Get note status, by default uses the task status for the note
            # if it is not specified in the configuration
            note_status = kitsu_task["task_status"]["id"]
            shortname = kitsu_task["task_status"]["short_name"].upper()
            note_status = kitsu_task["task_status_id"]

            if self.set_status_note:
                # Check if any status condition is not met
                allow_status_change = True
                for status_cond in self.status_change_conditions[
                    "status_conditions"
                ]:
                    condition = status_cond["condition"] == "equal"
                    match = status_cond["short_name"].upper() == shortname
                    if match and not condition or condition and not match:
                        allow_status_change = False
                        break

                if allow_status_change:
                    # Get families
                    families = {
                        instance.data.get("family")
                        for instance in context
                        if instance.data.get("publish")
                    }

                    # Check if any family requirement is met
                    for family_requirement in self.status_change_conditions[
                        "family_requirements"
                    ]:
                        condition = family_requirement["condition"] == "equal"

                        for family in families:
                            match = family_requirement["family"].lower() == family
                            if match and not condition or condition and not match:
                                allow_status_change = False
                                break

                        if allow_status_change:
                            break

            # Set note status
            if self.set_status_note and allow_status_change:
                kitsu_status = gazu.task.get_task_status_by_short_name(
                    self.note_status_shortname
                )
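The template formatting added above relies on `re.sub` with a replacement callback, so unknown placeholders degrade to empty strings instead of raising `KeyError` as `str.format` would. A standalone sketch of the same trick with hypothetical data:

```python
import re

# Same substitution trick as format_publish_comment above: unknown
# placeholders are replaced by an empty string instead of raising.
data = {"asset": "sh010", "version": 3}

def replace_missing_key(match):
    key = match.group(1)
    return str(data[key]) if key in data else ""

template = "Published {asset} v{version}{missing}"
print(re.sub(r"\{([^}]*)\}", replace_missing_key, template))
# -> "Published sh010 v3"
```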
@@ -42,11 +114,22 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):
                        "changed!".format(self.note_status_shortname)
                    )

            # Get comment text body
            publish_comment = instance.data.get("comment")
            if self.custom_comment_template["enabled"]:
                publish_comment = self.format_publish_comment(instance)

            if not publish_comment:
                self.log.info("Comment is not set.")
            else:
                self.log.debug("Comment is `{}`".format(publish_comment))

            # Add comment to kitsu task
            task_id = kitsu_task["id"]
            self.log.debug("Add new note in taks id {}".format(task_id))
            self.log.debug(
                "Add new note in task id {}".format(kitsu_task["id"])
            )
            kitsu_comment = gazu.task.add_comment(
                task_id, note_status, comment=publish_comment
                kitsu_task, note_status, comment=publish_comment
            )

            instance.data["kitsu_comment"] = kitsu_comment
@@ -12,17 +12,17 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
    optional = True

    def process(self, instance):
        task = instance.data["kitsu_task"]["id"]
        comment = instance.data["kitsu_comment"]["id"]

        # Check comment has been created
        if not comment:
        comment_id = instance.data.get("kitsu_comment", {}).get("id")
        if not comment_id:
            self.log.debug(
                "Comment not created, review not pushed to preview."
            )
            return

        # Add review representations as preview of comment
        task_id = instance.data["kitsu_task"]["id"]
        for representation in instance.data.get("representations", []):
            # Skip if not tagged as review
            if "kitsureview" not in representation.get("tags", []):
@@ -31,6 +31,6 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
            self.log.debug("Found review at: {}".format(review_path))

            gazu.task.add_preview(
                task, comment, review_path, normalize_movie=True
                task_id, comment_id, review_path, normalize_movie=True
            )
            self.log.info("Review upload on comment")
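Taken together, the two Kitsu plugins first create a comment and then attach the review file to it. A usage sketch based only on the call shapes visible in these hunks; the ids, path, and the `kitsu_task` dict are hypothetical stand-ins for the collected instance data:

```python
import gazu

# Usage implied by the hunks above; ids and paths are hypothetical and
# `kitsu_task` stands in for the dict the collector puts on the instance.
kitsu_task = {"id": "task-id", "task_status_id": "status-id"}

comment = gazu.task.add_comment(
    kitsu_task, kitsu_task["task_status_id"], comment="Publish v003"
)
gazu.task.add_preview(
    kitsu_task["id"], comment["id"], "/path/to/review.mp4",
    normalize_movie=True,
)
```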
@@ -129,7 +129,7 @@ def update_op_assets(
            frame_out = frame_in + frames_duration - 1
        else:
            frame_out = project_doc["data"].get("frameEnd", frame_in)
        item_data["frameEnd"] = frame_out
        item_data["frameEnd"] = int(frame_out)
        # Fps, fallback to project's value or default value (25.0)
        try:
            fps = float(item_data.get("fps"))
@@ -147,33 +147,37 @@ def update_op_assets(
            item_data["resolutionWidth"] = int(match_res.group(1))
            item_data["resolutionHeight"] = int(match_res.group(2))
        else:
            item_data["resolutionWidth"] = project_doc["data"].get(
                "resolutionWidth"
            item_data["resolutionWidth"] = int(
                project_doc["data"].get("resolutionWidth")
            )
            item_data["resolutionHeight"] = project_doc["data"].get(
                "resolutionHeight"
            item_data["resolutionHeight"] = int(
                project_doc["data"].get("resolutionHeight")
            )
        # Properties that don't fully exist in Kitsu.
        # Guessing those property names below:
        # Pixel Aspect Ratio
        item_data["pixelAspect"] = item_data.get(
            "pixel_aspect", project_doc["data"].get("pixelAspect")
        item_data["pixelAspect"] = float(
            item_data.get(
                "pixel_aspect", project_doc["data"].get("pixelAspect")
            )
        )
        # Handle Start
        item_data["handleStart"] = item_data.get(
            "handle_start", project_doc["data"].get("handleStart")
        item_data["handleStart"] = int(
            item_data.get(
                "handle_start", project_doc["data"].get("handleStart")
            )
        )
        # Handle End
        item_data["handleEnd"] = item_data.get(
            "handle_end", project_doc["data"].get("handleEnd")
        item_data["handleEnd"] = int(
            item_data.get("handle_end", project_doc["data"].get("handleEnd"))
        )
        # Clip In
        item_data["clipIn"] = item_data.get(
            "clip_in", project_doc["data"].get("clipIn")
        item_data["clipIn"] = int(
            item_data.get("clip_in", project_doc["data"].get("clipIn"))
        )
        # Clip Out
        item_data["clipOut"] = item_data.get(
            "clip_out", project_doc["data"].get("clipOut")
        item_data["clipOut"] = int(
            item_data.get("clip_out", project_doc["data"].get("clipOut"))
        )

        # Tasks
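Every assignment in this hunk now follows the same pattern: read the Kitsu value, fall back to the project document, then force the numeric type OpenPype expects. A standalone sketch with hypothetical data:

```python
# The pattern applied throughout the hunk above, with hypothetical data:
# Kitsu may hand back strings or omit the key entirely.
item_data = {"handle_start": "5"}
project_doc = {"data": {"handleStart": 0, "handleEnd": 0}}

item_data["handleStart"] = int(
    item_data.get("handle_start", project_doc["data"].get("handleStart"))
)
item_data["handleEnd"] = int(
    item_data.get("handle_end", project_doc["data"].get("handleEnd"))
)
print(item_data["handleStart"], item_data["handleEnd"])  # 5 0
```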
@@ -1472,13 +1472,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

        return sync_settings

    def get_all_site_configs(self, project_name=None):
    def get_all_site_configs(self, project_name=None,
                             local_editable_only=False):
        """
        Returns (dict) with all sites configured system wide.

        Args:
            project_name (str)(optional): if present, check if not disabled

            local_editable_only (bool)(opt): if True return only Local
                Setting configurable (for LS UI)
        Returns:
            (dict): {'studio': {'provider':'local_drive'...},
                     'MY_LOCAL': {'provider':....}}
@@ -1499,9 +1501,21 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            if site_settings:
                detail.update(site_settings)
            system_sites[site] = detail

        system_sites.update(self._get_default_site_configs(sync_enabled,
                                                           project_name))
        if local_editable_only:
            local_schema = SyncServerModule.get_local_settings_schema()
            editable_keys = {}
            for provider_code, editables in local_schema.items():
                editable_keys[provider_code] = ["enabled", "provider"]
                for editable_item in editables:
                    editable_keys[provider_code].append(editable_item["key"])

            for _, site in system_sites.items():
                provider = site["provider"]
                for site_config_key in list(site.keys()):
                    if site_config_key not in editable_keys[provider]:
                        site.pop(site_config_key, None)

        return system_sites
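The `local_editable_only` branch prunes each site dict down to keys the Local Settings UI may edit; iterating over `list(site.keys())` takes a snapshot so the dict can be mutated safely mid-loop. A small sketch of the same idea with hypothetical schema and site data:

```python
# Same pruning idea as the local_editable_only branch above; the schema
# and site contents are hypothetical.
editable_keys = {"local_drive": ["enabled", "provider", "root"]}
site = {"provider": "local_drive", "root": "/mnt", "credentials": "..."}

allowed = editable_keys[site["provider"]]
for key in list(site.keys()):  # snapshot: safe to pop while iterating
    if key not in allowed:
        site.pop(key, None)

print(site)  # {'provider': 'local_drive', 'root': '/mnt'}
```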
|
|
@ -141,7 +141,9 @@ class TimersManager(
|
|||
signal_handler = SignalHandler(self)
|
||||
idle_manager = IdleManager()
|
||||
widget_user_idle = WidgetUserIdle(self)
|
||||
widget_user_idle.set_countdown_start(self.time_show_message)
|
||||
widget_user_idle.set_countdown_start(
|
||||
self.time_stop_timer - self.time_show_message
|
||||
)
|
||||
|
||||
idle_manager.signal_reset_timer.connect(
|
||||
widget_user_idle.reset_countdown
|
||||
|
|
|
|||
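The fix changes the countdown start from the "show message" threshold itself to the remaining window between showing the dialog and stopping the timer. With illustrative numbers (the real values come from module settings):

```python
# Illustrative numbers: show the idle dialog at 9 minutes, stop timers
# at 10 minutes, so the visible countdown runs for the 60 s in between.
time_show_message = 9 * 60
time_stop_timer = 10 * 60
countdown_start = time_stop_timer - time_show_message
print(countdown_start)  # 60
```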