Merge branch 'develop' into feature/blender-review

This commit is contained in:
Ondřej Samohel 2023-03-17 14:14:42 +01:00 committed by GitHub
commit 78cc078b5b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
65 changed files with 6025 additions and 5933 deletions

View file

@ -14,6 +14,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
# Execute after workfile template copy
order = 10
app_groups = [
"3dsmax",
"maya",
"nuke",
"nukex",

View file

@ -382,8 +382,8 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
layout.operator(LaunchLibrary.bl_idname, text="Library...")
layout.separator()
layout.operator(LaunchWorkFiles.bl_idname, text="Work Files...")
# TODO (jasper): maybe add 'Reload Pipeline', 'Reset Frame Range' and
# 'Reset Resolution'?
# TODO (jasper): maybe add 'Reload Pipeline', 'Set Frame Range' and
# 'Set Resolution'?
def draw_avalon_menu(self, context):

View file

@ -66,8 +66,8 @@ host_tools.show_workfiles(parent)
]]></scriptCode>
</scriptItem>
<scriptItem id="reset_frame_range">
<label>Reset Frame Range</label>
<scriptItem id="set_frame_range">
<label>Set Frame Range</label>
<scriptCode><![CDATA[
import openpype.hosts.houdini.api.lib
openpype.hosts.houdini.api.lib.reset_framerange()

View file

@ -11,6 +11,7 @@ import maya.mel as mel
from openpype import resources
from openpype.tools.utils import host_tools
from .lib import get_main_window
from ..tools import show_look_assigner
log = logging.getLogger(__name__)
@ -112,7 +113,7 @@ def override_toolbox_ui():
annotation="Look Manager",
label="Look Manager",
image=os.path.join(icons, "lookmanager.png"),
command=host_tools.show_look_assigner,
command=show_look_assigner,
width=icon_size,
height=icon_size,
parent=parent

View file

@ -2099,29 +2099,40 @@ def get_frame_range():
}
def reset_frame_range():
"""Set frame range to current asset"""
def reset_frame_range(playback=True, render=True, fps=True):
"""Set frame range to current asset
fps = convert_to_maya_fps(
float(legacy_io.Session.get("AVALON_FPS", 25))
)
set_scene_fps(fps)
Args:
playback (bool, Optional): Whether to set the maya timeline playback
frame range. Defaults to True.
render (bool, Optional): Whether to set the maya render frame range.
Defaults to True.
fps (bool, Optional): Whether to set scene FPS. Defaults to True.
"""
if fps:
fps = convert_to_maya_fps(
float(legacy_io.Session.get("AVALON_FPS", 25))
)
set_scene_fps(fps)
frame_range = get_frame_range()
frame_start = frame_range["frameStart"] - int(frame_range["handleStart"])
frame_end = frame_range["frameEnd"] + int(frame_range["handleEnd"])
cmds.playbackOptions(minTime=frame_start)
cmds.playbackOptions(maxTime=frame_end)
cmds.playbackOptions(animationStartTime=frame_start)
cmds.playbackOptions(animationEndTime=frame_end)
cmds.playbackOptions(minTime=frame_start)
cmds.playbackOptions(maxTime=frame_end)
cmds.currentTime(frame_start)
if playback:
cmds.playbackOptions(minTime=frame_start)
cmds.playbackOptions(maxTime=frame_end)
cmds.playbackOptions(animationStartTime=frame_start)
cmds.playbackOptions(animationEndTime=frame_end)
cmds.playbackOptions(minTime=frame_start)
cmds.playbackOptions(maxTime=frame_end)
cmds.currentTime(frame_start)
cmds.setAttr("defaultRenderGlobals.startFrame", frame_start)
cmds.setAttr("defaultRenderGlobals.endFrame", frame_end)
if render:
cmds.setAttr("defaultRenderGlobals.startFrame", frame_start)
cmds.setAttr("defaultRenderGlobals.endFrame", frame_end)
def reset_scene_resolution():

View file

@ -158,7 +158,7 @@ class RenderSettings(object):
cmds.setAttr(
"defaultArnoldDriver.mergeAOVs", multi_exr)
self._additional_attribs_setter(additional_options)
reset_frame_range()
reset_frame_range(playback=False, fps=False, render=True)
def _set_redshift_settings(self, width, height):
"""Sets settings for Redshift."""

View file

@ -12,6 +12,7 @@ from openpype.pipeline.workfile import BuildWorkfile
from openpype.tools.utils import host_tools
from openpype.hosts.maya.api import lib, lib_rendersettings
from .lib import get_main_window, IS_HEADLESS
from ..tools import show_look_assigner
from .workfile_template_builder import (
create_placeholder,
@ -111,12 +112,12 @@ def install():
)
cmds.menuItem(
"Reset Frame Range",
"Set Frame Range",
command=lambda *args: lib.reset_frame_range()
)
cmds.menuItem(
"Reset Resolution",
"Set Resolution",
command=lambda *args: lib.reset_scene_resolution()
)
@ -139,7 +140,7 @@ def install():
cmds.menuItem(
"Look assigner...",
command=lambda *args: host_tools.show_look_assigner(
command=lambda *args: show_look_assigner(
parent_widget
)
)

View file

@ -134,7 +134,7 @@ class ConnectGeometry(InventoryAction):
bool
"""
from Qt import QtWidgets
from qtpy import QtWidgets
accept = QtWidgets.QMessageBox.Ok
if show_cancel:

View file

@ -149,7 +149,7 @@ class ConnectXgen(InventoryAction):
bool
"""
from Qt import QtWidgets
from qtpy import QtWidgets
accept = QtWidgets.QMessageBox.Ok
if show_cancel:

View file

@ -3,7 +3,7 @@ import os
import maya.cmds as cmds
import xgenm
from Qt import QtWidgets
from qtpy import QtWidgets
import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api.lib import (

View file

@ -23,6 +23,11 @@ class CollectReview(pyblish.api.InstancePlugin):
task = legacy_io.Session["AVALON_TASK"]
# Get panel.
instance.data["panel"] = cmds.playblast(
activeEditor=True
).split("|")[-1]
# get cameras
members = instance.data['setMembers']
cameras = cmds.ls(members, long=True,

View file

@ -118,7 +118,6 @@ class ExtractPlayblast(publish.Extractor):
# Need to explicitly enable some viewport changes so the viewport is
# refreshed ahead of playblasting.
panel = cmds.getPanel(withFocus=True)
keys = [
"useDefaultMaterial",
"wireframeOnShaded",
@ -129,10 +128,12 @@ class ExtractPlayblast(publish.Extractor):
viewport_defaults = {}
for key in keys:
viewport_defaults[key] = cmds.modelEditor(
panel, query=True, **{key: True}
instance.data["panel"], query=True, **{key: True}
)
if preset["viewport_options"][key]:
cmds.modelEditor(panel, edit=True, **{key: True})
cmds.modelEditor(
instance.data["panel"], edit=True, **{key: True}
)
override_viewport_options = (
capture_presets['Viewport Options']['override_viewport_options']
@ -147,12 +148,10 @@ class ExtractPlayblast(publish.Extractor):
# Update preset with current panel setting
# if override_viewport_options is turned off
panel = cmds.getPanel(withFocus=True) or ""
if not override_viewport_options and "modelPanel" in panel:
panel_preset = capture.parse_active_view()
if not override_viewport_options:
panel_preset = capture.parse_view(instance.data["panel"])
panel_preset.pop("camera")
preset.update(panel_preset)
cmds.setFocus(panel)
self.log.info(
"Using preset:\n{}".format(
@ -163,7 +162,10 @@ class ExtractPlayblast(publish.Extractor):
path = capture.capture(log=self.log, **preset)
# Restoring viewport options.
cmds.modelEditor(panel, edit=True, **viewport_defaults)
if viewport_defaults:
cmds.modelEditor(
instance.data["panel"], edit=True, **viewport_defaults
)
cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom)

View file

@ -0,0 +1,27 @@
from openpype.tools.utils.host_tools import qt_app_context
class MayaToolsSingleton:
_look_assigner = None
def get_look_assigner_tool(parent):
"""Create, cache and return look assigner tool window."""
if MayaToolsSingleton._look_assigner is None:
from .mayalookassigner import MayaLookAssignerWindow
mayalookassigner_window = MayaLookAssignerWindow(parent)
MayaToolsSingleton._look_assigner = mayalookassigner_window
return MayaToolsSingleton._look_assigner
def show_look_assigner(parent=None):
"""Look manager is Maya specific tool for look management."""
with qt_app_context():
look_assigner_tool = get_look_assigner_tool(parent)
look_assigner_tool.show()
# Pull window to the front.
look_assigner_tool.raise_()
look_assigner_tool.activateWindow()
look_assigner_tool.showNormal()

View file

@ -69,7 +69,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
# "Set colorspace from presets", self
# )
# reset_resolution_btn = QtWidgets.QPushButton(
# "Reset Resolution from peresets", self
# "Set Resolution from presets", self
# )
layout = QtWidgets.QVBoxLayout(self)
@ -108,7 +108,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
libload_btn.clicked.connect(self.on_libload_clicked)
# rename_btn.clicked.connect(self.on_rename_clicked)
# set_colorspace_btn.clicked.connect(self.on_set_colorspace_clicked)
# reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked)
# reset_resolution_btn.clicked.connect(self.on_set_resolution_clicked)
experimental_btn.clicked.connect(self.on_experimental_clicked)
def on_workfile_clicked(self):
@ -145,8 +145,8 @@ class OpenPypeMenu(QtWidgets.QWidget):
def on_set_colorspace_clicked(self):
print("Clicked Set Colorspace")
def on_reset_resolution_clicked(self):
print("Clicked Reset Resolution")
def on_set_resolution_clicked(self):
print("Clicked Set Resolution")
def on_experimental_clicked(self):
host_tools.show_experimental_tools_dialog()

View file

@ -6,34 +6,22 @@ import datetime
import requests
from .constants import (
CLOCKIFY_ENDPOINT,
ADMIN_PERMISSION_NAMES
ADMIN_PERMISSION_NAMES,
)
from openpype.lib.local_settings import OpenPypeSecureRegistry
def time_check(obj):
if obj.request_counter < 10:
obj.request_counter += 1
return
wait_time = 1 - (time.time() - obj.request_time)
if wait_time > 0:
time.sleep(wait_time)
obj.request_time = time.time()
obj.request_counter = 0
from openpype.lib import Logger
class ClockifyAPI:
log = Logger.get_logger(__name__)
def __init__(self, api_key=None, master_parent=None):
self.workspace_name = None
self.workspace_id = None
self.master_parent = master_parent
self.api_key = api_key
self.request_counter = 0
self.request_time = time.time()
self._workspace_id = None
self._user_id = None
self._secure_registry = None
@property
@ -44,11 +32,19 @@ class ClockifyAPI:
@property
def headers(self):
return {"X-Api-Key": self.api_key}
return {"x-api-key": self.api_key}
@property
def workspace_id(self):
return self._workspace_id
@property
def user_id(self):
return self._user_id
def verify_api(self):
for key, value in self.headers.items():
if value is None or value.strip() == '':
if value is None or value.strip() == "":
return False
return True
@ -59,65 +55,55 @@ class ClockifyAPI:
if api_key is not None and self.validate_api_key(api_key) is True:
self.api_key = api_key
self.set_workspace()
self.set_user_id()
if self.master_parent:
self.master_parent.signed_in()
return True
return False
def validate_api_key(self, api_key):
test_headers = {'X-Api-Key': api_key}
action_url = 'workspaces/'
time_check(self)
test_headers = {"x-api-key": api_key}
action_url = "user"
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=test_headers
CLOCKIFY_ENDPOINT + action_url, headers=test_headers
)
if response.status_code != 200:
return False
return True
def validate_workspace_perm(self, workspace_id=None):
user_id = self.get_user_id()
def validate_workspace_permissions(self, workspace_id=None, user_id=None):
if user_id is None:
self.log.info("No user_id found during validation")
return False
if workspace_id is None:
workspace_id = self.workspace_id
action_url = "/workspaces/{}/users/{}/permissions".format(
workspace_id, user_id
)
time_check(self)
action_url = f"workspaces/{workspace_id}/users?includeRoles=1"
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
user_permissions = response.json()
for perm in user_permissions:
if perm['name'] in ADMIN_PERMISSION_NAMES:
data = response.json()
for user in data:
if user.get("id") == user_id:
roles_data = user.get("roles")
for entities in roles_data:
if entities.get("role") in ADMIN_PERMISSION_NAMES:
return True
return False
def get_user_id(self):
action_url = 'v1/user/'
time_check(self)
action_url = "user"
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
# this regex is neccessary: UNICODE strings are crashing
# during json serialization
id_regex = '\"{1}id\"{1}\:{1}\"{1}\w+\"{1}'
result = re.findall(id_regex, str(response.content))
if len(result) != 1:
# replace with log and better message?
print('User ID was not found (this is a BUG!!!)')
return None
return json.loads('{'+result[0]+'}')['id']
result = response.json()
user_id = result.get("id", None)
return user_id
def set_workspace(self, name=None):
if name is None:
name = os.environ.get('CLOCKIFY_WORKSPACE', None)
name = os.environ.get("CLOCKIFY_WORKSPACE", None)
self.workspace_name = name
self.workspace_id = None
if self.workspace_name is None:
return
try:
@ -125,7 +111,7 @@ class ClockifyAPI:
except Exception:
result = False
if result is not False:
self.workspace_id = result
self._workspace_id = result
if self.master_parent is not None:
self.master_parent.start_timer_check()
return True
@ -139,6 +125,14 @@ class ClockifyAPI:
return all_workspaces[name]
return False
def set_user_id(self):
try:
user_id = self.get_user_id()
except Exception:
user_id = None
if user_id is not None:
self._user_id = user_id
def get_api_key(self):
return self.secure_registry.get_item("api_key", None)
@ -146,11 +140,9 @@ class ClockifyAPI:
self.secure_registry.set_item("api_key", api_key)
def get_workspaces(self):
action_url = 'workspaces/'
time_check(self)
action_url = "workspaces/"
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return {
workspace["name"]: workspace["id"] for workspace in response.json()
@ -159,27 +151,22 @@ class ClockifyAPI:
def get_projects(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/'.format(workspace_id)
time_check(self)
action_url = f"workspaces/{workspace_id}/projects"
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return {
project["name"]: project["id"] for project in response.json()
}
if response.status_code != 403:
result = response.json()
return {project["name"]: project["id"] for project in result}
def get_project_by_id(self, project_id, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/{}/'.format(
action_url = "workspaces/{}/projects/{}".format(
workspace_id, project_id
)
time_check(self)
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return response.json()
@ -187,32 +174,24 @@ class ClockifyAPI:
def get_tags(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/tags/'.format(workspace_id)
time_check(self)
action_url = "workspaces/{}/tags".format(workspace_id)
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return {
tag["name"]: tag["id"] for tag in response.json()
}
return {tag["name"]: tag["id"] for tag in response.json()}
def get_tasks(self, project_id, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/{}/tasks/'.format(
action_url = "workspaces/{}/projects/{}/tasks".format(
workspace_id, project_id
)
time_check(self)
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return {
task["name"]: task["id"] for task in response.json()
}
return {task["name"]: task["id"] for task in response.json()}
def get_workspace_id(self, workspace_name):
all_workspaces = self.get_workspaces()
@ -236,48 +215,64 @@ class ClockifyAPI:
return None
return all_tasks[tag_name]
def get_task_id(
self, task_name, project_id, workspace_id=None
):
def get_task_id(self, task_name, project_id, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
all_tasks = self.get_tasks(
project_id, workspace_id
)
all_tasks = self.get_tasks(project_id, workspace_id)
if task_name not in all_tasks:
return None
return all_tasks[task_name]
def get_current_time(self):
return str(datetime.datetime.utcnow().isoformat())+'Z'
return str(datetime.datetime.utcnow().isoformat()) + "Z"
def start_time_entry(
self, description, project_id, task_id=None, tag_ids=[],
workspace_id=None, billable=True
self,
description,
project_id,
task_id=None,
tag_ids=None,
workspace_id=None,
user_id=None,
billable=True,
):
# Workspace
if workspace_id is None:
workspace_id = self.workspace_id
# User ID
if user_id is None:
user_id = self._user_id
# get running timer to check if we need to start it
current_timer = self.get_in_progress()
# Check if is currently run another times and has same values
current = self.get_in_progress(workspace_id)
if current is not None:
# DO not restart the timer, if it is already running for curent task
if current_timer:
current_timer_hierarchy = current_timer.get("description")
current_project_id = current_timer.get("projectId")
current_task_id = current_timer.get("taskId")
if (
current.get("description", None) == description and
current.get("projectId", None) == project_id and
current.get("taskId", None) == task_id
description == current_timer_hierarchy
and project_id == current_project_id
and task_id == current_task_id
):
self.log.info(
"Timer for the current project is already running"
)
self.bool_timer_run = True
return self.bool_timer_run
self.finish_time_entry(workspace_id)
self.finish_time_entry()
# Convert billable to strings
if billable:
billable = 'true'
billable = "true"
else:
billable = 'false'
billable = "false"
# Rest API Action
action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
action_url = "workspaces/{}/user/{}/time-entries".format(
workspace_id, user_id
)
start = self.get_current_time()
body = {
"start": start,
@ -285,169 +280,135 @@ class ClockifyAPI:
"description": description,
"projectId": project_id,
"taskId": task_id,
"tagIds": tag_ids
"tagIds": tag_ids,
}
time_check(self)
response = requests.post(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
)
success = False
if response.status_code < 300:
success = True
return success
return True
return False
def get_in_progress(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/timeEntries/inProgress'.format(
workspace_id
)
time_check(self)
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
)
def _get_current_timer_values(self, response):
if response is None:
return
try:
output = response.json()
except json.decoder.JSONDecodeError:
output = None
return output
return None
if output and isinstance(output, list):
return output[0]
return None
def finish_time_entry(self, workspace_id=None):
def get_in_progress(self, user_id=None, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
current = self.get_in_progress(workspace_id)
if current is None:
return
if user_id is None:
user_id = self.user_id
current_id = current["id"]
action_url = 'workspaces/{}/timeEntries/{}'.format(
workspace_id, current_id
action_url = (
f"workspaces/{workspace_id}/user/"
f"{user_id}/time-entries?in-progress=1"
)
body = {
"start": current["timeInterval"]["start"],
"billable": current["billable"],
"description": current["description"],
"projectId": current["projectId"],
"taskId": current["taskId"],
"tagIds": current["tagIds"],
"end": self.get_current_time()
}
time_check(self)
response = requests.put(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
response = requests.get(
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return self._get_current_timer_values(response)
def finish_time_entry(self, workspace_id=None, user_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
if user_id is None:
user_id = self.user_id
current_timer = self.get_in_progress()
if not current_timer:
return
action_url = "workspaces/{}/user/{}/time-entries".format(
workspace_id, user_id
)
body = {"end": self.get_current_time()}
response = requests.patch(
CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
)
return response.json()
def get_time_entries(
self, workspace_id=None, quantity=10
):
def get_time_entries(self, workspace_id=None, user_id=None, quantity=10):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
time_check(self)
if user_id is None:
user_id = self.user_id
action_url = "workspaces/{}/user/{}/time-entries".format(
workspace_id, user_id
)
response = requests.get(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return response.json()[:quantity]
def remove_time_entry(self, tid, workspace_id=None):
def remove_time_entry(self, tid, workspace_id=None, user_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/timeEntries/{}'.format(
workspace_id, tid
action_url = "workspaces/{}/user/{}/time-entries/{}".format(
workspace_id, user_id, tid
)
time_check(self)
response = requests.delete(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers
CLOCKIFY_ENDPOINT + action_url, headers=self.headers
)
return response.json()
def add_project(self, name, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/'.format(workspace_id)
action_url = "workspaces/{}/projects".format(workspace_id)
body = {
"name": name,
"clientId": "",
"isPublic": "false",
"estimate": {
"estimate": 0,
"type": "AUTO"
},
"estimate": {"estimate": 0, "type": "AUTO"},
"color": "#f44336",
"billable": "true"
"billable": "true",
}
time_check(self)
response = requests.post(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
)
return response.json()
def add_workspace(self, name):
action_url = 'workspaces/'
action_url = "workspaces/"
body = {"name": name}
time_check(self)
response = requests.post(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
)
return response.json()
def add_task(
self, name, project_id, workspace_id=None
):
def add_task(self, name, project_id, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/{}/tasks/'.format(
action_url = "workspaces/{}/projects/{}/tasks".format(
workspace_id, project_id
)
body = {
"name": name,
"projectId": project_id
}
time_check(self)
body = {"name": name, "projectId": project_id}
response = requests.post(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
)
return response.json()
def add_tag(self, name, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/tags'.format(workspace_id)
body = {
"name": name
}
time_check(self)
action_url = "workspaces/{}/tags".format(workspace_id)
body = {"name": name}
response = requests.post(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
json=body
CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
)
return response.json()
def delete_project(
self, project_id, workspace_id=None
):
def delete_project(self, project_id, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = '/workspaces/{}/projects/{}'.format(
action_url = "/workspaces/{}/projects/{}".format(
workspace_id, project_id
)
time_check(self)
response = requests.delete(
CLOCKIFY_ENDPOINT + action_url,
headers=self.headers,
@ -455,12 +416,12 @@ class ClockifyAPI:
return response.json()
def convert_input(
self, entity_id, entity_name, mode='Workspace', project_id=None
self, entity_id, entity_name, mode="Workspace", project_id=None
):
if entity_id is None:
error = False
error_msg = 'Missing information "{}"'
if mode.lower() == 'workspace':
if mode.lower() == "workspace":
if entity_id is None and entity_name is None:
if self.workspace_id is not None:
entity_id = self.workspace_id
@ -471,14 +432,14 @@ class ClockifyAPI:
else:
if entity_id is None and entity_name is None:
error = True
elif mode.lower() == 'project':
elif mode.lower() == "project":
entity_id = self.get_project_id(entity_name)
elif mode.lower() == 'task':
elif mode.lower() == "task":
entity_id = self.get_task_id(
task_name=entity_name, project_id=project_id
)
else:
raise TypeError('Unknown type')
raise TypeError("Unknown type")
# Raise error
if error:
raise ValueError(error_msg.format(mode))

View file

@ -2,24 +2,13 @@ import os
import threading
import time
from openpype.modules import (
OpenPypeModule,
ITrayModule,
IPluginPaths
)
from openpype.modules import OpenPypeModule, ITrayModule, IPluginPaths
from openpype.client import get_asset_by_name
from .clockify_api import ClockifyAPI
from .constants import (
CLOCKIFY_FTRACK_USER_PATH,
CLOCKIFY_FTRACK_SERVER_PATH
)
from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH
class ClockifyModule(
OpenPypeModule,
ITrayModule,
IPluginPaths
):
class ClockifyModule(OpenPypeModule, ITrayModule, IPluginPaths):
name = "clockify"
def initialize(self, modules_settings):
@ -33,18 +22,23 @@ class ClockifyModule(
self.timer_manager = None
self.MessageWidgetClass = None
self.message_widget = None
self.clockapi = ClockifyAPI(master_parent=self)
self._clockify_api = None
# TimersManager attributes
# - set `timers_manager_connector` only in `tray_init`
self.timers_manager_connector = None
self._timers_manager_module = None
@property
def clockify_api(self):
if self._clockify_api is None:
from .clockify_api import ClockifyAPI
self._clockify_api = ClockifyAPI(master_parent=self)
return self._clockify_api
def get_global_environments(self):
return {
"CLOCKIFY_WORKSPACE": self.workspace_name
}
return {"CLOCKIFY_WORKSPACE": self.workspace_name}
def tray_init(self):
from .widgets import ClockifySettings, MessageWidget
@ -52,7 +46,7 @@ class ClockifyModule(
self.MessageWidgetClass = MessageWidget
self.message_widget = None
self.widget_settings = ClockifySettings(self.clockapi)
self.widget_settings = ClockifySettings(self.clockify_api)
self.widget_settings_required = None
self.thread_timer_check = None
@ -61,7 +55,7 @@ class ClockifyModule(
self.bool_api_key_set = False
self.bool_workspace_set = False
self.bool_timer_run = False
self.bool_api_key_set = self.clockapi.set_api()
self.bool_api_key_set = self.clockify_api.set_api()
# Define itself as TimersManager connector
self.timers_manager_connector = self
@ -71,12 +65,11 @@ class ClockifyModule(
self.show_settings()
return
self.bool_workspace_set = self.clockapi.workspace_id is not None
self.bool_workspace_set = self.clockify_api.workspace_id is not None
if self.bool_workspace_set is False:
return
self.start_timer_check()
self.set_menu_visibility()
def tray_exit(self, *_a, **_kw):
@ -85,23 +78,19 @@ class ClockifyModule(
def get_plugin_paths(self):
"""Implementaton of IPluginPaths to get plugin paths."""
actions_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"launcher_actions"
os.path.dirname(os.path.abspath(__file__)), "launcher_actions"
)
return {
"actions": [actions_path]
}
return {"actions": [actions_path]}
def get_ftrack_event_handler_paths(self):
"""Function for Ftrack module to add ftrack event handler paths."""
return {
"user": [CLOCKIFY_FTRACK_USER_PATH],
"server": [CLOCKIFY_FTRACK_SERVER_PATH]
"server": [CLOCKIFY_FTRACK_SERVER_PATH],
}
def clockify_timer_stopped(self):
self.bool_timer_run = False
# Call `ITimersManager` method
self.timer_stopped()
def start_timer_check(self):
@ -122,45 +111,44 @@ class ClockifyModule(
def check_running(self):
while self.bool_thread_check_running is True:
bool_timer_run = False
if self.clockapi.get_in_progress() is not None:
if self.clockify_api.get_in_progress() is not None:
bool_timer_run = True
if self.bool_timer_run != bool_timer_run:
if self.bool_timer_run is True:
self.clockify_timer_stopped()
elif self.bool_timer_run is False:
actual_timer = self.clockapi.get_in_progress()
if not actual_timer:
current_timer = self.clockify_api.get_in_progress()
if current_timer is None:
continue
current_proj_id = current_timer.get("projectId")
if not current_proj_id:
continue
actual_proj_id = actual_timer["projectId"]
if not actual_proj_id:
continue
project = self.clockapi.get_project_by_id(actual_proj_id)
project = self.clockify_api.get_project_by_id(
current_proj_id
)
if project and project.get("code") == 501:
continue
project_name = project["name"]
project_name = project.get("name")
actual_timer_hierarchy = actual_timer["description"]
hierarchy_items = actual_timer_hierarchy.split("/")
current_timer_hierarchy = current_timer.get("description")
if not current_timer_hierarchy:
continue
hierarchy_items = current_timer_hierarchy.split("/")
# Each pype timer must have at least 2 items!
if len(hierarchy_items) < 2:
continue
task_name = hierarchy_items[-1]
hierarchy = hierarchy_items[:-1]
task_type = None
if len(actual_timer.get("tags", [])) > 0:
task_type = actual_timer["tags"][0].get("name")
data = {
"task_name": task_name,
"hierarchy": hierarchy,
"project_name": project_name,
"task_type": task_type
}
# Call `ITimersManager` method
self.timer_started(data)
self.bool_timer_run = bool_timer_run
@ -184,6 +172,7 @@ class ClockifyModule(
def tray_menu(self, parent_menu):
# Menu for Tray App
from qtpy import QtWidgets
menu = QtWidgets.QMenu("Clockify", parent_menu)
menu.setProperty("submenu", "on")
@ -204,7 +193,9 @@ class ClockifyModule(
parent_menu.addMenu(menu)
def show_settings(self):
self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
self.widget_settings.input_api_key.setText(
self.clockify_api.get_api_key()
)
self.widget_settings.show()
def set_menu_visibility(self):
@ -218,72 +209,82 @@ class ClockifyModule(
def timer_started(self, data):
"""Tell TimersManager that timer started."""
if self._timers_manager_module is not None:
self._timers_manager_module.timer_started(self._module.id, data)
self._timers_manager_module.timer_started(self.id, data)
def timer_stopped(self):
"""Tell TimersManager that timer stopped."""
if self._timers_manager_module is not None:
self._timers_manager_module.timer_stopped(self._module.id)
self._timers_manager_module.timer_stopped(self.id)
def stop_timer(self):
"""Called from TimersManager to stop timer."""
self.clockapi.finish_time_entry()
self.clockify_api.finish_time_entry()
def start_timer(self, input_data):
"""Called from TimersManager to start timer."""
# If not api key is not entered then skip
if not self.clockapi.get_api_key():
return
actual_timer = self.clockapi.get_in_progress()
actual_timer_hierarchy = None
actual_project_id = None
if actual_timer is not None:
actual_timer_hierarchy = actual_timer.get("description")
actual_project_id = actual_timer.get("projectId")
# Concatenate hierarchy and task to get description
desc_items = [val for val in input_data.get("hierarchy", [])]
desc_items.append(input_data["task_name"])
description = "/".join(desc_items)
# Check project existence
project_name = input_data["project_name"]
project_id = self.clockapi.get_project_id(project_name)
def _verify_project_exists(self, project_name):
project_id = self.clockify_api.get_project_id(project_name)
if not project_id:
self.log.warning((
"Project \"{}\" was not found in Clockify. Timer won't start."
).format(project_name))
self.log.warning(
'Project "{}" was not found in Clockify. Timer won\'t start.'
).format(project_name)
if not self.MessageWidgetClass:
return
msg = (
"Project <b>\"{}\"</b> is not"
" in Clockify Workspace <b>\"{}\"</b>."
'Project <b>"{}"</b> is not'
' in Clockify Workspace <b>"{}"</b>.'
"<br><br>Please inform your Project Manager."
).format(project_name, str(self.clockapi.workspace_name))
).format(project_name, str(self.clockify_api.workspace_name))
self.message_widget = self.MessageWidgetClass(
msg, "Clockify - Info Message"
)
self.message_widget.closed.connect(self.on_message_widget_close)
self.message_widget.show()
return False
return project_id
def start_timer(self, input_data):
"""Called from TimersManager to start timer."""
# If not api key is not entered then skip
if not self.clockify_api.get_api_key():
return
if (
actual_timer is not None and
description == actual_timer_hierarchy and
project_id == actual_project_id
):
task_name = input_data.get("task_name")
# Concatenate hierarchy and task to get description
description_items = list(input_data.get("hierarchy", []))
description_items.append(task_name)
description = "/".join(description_items)
# Check project existence
project_name = input_data.get("project_name")
project_id = self._verify_project_exists(project_name)
if not project_id:
return
# Setup timer tags
tag_ids = []
task_tag_id = self.clockapi.get_tag_id(input_data["task_type"])
tag_name = input_data.get("task_type")
if not tag_name:
# no task_type found in the input data
# if the timer is restarted by idle time (bug?)
asset_name = input_data["hierarchy"][-1]
asset_doc = get_asset_by_name(project_name, asset_name)
task_info = asset_doc["data"]["tasks"][task_name]
tag_name = task_info.get("type", "")
if not tag_name:
self.log.info("No tag information found for the timer")
task_tag_id = self.clockify_api.get_tag_id(tag_name)
if task_tag_id is not None:
tag_ids.append(task_tag_id)
self.clockapi.start_time_entry(
description, project_id, tag_ids=tag_ids
# Start timer
self.clockify_api.start_time_entry(
description,
project_id,
tag_ids=tag_ids,
workspace_id=self.clockify_api.workspace_id,
user_id=self.clockify_api.user_id,
)

View file

@ -9,4 +9,4 @@ CLOCKIFY_FTRACK_USER_PATH = os.path.join(
)
ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"]
CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/"
CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"

View file

@ -4,7 +4,7 @@ from openpype_modules.ftrack.lib import ServerAction
from openpype_modules.clockify.clockify_api import ClockifyAPI
class SyncClocifyServer(ServerAction):
class SyncClockifyServer(ServerAction):
'''Synchronise project names and task types.'''
identifier = "clockify.sync.server"
@ -14,12 +14,12 @@ class SyncClocifyServer(ServerAction):
role_list = ["Pypeclub", "Administrator", "project Manager"]
def __init__(self, *args, **kwargs):
super(SyncClocifyServer, self).__init__(*args, **kwargs)
super(SyncClockifyServer, self).__init__(*args, **kwargs)
workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
api_key = os.environ.get("CLOCKIFY_API_KEY")
self.clockapi = ClockifyAPI(api_key)
self.clockapi.set_workspace(workspace_name)
self.clockify_api = ClockifyAPI(api_key)
self.clockify_api.set_workspace(workspace_name)
if api_key is None:
modified_key = "None"
else:
@ -48,13 +48,16 @@ class SyncClocifyServer(ServerAction):
return True
def launch(self, session, entities, event):
if self.clockapi.workspace_id is None:
self.clockify_api.set_api()
if self.clockify_api.workspace_id is None:
return {
"success": False,
"message": "Clockify Workspace or API key are not set!"
}
if self.clockapi.validate_workspace_perm() is False:
if not self.clockify_api.validate_workspace_permissions(
self.clockify_api.workspace_id, self.clockify_api.user_id
):
return {
"success": False,
"message": "Missing permissions for this action!"
@ -88,9 +91,9 @@ class SyncClocifyServer(ServerAction):
task_type["name"] for task_type in task_types
]
try:
clockify_projects = self.clockapi.get_projects()
clockify_projects = self.clockify_api.get_projects()
if project_name not in clockify_projects:
response = self.clockapi.add_project(project_name)
response = self.clockify_api.add_project(project_name)
if "id" not in response:
self.log.warning(
"Project \"{}\" can't be created. Response: {}".format(
@ -105,7 +108,7 @@ class SyncClocifyServer(ServerAction):
).format(project_name)
}
clockify_workspace_tags = self.clockapi.get_tags()
clockify_workspace_tags = self.clockify_api.get_tags()
for task_type_name in task_type_names:
if task_type_name in clockify_workspace_tags:
self.log.debug(
@ -113,7 +116,7 @@ class SyncClocifyServer(ServerAction):
)
continue
response = self.clockapi.add_tag(task_type_name)
response = self.clockify_api.add_tag(task_type_name)
if "id" not in response:
self.log.warning(
"Task \"{}\" can't be created. Response: {}".format(
@ -138,4 +141,4 @@ class SyncClocifyServer(ServerAction):
def register(session, **kw):
SyncClocifyServer(session).register()
SyncClockifyServer(session).register()

View file

@ -3,7 +3,7 @@ from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.clockify.clockify_api import ClockifyAPI
class SyncClocifyLocal(BaseAction):
class SyncClockifyLocal(BaseAction):
'''Synchronise project names and task types.'''
#: Action identifier.
@ -18,9 +18,9 @@ class SyncClocifyLocal(BaseAction):
icon = statics_icon("app_icons", "clockify-white.png")
def __init__(self, *args, **kwargs):
super(SyncClocifyLocal, self).__init__(*args, **kwargs)
super(SyncClockifyLocal, self).__init__(*args, **kwargs)
#: CLockifyApi
self.clockapi = ClockifyAPI()
self.clockify_api = ClockifyAPI()
def discover(self, session, entities, event):
if (
@ -31,14 +31,18 @@ class SyncClocifyLocal(BaseAction):
return False
def launch(self, session, entities, event):
self.clockapi.set_api()
if self.clockapi.workspace_id is None:
self.clockify_api.set_api()
if self.clockify_api.workspace_id is None:
return {
"success": False,
"message": "Clockify Workspace or API key are not set!"
}
if self.clockapi.validate_workspace_perm() is False:
if (
self.clockify_api.validate_workspace_permissions(
self.clockify_api.workspace_id, self.clockify_api.user_id)
is False
):
return {
"success": False,
"message": "Missing permissions for this action!"
@ -74,9 +78,9 @@ class SyncClocifyLocal(BaseAction):
task_type["name"] for task_type in task_types
]
try:
clockify_projects = self.clockapi.get_projects()
clockify_projects = self.clockify_api.get_projects()
if project_name not in clockify_projects:
response = self.clockapi.add_project(project_name)
response = self.clockify_api.add_project(project_name)
if "id" not in response:
self.log.warning(
"Project \"{}\" can't be created. Response: {}".format(
@ -91,7 +95,7 @@ class SyncClocifyLocal(BaseAction):
).format(project_name)
}
clockify_workspace_tags = self.clockapi.get_tags()
clockify_workspace_tags = self.clockify_api.get_tags()
for task_type_name in task_type_names:
if task_type_name in clockify_workspace_tags:
self.log.debug(
@ -99,7 +103,7 @@ class SyncClocifyLocal(BaseAction):
)
continue
response = self.clockapi.add_tag(task_type_name)
response = self.clockify_api.add_tag(task_type_name)
if "id" not in response:
self.log.warning(
"Task \"{}\" can't be created. Response: {}".format(
@ -121,4 +125,4 @@ class SyncClocifyLocal(BaseAction):
def register(session, **kw):
SyncClocifyLocal(session).register()
SyncClockifyLocal(session).register()

View file

@ -6,9 +6,9 @@ from openpype_modules.clockify.clockify_api import ClockifyAPI
class ClockifyStart(LauncherAction):
name = "clockify_start_timer"
label = "Clockify - Start Timer"
icon = "clockify_icon"
icon = "app_icons/clockify.png"
order = 500
clockapi = ClockifyAPI()
clockify_api = ClockifyAPI()
def is_compatible(self, session):
"""Return whether the action is compatible with the session"""
@ -17,23 +17,39 @@ class ClockifyStart(LauncherAction):
return False
def process(self, session, **kwargs):
self.clockify_api.set_api()
user_id = self.clockify_api.user_id
workspace_id = self.clockify_api.workspace_id
project_name = session["AVALON_PROJECT"]
asset_name = session["AVALON_ASSET"]
task_name = session["AVALON_TASK"]
description = asset_name
asset_doc = get_asset_by_name(
project_name, asset_name, fields=["data.parents"]
)
if asset_doc is not None:
desc_items = asset_doc.get("data", {}).get("parents", [])
desc_items.append(asset_name)
desc_items.append(task_name)
description = "/".join(desc_items)
project_id = self.clockapi.get_project_id(project_name)
tag_ids = []
tag_ids.append(self.clockapi.get_tag_id(task_name))
self.clockapi.start_time_entry(
description, project_id, tag_ids=tag_ids
# fetch asset docs
asset_doc = get_asset_by_name(project_name, asset_name)
# get task type to fill the timer tag
task_info = asset_doc["data"]["tasks"][task_name]
task_type = task_info["type"]
# check if the task has hierarchy and fill the
parents_data = asset_doc["data"]
if parents_data is not None:
description_items = parents_data.get("parents", [])
description_items.append(asset_name)
description_items.append(task_name)
description = "/".join(description_items)
project_id = self.clockify_api.get_project_id(
project_name, workspace_id
)
tag_ids = []
tag_name = task_type
tag_ids.append(self.clockify_api.get_tag_id(tag_name, workspace_id))
self.clockify_api.start_time_entry(
description,
project_id,
tag_ids=tag_ids,
workspace_id=workspace_id,
user_id=user_id,
)

View file

@ -3,20 +3,39 @@ from openpype_modules.clockify.clockify_api import ClockifyAPI
from openpype.pipeline import LauncherAction
class ClockifySync(LauncherAction):
class ClockifyPermissionsCheckFailed(Exception):
"""Timer start failed due to user permissions check.
Message should be self explanatory as traceback won't be shown.
"""
pass
class ClockifySync(LauncherAction):
name = "sync_to_clockify"
label = "Sync to Clockify"
icon = "clockify_white_icon"
icon = "app_icons/clockify-white.png"
order = 500
clockapi = ClockifyAPI()
have_permissions = clockapi.validate_workspace_perm()
clockify_api = ClockifyAPI()
def is_compatible(self, session):
"""Return whether the action is compatible with the session"""
return self.have_permissions
"""Check if there's some projects to sync"""
try:
next(get_projects())
return True
except StopIteration:
return False
def process(self, session, **kwargs):
self.clockify_api.set_api()
workspace_id = self.clockify_api.workspace_id
user_id = self.clockify_api.user_id
if not self.clockify_api.validate_workspace_permissions(
workspace_id, user_id
):
raise ClockifyPermissionsCheckFailed(
"Current CLockify user is missing permissions for this action!"
)
project_name = session.get("AVALON_PROJECT") or ""
projects_to_sync = []
@ -30,24 +49,28 @@ class ClockifySync(LauncherAction):
task_types = project["config"]["tasks"].keys()
projects_info[project["name"]] = task_types
clockify_projects = self.clockapi.get_projects()
clockify_projects = self.clockify_api.get_projects(workspace_id)
for project_name, task_types in projects_info.items():
if project_name in clockify_projects:
continue
response = self.clockapi.add_project(project_name)
response = self.clockify_api.add_project(
project_name, workspace_id
)
if "id" not in response:
self.log.error("Project {} can't be created".format(
project_name
))
self.log.error(
"Project {} can't be created".format(project_name)
)
continue
clockify_workspace_tags = self.clockapi.get_tags()
clockify_workspace_tags = self.clockify_api.get_tags(workspace_id)
for task_type in task_types:
if task_type not in clockify_workspace_tags:
response = self.clockapi.add_tag(task_type)
response = self.clockify_api.add_tag(
task_type, workspace_id
)
if "id" not in response:
self.log.error('Task {} can\'t be created'.format(
task_type
))
self.log.error(
"Task {} can't be created".format(task_type)
)
continue

View file

@ -77,15 +77,15 @@ class MessageWidget(QtWidgets.QWidget):
class ClockifySettings(QtWidgets.QWidget):
SIZE_W = 300
SIZE_W = 500
SIZE_H = 130
loginSignal = QtCore.Signal(object, object, object)
def __init__(self, clockapi, optional=True):
def __init__(self, clockify_api, optional=True):
super(ClockifySettings, self).__init__()
self.clockapi = clockapi
self.clockify_api = clockify_api
self.optional = optional
self.validated = False
@ -162,17 +162,17 @@ class ClockifySettings(QtWidgets.QWidget):
def click_ok(self):
api_key = self.input_api_key.text().strip()
if self.optional is True and api_key == '':
self.clockapi.save_api_key(None)
self.clockapi.set_api(api_key)
self.clockify_api.save_api_key(None)
self.clockify_api.set_api(api_key)
self.validated = False
self._close_widget()
return
validation = self.clockapi.validate_api_key(api_key)
validation = self.clockify_api.validate_api_key(api_key)
if validation:
self.clockapi.save_api_key(api_key)
self.clockapi.set_api(api_key)
self.clockify_api.save_api_key(api_key)
self.clockify_api.set_api(api_key)
self.validated = True
self._close_widget()
else:

View file

@ -362,11 +362,11 @@ def inject_openpype_environment(deadlinePlugin):
args_str = subprocess.list2cmdline(args)
print(">>> Executing: {} {}".format(exe, args_str))
process = ProcessUtils.SpawnProcess(
exe, args_str, os.path.dirname(exe)
process_exitcode = deadlinePlugin.RunProcess(
exe, args_str, os.path.dirname(exe), -1
)
ProcessUtils.WaitForExit(process, -1)
if process.ExitCode != 0:
if process_exitcode != 0:
raise RuntimeError(
"Failed to run OpenPype process to extract environments."
)

View file

@ -7,23 +7,22 @@ Provides:
"""
import pyblish.api
from openpype.pipeline import legacy_io
from openpype.lib import filter_profiles
class CollectFtrackFamily(pyblish.api.InstancePlugin):
"""Adds explicitly 'ftrack' to families to upload instance to FTrack.
Uses selection by combination of hosts/families/tasks names via
profiles resolution.
Triggered everywhere, checks instance against configured.
Checks advanced filtering which works on 'families' not on main
'family', as some variants dynamically resolves addition of ftrack
based on 'families' (editorial drives it by presence of 'review')
"""
Adds explicitly 'ftrack' to families to upload instance to FTrack.
Uses selection by combination of hosts/families/tasks names via
profiles resolution.
Triggered everywhere, checks instance against configured.
Checks advanced filtering which works on 'families' not on main
'family', as some variants dynamically resolves addition of ftrack
based on 'families' (editorial drives it by presence of 'review')
"""
label = "Collect Ftrack Family"
order = pyblish.api.CollectorOrder + 0.4990
@ -34,68 +33,64 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin):
self.log.warning("No profiles present for adding Ftrack family")
return
add_ftrack_family = False
task_name = instance.data.get("task",
legacy_io.Session["AVALON_TASK"])
host_name = legacy_io.Session["AVALON_APP"]
host_name = instance.context.data["hostName"]
family = instance.data["family"]
task_name = instance.data.get("task")
filtering_criteria = {
"hosts": host_name,
"families": family,
"tasks": task_name
}
profile = filter_profiles(self.profiles, filtering_criteria,
logger=self.log)
profile = filter_profiles(
self.profiles,
filtering_criteria,
logger=self.log
)
add_ftrack_family = False
families = instance.data.setdefault("families", [])
if profile:
families = instance.data.get("families")
add_ftrack_family = profile["add_ftrack_family"]
additional_filters = profile.get("advanced_filtering")
if additional_filters:
self.log.info("'{}' families used for additional filtering".
format(families))
families_set = set(families) | {family}
self.log.info(
"'{}' families used for additional filtering".format(
families_set))
add_ftrack_family = self._get_add_ftrack_f_from_addit_filters(
additional_filters,
families,
families_set,
add_ftrack_family
)
if add_ftrack_family:
self.log.debug("Adding ftrack family for '{}'".
format(instance.data.get("family")))
result_str = "Not adding"
if add_ftrack_family:
result_str = "Adding"
if "ftrack" not in families:
families.append("ftrack")
if families:
if "ftrack" not in families:
instance.data["families"].append("ftrack")
else:
instance.data["families"] = ["ftrack"]
result_str = "Adding"
if not add_ftrack_family:
result_str = "Not adding"
self.log.info("{} 'ftrack' family for instance with '{}'".format(
result_str, family
))
def _get_add_ftrack_f_from_addit_filters(self,
additional_filters,
families,
add_ftrack_family):
"""
Compares additional filters - working on instance's families.
def _get_add_ftrack_f_from_addit_filters(
self, additional_filters, families, add_ftrack_family
):
"""Compares additional filters - working on instance's families.
Triggered for more detailed filtering when main family matches,
but content of 'families' actually matter.
(For example 'review' in 'families' should result in adding to
Ftrack)
Triggered for more detailed filtering when main family matches,
but content of 'families' actually matter.
(For example 'review' in 'families' should result in adding to
Ftrack)
Args:
additional_filters (dict) - from Setting
families (list) - subfamilies
add_ftrack_family (bool) - add ftrack to families if True
Args:
additional_filters (dict) - from Setting
families (set[str]) - subfamilies
add_ftrack_family (bool) - add ftrack to families if True
"""
override_filter = None
override_filter_value = -1
for additional_filter in additional_filters:

View file

@ -141,7 +141,9 @@ class TimersManager(
signal_handler = SignalHandler(self)
idle_manager = IdleManager()
widget_user_idle = WidgetUserIdle(self)
widget_user_idle.set_countdown_start(self.time_show_message)
widget_user_idle.set_countdown_start(
self.time_stop_timer - self.time_show_message
)
idle_manager.signal_reset_timer.connect(
widget_user_idle.reset_countdown

View file

@ -350,7 +350,7 @@ How output of the schema could look like on save:
- number input, can be used for both integer and float
- key `"decimal"` defines how many decimal places will be used, 0 is for integer input (Default: `0`)
- key `"minimum"` as minimum allowed number to enter (Default: `-99999`)
- key `"maxium"` as maximum allowed number to enter (Default: `99999`)
- key `"maximum"` as maximum allowed number to enter (Default: `99999`)
- key `"steps"` will change single step value of UI inputs (using arrows and wheel scroll)
- for UI it is possible to show slider to enable this option set `show_slider` to `true`
```

View file

@ -38,7 +38,6 @@ class HostToolsHelper:
self._subset_manager_tool = None
self._scene_inventory_tool = None
self._library_loader_tool = None
self._look_assigner_tool = None
self._experimental_tools_dialog = None
@property
@ -219,27 +218,6 @@ class HostToolsHelper:
raise ImportError("No Pyblish GUI found")
def get_look_assigner_tool(self, parent):
"""Create, cache and return look assigner tool window."""
if self._look_assigner_tool is None:
from openpype.tools.mayalookassigner import MayaLookAssignerWindow
mayalookassigner_window = MayaLookAssignerWindow(parent)
self._look_assigner_tool = mayalookassigner_window
return self._look_assigner_tool
def show_look_assigner(self, parent=None):
"""Look manager is Maya specific tool for look management."""
with qt_app_context():
look_assigner_tool = self.get_look_assigner_tool(parent)
look_assigner_tool.show()
# Pull window to the front.
look_assigner_tool.raise_()
look_assigner_tool.activateWindow()
look_assigner_tool.showNormal()
def get_experimental_tools_dialog(self, parent=None):
"""Dialog of experimental tools.
@ -315,9 +293,6 @@ class HostToolsHelper:
elif tool_name == "sceneinventory":
return self.get_scene_inventory_tool(parent, *args, **kwargs)
elif tool_name == "lookassigner":
return self.get_look_assigner_tool(parent, *args, **kwargs)
elif tool_name == "publish":
self.log.info("Can't return publish tool window.")
@ -356,9 +331,6 @@ class HostToolsHelper:
elif tool_name == "sceneinventory":
self.show_scene_inventory(parent, *args, **kwargs)
elif tool_name == "lookassigner":
self.show_look_assigner(parent, *args, **kwargs)
elif tool_name == "publish":
self.show_publish(parent, *args, **kwargs)
@ -436,10 +408,6 @@ def show_scene_inventory(parent=None):
_SingletonPoint.show_tool_by_name("sceneinventory", parent)
def show_look_assigner(parent=None):
_SingletonPoint.show_tool_by_name("lookassigner", parent)
def show_publish(parent=None):
_SingletonPoint.show_tool_by_name("publish", parent)

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.2"
__version__ = "3.15.3-nightly.1"

View file

@ -135,7 +135,7 @@ class TestPipelinePublishPlugins(TestPipeline):
}
# load plugin function for testing
plugin = publish_plugins.ExtractorColormanaged()
plugin = publish_plugins.ColormanagedPyblishPluginMixin()
plugin.log = log
config_data, file_rules = plugin.get_colorspace_settings(context)
@ -175,14 +175,14 @@ class TestPipelinePublishPlugins(TestPipeline):
}
# load plugin function for testing
plugin = publish_plugins.ExtractorColormanaged()
plugin = publish_plugins.ColormanagedPyblishPluginMixin()
plugin.log = log
plugin.set_representation_colorspace(
representation_nuke, context,
colorspace_settings=(config_data_nuke, file_rules_nuke)
)
# load plugin function for testing
plugin = publish_plugins.ExtractorColormanaged()
plugin = publish_plugins.ColormanagedPyblishPluginMixin()
plugin.log = log
plugin.set_representation_colorspace(
representation_hiero, context,

View file

@ -27,4 +27,4 @@ import TabItem from '@theme/TabItem';
- for more details on how to use it go [here](admin_use#check-for-mongodb-database-connection)
## OPENPYPE_USERNAME
- if set it overides system created username
- if set it overrides system created username

View file

@ -142,7 +142,7 @@ Fill in the necessary fields (the optional fields are regex filters)
![new place holder](assets/maya-placeholder_new.png)
- Builder type: Wether the the placeholder should load current asset representations or linked assets representations
- Builder type: Whether the placeholder should load current asset representations or linked assets representations
- Representation: Representation that will be loaded (ex: ma, abc, png, etc...)

View file

@ -7,12 +7,15 @@ sidebar_label: Working with settings
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
OpenPype stores all of it's settings and configuration in the mongo database. To make the configuration as easy as possible we provide a robust GUI where you can access and change everything that is configurable
OpenPype stores all of its settings and configuration in the mongo database. To make the configuration as easy as possible we provide a robust GUI where you can access and change everything that is configurable
**Settings** GUI can be started from the tray menu *Admin -> Studio Settings*.
Please keep in mind that these settings are set-up for the full studio and not per-individual. If you're looking for individual artist settings, you can head to
[Local Settings](admin_settings_local.md) section in the artist documentation.
:::important Studio Settings versus Local Settings
Please keep in mind that these settings are set up for the full studio and not per-individual. If you're looking for individual artist settings, you can head to
[Local Settings](admin_settings_local.md) section in the documentation.
:::
## Categories
@ -76,7 +79,7 @@ You can also reset any settings to OpenPype default by doing `right click` and `
Many settings are useful to be adjusted on a per-project basis. To identify project
overrides, they are marked with **orange edge** and **orange labels** in the settings GUI.
The process of settting project overrides is similar to setting the Studio defaults. The key difference is to select a particular project you want to be configure. Those projects can be found on the left hand side of the Project Settings tab.
The process of setting project overrides is similar to setting the Studio defaults. The key difference is to select the particular project you want to configure. Those projects can be found on the left hand side of the Project Settings tab.
In the image below you can see all three overrides at the same time.
1. Deadline has **no changes to the OpenPype defaults** at all — **grey** colour of left bar.

View file

@ -68,7 +68,7 @@ Add `--headless` to run OpenPype without graphical UI (useful on server or on au
`--verbose` `<level>` - change log verbose level of OpenPype loggers.
Level value can be integer in range `0-50` or one of enum strings `"notset" (0)`, `"debug" (10)`, `"info" (20)`, `"warning" (30)`, `"error" (40)`, `"ciritcal" (50)`. Value is stored to `OPENPYPE_LOG_LEVEL` environment variable for next processes.
Level value can be integer in range `0-50` or one of enum strings `"notset" (0)`, `"debug" (10)`, `"info" (20)`, `"warning" (30)`, `"error" (40)`, `"critical" (50)`. Value is stored to `OPENPYPE_LOG_LEVEL` environment variable for next processes.
```shell
openpype_console --verbose debug

View file

@ -47,7 +47,7 @@ This is the core functional area for you as a user. Most of your actions will ta
![Menu OpenPype](assets/3dsmax_menu_first_OP.png)
:::note OpenPype Menu
User should use this menu exclusively for **Opening/Saving** when dealing with work files not standard ```File Menu``` even though user still being able perform file operations via this menu but prefferably just performing quick saves during work session not saving actual workfile versions.
Users should use this menu exclusively for **Opening/Saving** when dealing with work files instead of the standard ```File Menu```. Even though the user is still able to perform file operations via that menu, it should preferably be used only for quick saves during a work session, not for saving actual workfile versions.
:::
## Working With Scene Files
@ -73,7 +73,7 @@ OpenPype correctly names it and add version to the workfile. This basically happ
etc.
Basically meaning user is free of guessing what is the correct naming and other neccessities to keep everthing in order and managed.
Basically, this means the user is free from guessing the correct naming and other necessities needed to keep everything in order and managed.
> Note: user still has also other options for naming like ```Subversion```, ```Artist's Note``` but we won't dive into those now.

View file

@ -34,7 +34,7 @@ a correct name. You should use it instead of standard file saving dialog.
In AfterEffects you'll find the tools in the `OpenPype` extension:
![Extension](assets/photoshop_extension.PNG) <!-- same menu as in PS -->
![Extension](assets/photoshop_extension.png) <!-- same menu as in PS -->
You can show the extension panel by going to `Window` > `Extensions` > `OpenPype`.
@ -104,7 +104,7 @@ There are currently 2 options of `render` item:
When you want to load existing published work, you can use the `Loader` tool. You can reach it in the extension's panel.
![Loader](assets/photoshop_loader.PNG) <!-- picture needs to be changed -->
![Loader](assets/photoshop_loader.png) <!-- picture needs to be changed -->
The supported families for loading into AfterEffects are:
@ -128,7 +128,7 @@ Now that we have some content loaded, you can manage which version is loaded. Th
Loaded images have to stay as smart layers in order to be updated. If you rasterize the layer, you can no longer update it to a different version using OpenPype tools.
:::
![Loader](assets/photoshop_manage.PNG)
![Loader](assets/photoshop_manage.png)
You can switch to a previous version of the image or update to the latest.

View file

@ -44,7 +44,7 @@ Because the saving to the network location happens in the background, be careful
`OpenPype > Create`
![Creator](assets/harmony_creator.PNG)
![Creator](assets/harmony_creator.png)
These are the families supported in Harmony:

View file

@ -231,14 +231,14 @@ All published instances that will replace the place holder must contain unique i
![Create menu](assets/nuke_publishedinstance.png)
The informations about these objects are given by the user by filling the extra attributes of the Place Holder
The information about these objects are given by the user by filling the extra attributes of the Place Holder
![Create menu](assets/nuke_fillingExtraAttributes.png)
### Update Place Holder
This tool alows the user to change the information provided in the extra attributes of the selected Place Holder.
This tool allows the user to change the information provided in the extra attributes of the selected Place Holder.
![Create menu](assets/nuke_updatePlaceHolder.png)
@ -250,7 +250,7 @@ This tool imports the template used and replaces the existed PlaceHolders with t
![Create menu](assets/nuke_buildWorfileFromTemplate.png)
#### Result
- Replace `PLACEHOLDER` node in the template with the published instance corresponding to the informations provided in extra attributes of the Place Holder
- Replace `PLACEHOLDER` node in the template with the published instance corresponding to the information provided in extra attributes of the Place Holder
![Create menu](assets/nuke_buildworkfile.png)

View file

@ -230,8 +230,8 @@ Maya settings concerning framerate, resolution and frame range are handled by
OpenPype. If set correctly in Ftrack, Maya will validate you have correct fps on
scene save and publishing offering way to fix it for you.
For resolution and frame range, use **OpenPype → Reset Frame Range** and
**OpenPype → Reset Resolution**
For resolution and frame range, use **OpenPype → Set Frame Range** and
**OpenPype → Set Resolution**
## Creating rigs with OpenPype
@ -386,7 +386,7 @@ Lets start with empty scene. First I'll pull in my favorite Buddha model.
there just click on **Reference (abc)**.
Next, I want to be sure that I have same frame range as is set on shot I am working
on. To do this just **OpenPype → Reset Frame Range**. This should set Maya timeline to same
on. To do this just **OpenPype → Set Frame Range**. This should set Maya timeline to same
values as they are set on shot in *Ftrack* for example.
I have my time set, so lets create some animation. We'll turn Buddha model around for
@ -500,7 +500,7 @@ and for vray:
maya/<Layer>/<Layer>
```
Doing **OpenPype → Reset Resolution** will set correct resolution on camera.
Doing **OpenPype → Set Resolution** will set correct resolution on camera.
Scene is now ready for submission and should publish without errors.

View file

@ -6,7 +6,7 @@ sidebar_label: Arnold
## Arnold Scene Source (.ass)
Arnold Scene Source can be published as a single file or a sequence of files, determined by the frame range.
When creating the instance, two objectsets are created; `content` and `proxy`. Meshes in the `proxy` objectset will be the viewport representation when loading as `standin`. Proxy representations are stored as `resources` of the subset.
When creating the instance, two objectsets are created: `content` and `proxy`. Meshes in the `proxy` objectset will be the viewport representation when loading as `standin`.
### Arnold Scene Source Proxy Workflow
In order to utilize operators and proxies, the content and proxy nodes need to share the same names (including the shape names). This is done by parenting the content and proxy nodes into separate groups. For example:

View file

@ -75,7 +75,7 @@ enabled instances, you could see more information after clicking on `Details` ta
![Image instances creates](assets/photoshop_publish_validations.png)
In this dialog you could see publishable instances in left colummn, triggered plugins in the middle and logs in the right column.
In this dialog you can see publishable instances in the left column, triggered plugins in the middle and logs in the right column.
In left column you could see that `review` instance was created automatically. This instance flattens all publishable instances or
all visible layers if no publishable instances were created into single image which could serve as a single reviewable element (for example in Ftrack).

View file

@ -2,7 +2,7 @@
id: artist_tools_sync_queue
title: Sync Queue
sidebar_label: Sync Queue
description: Track sites syncronization progress.
description: Track sites synchronization progress.
---
# Sync Queue

View file

Before

Width:  |  Height:  |  Size: 8.2 KiB

After

Width:  |  Height:  |  Size: 8.2 KiB

Before After
Before After

View file

@ -24,8 +24,8 @@ It's up to the Loaders to read these values and apply the correct expected color
### Keys
- **colorspace** - string value used in other publish plugins and loaders
- **config** - storing two versions of path.
- **path** - is formated and with baked platform root. It is used for posible need to find out where we were sourcing color config during publishing.
- **template** - unformated tempate resolved from settings. It is used for other plugins targeted to remote publish which could be processed at different platform.
- **path** - is formatted and with baked platform root. It is used for possible need to find out where we were sourcing color config during publishing.
- **template** - unformatted template resolved from settings. It is used for other plugins targeted to remote publish which could be processed at different platform.
### Example
{
@ -63,7 +63,7 @@ It's up to the Loaders to read these values and apply the correct expected color
- set the `OCIO` environment variable before launching the host via a prelaunch hook
- or (if the host allows) to set the workfile OCIO config path using the host's API
3. Each Extractor exporting pixel data (e.g. image or video) has to use parent class `openpype.pipeline.publish.publish_plugins.ExtractorColormanaged` and use `self.set_representation_colorspace` on the representations to be integrated.
3. Each Extractor exporting pixel data (e.g. image or video) has to inherit from the mixin class `openpype.pipeline.publish.publish_plugins.ColormanagedPyblishPluginMixin` and use `self.set_representation_colorspace` on the representations to be integrated.
The **set_representation_colorspace** method adds `colorspaceData` to the representation. If the `colorspace` passed is not `None` then it is added directly to the representation with resolved config path otherwise a color space is assumed using the configured file rules. If no file rule matches the `colorspaceData` is **not** added to the representation.

View file

@ -45,10 +45,10 @@ openpype/hosts/{host name}
```
### Launch Hooks
Launch hooks are not directly connected to host implementation, but they can be used to modify launch of process which may be crutial for the implementation. Launch hook are plugins called when DCC is launched. They are processed in sequence before and after launch. Pre launch hooks can change how process of DCC is launched, e.g. change subprocess flags, modify environments or modify launch arguments. If prelaunch hook crashes the application is not launched at all. Postlaunch hooks are triggered after launch of subprocess. They can be used to change statuses in your project tracker, start timer, etc. Crashed postlaunch hooks have no effect on rest of postlaunch hooks or launched process. They can be filtered by platform, host and application and order is defined by integer value. Hooks inside host are automatically loaded (one reason why folder name should match host name) or can be defined from modules. Hooks execution share same launch context where can be stored data used across multiple hooks (please be very specific in stored keys e.g. 'project' vs. 'project_name'). For more detailed information look into `openpype/lib/applications.py`.
Launch hooks are not directly connected to host implementation, but they can be used to modify launch of process which may be crucial for the implementation. Launch hook are plugins called when DCC is launched. They are processed in sequence before and after launch. Pre launch hooks can change how process of DCC is launched, e.g. change subprocess flags, modify environments or modify launch arguments. If prelaunch hook crashes the application is not launched at all. Postlaunch hooks are triggered after launch of subprocess. They can be used to change statuses in your project tracker, start timer, etc. Crashed postlaunch hooks have no effect on rest of postlaunch hooks or launched process. They can be filtered by platform, host and application and order is defined by integer value. Hooks inside host are automatically loaded (one reason why folder name should match host name) or can be defined from modules. Hooks execution share same launch context where can be stored data used across multiple hooks (please be very specific in stored keys e.g. 'project' vs. 'project_name'). For more detailed information look into `openpype/lib/applications.py`.
### Public interface
Public face is at this moment related to launching of the DCC. At this moment there there is only option to modify environment variables before launch by implementing function `add_implementation_envs` (must be available in `openpype/hosts/{host name}/__init__.py`). The function is called after pre launch hooks, as last step before subprocess launch, to be able set environment variables crutial for proper integration. It is also good place for functions that are used in prelaunch hooks and in-DCC integration. Future plans are to be able get workfiles extensions from here. Right now workfiles extensions are hardcoded in `openpype/pipeline/constants.py` under `HOST_WORKFILE_EXTENSIONS`, we would like to handle hosts as addons similar to OpenPype modules, and more improvements which are now hardcoded.
Public face is at this moment related to launching of the DCC. At this moment there is only option to modify environment variables before launch by implementing function `add_implementation_envs` (must be available in `openpype/hosts/{host name}/__init__.py`). The function is called after pre launch hooks, as last step before subprocess launch, to be able to set environment variables crucial for proper integration. It is also good place for functions that are used in prelaunch hooks and in-DCC integration. Future plans are to be able to get workfiles extensions from here. Right now workfiles extensions are hardcoded in `openpype/pipeline/constants.py` under `HOST_WORKFILE_EXTENSIONS`, we would like to handle hosts as addons similar to OpenPype modules, and more improvements which are now hardcoded.
### Integration
We've prepared base class `HostBase` in `openpype/host/host.py` to define minimum requirements and provide some default method implementations. The minimum requirement for a host is the `name` attribute; such a host would not be able to do much but is valid. To extend functionality we've prepared interfaces that help to identify what a host is capable of and whether it is possible to use certain tools with it. For those cases we defined interfaces for each workflow. `IWorkfileHost` interface adds a requirement to implement workfiles related methods which makes the host usable in combination with the Workfiles tool. `ILoadHost` interface adds requirements to be able to load, update, switch or remove referenced representations which should add support to use the Loader and Scene Inventory tools. `INewPublisher` interface is required to be able to use the host with the new OpenPype publish workflow. This is what must or can be implemented to allow certain functionality. `HostBase` will have more responsibility which will be taken from global variables in future. This process won't happen at once, but will be slow to keep backwards compatibility for some time.

View file

@ -415,7 +415,7 @@ class CreateRender(Creator):
# - 'asset' - asset name
# - 'task' - task name
# - 'variant' - variant
# - 'family' - instnace family
# - 'family' - instance family
# Check if should use selection or not
if pre_create_data.get("use_selection"):

View file

@ -355,7 +355,7 @@ These inputs wraps another inputs into {key: value} relation
{
"type": "text",
"key": "command",
"label": "Comand"
"label": "Command"
}
]
},
@ -420,7 +420,7 @@ How output of the schema could look like on save:
- number input, can be used for both integer and float
- key `"decimal"` defines how many decimal places will be used, 0 is for integer input (Default: `0`)
- key `"minimum"` as minimum allowed number to enter (Default: `-99999`)
- key `"maxium"` as maximum allowed number to enter (Default: `99999`)
- key `"maximum"` as maximum allowed number to enter (Default: `99999`)
- key `"steps"` will change single step value of UI inputs (using arrows and wheel scroll)
- for UI it is possible to show slider to enable this option set `show_slider` to `true`
```javascript
@ -602,7 +602,7 @@ How output of the schema could look like on save:
- there are 2 possible ways how to set the type:
1.) dictionary with item modifiers (`number` input has `minimum`, `maximum` and `decimals`) in that case item type must be set as value of `"type"` (example below)
2.) item type name as string without modifiers (e.g. [text](#text))
3.) enhancement of 1.) there is also support of `template` type but be carefull about endless loop of templates
3.) enhancement of 1.) there is also support of `template` type but be careful about endless loop of templates
- goal of using `template` is to easily change same item definitions in multiple lists
1.) with item modifiers

View file

@ -57,7 +57,7 @@ Content:
Contains end to end testing in a DCC. Currently it is set up to start the DCC application with a prepared workfile, run the publish process and compare results in DB and file system automatically.
This approach is implemented as it should work in any DCC application and should cover most common use cases. Not all hosts allow "real headless" publishing, but all hosts should allow to trigger
publish process programatically when UI of host is actually running.
publish process programmatically when UI of host is actually running.
There will be eventually also possibility to build workfile and publish it programmatically, this would work only in DCCs that support it (Maya, Nuke).

View file

@ -4,7 +4,7 @@ title: Ftrack
sidebar_label: Project Manager
---
Ftrack is currently the main project management option for OpenPype. This documentation assumes that you are familiar with Ftrack and it's basic principles. If you're new to Ftrack, we recommend having a thorough look at [Ftrack Official Documentation](https://help.ftrack.com/en/).
Ftrack is currently the main project management option for OpenPype. This documentation assumes that you are familiar with Ftrack and its basic principles. If you're new to Ftrack, we recommend having a thorough look at [Ftrack Official Documentation](https://help.ftrack.com/en/).
## Project management
Setting project attributes is the key to properly working pipeline.

View file

@ -8,7 +8,7 @@ import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
Ftrack is currently the main project management option for OpenPype. This documentation assumes that you are familiar with Ftrack and it's basic principles. If you're new to Ftrack, we recommend having a thorough look at [Ftrack Official Documentation](http://ftrack.rtd.ftrack.com/en/stable/).
Ftrack is currently the main project management option for OpenPype. This documentation assumes that you are familiar with Ftrack and its basic principles. If you're new to Ftrack, we recommend having a thorough look at [Ftrack Official Documentation](http://ftrack.rtd.ftrack.com/en/stable/).
## Prepare Ftrack for OpenPype

View file

@ -7,7 +7,7 @@ sidebar_label: Kitsu
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
Kitsu is a great open source production tracker and can be used for project management instead of Ftrack. This documentation assumes that you are familiar with Kitsu and it's basic principles. If you're new to Kitsu, we recommend having a thorough look at [Kitsu Official Documentation](https://kitsu.cg-wire.com/).
Kitsu is a great open source production tracker and can be used for project management instead of Ftrack. This documentation assumes that you are familiar with Kitsu and its basic principles. If you're new to Kitsu, we recommend having a thorough look at [Kitsu Official Documentation](https://kitsu.cg-wire.com/).
## Prepare Kitsu for OpenPype
@ -41,4 +41,4 @@ openpype_console module kitsu push-to-zou -l me@domain.ext -p my_password
## Q&A
### Is it safe to rename an entity from Kitsu?
Absolutely! Entities are linked by their unique IDs between the two databases.
But renaming from the OP's Project Manager won't apply the change to Kitsu, it'll be overriden during the next synchronization.
But renaming from the OP's Project Manager won't apply the change to Kitsu, it'll be overridden during the next synchronization.

View file

@ -89,7 +89,7 @@ all share the same provider).
Handles files stored on disk storage.
Local drive provider is the most basic one that is used for accessing all standard hard disk storage scenarios. It will work with any storage that can be mounted on your system in a standard way. This could correspond to a physical external hard drive, network mounted storage, internal drive or even VPN connected network drive. It doesn't care about how te drive is mounted, but you must be able to point to it with a simple directory path.
Local drive provider is the most basic one that is used for accessing all standard hard disk storage scenarios. It will work with any storage that can be mounted on your system in a standard way. This could correspond to a physical external hard drive, network mounted storage, internal drive or even VPN connected network drive. It doesn't care about how the drive is mounted, but you must be able to point to it with a simple directory path.
Default sites `local` and `studio` both use local drive provider.

View file

@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem';
Project settings can have project specific values. Each new project is using studio values defined in **default** project but these values can be modified or overridden per project.
:::warning Default studio values
Projects always use default project values unless they have [project override](../admin_settings#project-overrides) (orage colour). Any changes in default project may affect all existing projects.
Projects always use default project values unless they have [project override](../admin_settings#project-overrides) (orange colour). Any changes in default project may affect all existing projects.
:::
## Color Management (ImageIO)
@ -39,14 +39,14 @@ Procedure of resolving path (from above example) will look first into path 1st a
### Using File rules
File rules are inspired by [OCIO v2 configuration](https://opencolorio.readthedocs.io/en/latest/guides/authoring/rules.html). Each rule has a unique name which can be overridden by host-specific _File rules_ (example: `project_settings/nuke/imageio/file_rules/rules`).
The _input pattern_ matching uses REGEX expression syntax (try [regexr.com](https://regexr.com/)). Matching rules procedure's intention is to be used during publishing or loading of representation. Since the publishing procedure is run before integrator formate publish template path, make sure the pattern is working or any work render path.
The _input pattern_ matching uses REGEX expression syntax (try [regexr.com](https://regexr.com/)). Matching rules procedure's intention is to be used during publishing or loading of representation. Since the publishing procedure is run before the integrator formats the publish template path, make sure the pattern works for any work render path.
:::warning Colorspace name input
The **colorspace name** value is a raw string input and no validation is run after saving project settings. We recommend to open the specified `config.ocio` file and copy pasting the exact colorspace names.
:::
### Extract OIIO Transcode
OIIOTools transcoder plugin with configurable output presets. Any incoming representation with `colorspaceData` is convertable to single or multiple representations with different target colorspaces or display and viewer names found in linked **config.ocio** file.
OIIOTools transcoder plugin with configurable output presets. Any incoming representation with `colorspaceData` is convertible to single or multiple representations with different target colorspaces or display and viewer names found in linked **config.ocio** file.
`oiiotool` is used for transcoding, eg. `oiiotool` must be present in `vendor/bin/oiio` or environment variable `OPENPYPE_OIIO_PATHS` must be provided for custom oiio installation.

View file

@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem';
Project settings can have project specific values. Each new project is using studio values defined in **default** project but these values can be modified or overridden per project.
:::warning Default studio values
Projects always use default project values unless they have [project override](../admin_settings#project-overrides) (orage colour). Any changes in default project may affect all existing projects.
Projects always use default project values unless they have [project override](../admin_settings#project-overrides) (orange colour). Any changes in default project may affect all existing projects.
:::
## Workfile Builder

View file

@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem';
Project settings can have project specific values. Each new project is using studio values defined in **default** project but these values can be modified or overridden per project.
:::warning Default studio values
Projects always use default project values unless they have [project override](../admin_settings#project-overrides) (orage colour). Any changes in default project may affect all existing projects.
Projects always use default project values unless they have [project override](../admin_settings#project-overrides) (orange colour). Any changes in default project may affect all existing projects.
:::
## Creator Plugins

View file

@ -8,7 +8,7 @@ import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
Ftrack is currently the main project management option for Pype. This documentation assumes that you are familiar with Ftrack and it's basic principles. If you're new to Ftrack, we recommend having a thorough look at [Ftrack Official Documentation](http://ftrack.rtd.ftrack.com/en/stable/).
Ftrack is currently the main project management option for Pype. This documentation assumes that you are familiar with Ftrack and its basic principles. If you're new to Ftrack, we recommend having a thorough look at [Ftrack Official Documentation](http://ftrack.rtd.ftrack.com/en/stable/).
## Prepare Ftrack for Pype

View file

@ -15,9 +15,9 @@ various usage scenarios.
## Studio Preparation
You can find detailed breakdown of technical requirements [here](dev_requirements), but in general OpenPype should be able
You can find a detailed breakdown of technical requirements [here](dev_requirements), but in general OpenPype should be able
to operate in most studios fairly quickly. The main obstacles are usually related to workflows and habits, that
might not be fully compatible with what OpenPype is expecting or enforcing. It is recommended to go through artists [key concepts](artist_concepts) to get idea about basics.
might not be fully compatible with what OpenPype is expecting or enforcing. It is recommended to go through artists [key concepts](artist_concepts) to get comfortable with the basics.
Keep in mind that if you run into any workflows that are not supported, it's usually just because we haven't hit
that particular case and it can most likely be added upon request.

File diff suppressed because it is too large Load diff