Mirror of https://github.com/ynput/ayon-core.git
Synced 2026-01-01 16:34:53 +01:00
Merge branch 'develop' into enhancement/AY-2612-resolve-adaptation-colorbleed-plugins
This commit is contained in:
commit 221a48ce34
419 changed files with 5532 additions and 2315 deletions

@@ -1,3 +1,3 @@
 name = "aftereffects"
 title = "AfterEffects"
-version = "0.1.3"
+version = "0.1.4"

@@ -22,12 +22,6 @@ class ValidateSceneSettingsModel(BaseSettingsModel):
     )


-class ValidateContainersModel(BaseSettingsModel):
-    enabled: bool = SettingsField(True, title="Enabled")
-    optional: bool = SettingsField(True, title="Optional")
-    active: bool = SettingsField(True, title="Active")
-
-
 class AfterEffectsPublishPlugins(BaseSettingsModel):
     CollectReview: CollectReviewPluginModel = SettingsField(
         default_factory=CollectReviewPluginModel,

@@ -37,10 +31,6 @@ class AfterEffectsPublishPlugins(BaseSettingsModel):
         default_factory=ValidateSceneSettingsModel,
         title="Validate Scene Settings",
     )
-    ValidateContainers: ValidateContainersModel = SettingsField(
-        default_factory=ValidateContainersModel,
-        title="Validate Containers",
-    )


 AE_PUBLISH_PLUGINS_DEFAULTS = {

@@ -58,9 +48,4 @@ AE_PUBLISH_PLUGINS_DEFAULTS = {
             ".*"
         ]
     },
-    "ValidateContainers": {
-        "enabled": True,
-        "optional": True,
-        "active": True,
-    }
 }

@@ -281,13 +281,20 @@ def prepare_app_environments(
             app.environment
         ]

+        task_entity = data.get("task_entity")
         folder_entity = data.get("folder_entity")
         # Add tools environments
         groups_by_name = {}
         tool_by_group_name = collections.defaultdict(dict)
-        if folder_entity:
-            # Make sure each tool group can be added only once
-            for key in folder_entity["attrib"].get("tools") or []:
+        tools = None
+        if task_entity:
+            tools = task_entity["attrib"].get("tools")
+
+        if tools is None and folder_entity:
+            tools = folder_entity["attrib"].get("tools")
+
+        if tools:
+            for key in tools:
                 tool = app.manager.tools.get(key)
                 if not tool or not tool.is_valid_for_app(app):
                     continue
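
The hunk above changes tool resolution so task-level tool lists take precedence over folder-level ones. A minimal standalone sketch of the same precedence rule, assuming entities shaped like AYON's {"attrib": {"tools": [...]}} dicts (the example values are hypothetical, not from the diff):

# Task tools win; folder tools are only a fallback when the task has none.
def resolve_tools(task_entity, folder_entity):
    tools = None
    if task_entity:
        tools = task_entity["attrib"].get("tools")
    if tools is None and folder_entity:
        tools = folder_entity["attrib"].get("tools")
    return tools or []

folder = {"attrib": {"tools": ["maya/2024"]}}
task = {"attrib": {"tools": ["maya/2025"]}}
assert resolve_tools(task, folder) == ["maya/2025"]
assert resolve_tools(None, folder) == ["maya/2024"]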

@@ -1,6 +1,6 @@
 name = "applications"
 title = "Applications"
-version = "0.2.1"
+version = "0.2.2"

 ayon_server_version = ">=1.0.7"
 ayon_launcher_version = ">=1.0.2"

@@ -1,3 +1,3 @@
 name = "blender"
 title = "Blender"
-version = "0.1.8"
+version = "0.1.9"

@@ -151,6 +151,10 @@ class PublishPluginsModel(BaseSettingsModel):
         default_factory=ExtractPlayblastModel,
         title="Extract Playblast"
     )
+    ExtractModelUSD: ValidatePluginModel = SettingsField(
+        default_factory=ValidatePluginModel,
+        title="Extract Model USD"
+    )


 DEFAULT_BLENDER_PUBLISH_SETTINGS = {

@@ -348,5 +352,10 @@ DEFAULT_BLENDER_PUBLISH_SETTINGS = {
            },
            indent=4
        )
    },
+    "ExtractModelUSD": {
+        "enabled": True,
+        "optional": True,
+        "active": True
+    }
 }

server_addon/clockify/client/ayon_clockify/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from .addon import ClockifyAddon

__all__ = (
    "ClockifyAddon",
)

server_addon/clockify/client/ayon_clockify/addon.py (new file, 288 lines)
@@ -0,0 +1,288 @@
import os
import threading
import time

from ayon_core.addon import AYONAddon, ITrayAddon, IPluginPaths

from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH


class ClockifyAddon(AYONAddon, ITrayAddon, IPluginPaths):
    name = "clockify"

    def initialize(self, studio_settings):
        enabled = self.name in studio_settings
        workspace_name = None
        if enabled:
            clockify_settings = studio_settings[self.name]
            workspace_name = clockify_settings["workspace_name"]

        # Disable the addon when no workspace is configured
        if enabled and not workspace_name:
            self.log.warning("Clockify Workspace is not set in settings.")
            enabled = False
        self.enabled = enabled
        self.workspace_name = workspace_name

        self.timer_manager = None
        self.MessageWidgetClass = None
        self.message_widget = None
        self._clockify_api = None

        # TimersManager attributes
        # - set `timers_manager_connector` only in `tray_init`
        self.timers_manager_connector = None
        self._timer_manager_addon = None

    @property
    def clockify_api(self):
        if self._clockify_api is None:
            from .clockify_api import ClockifyAPI

            self._clockify_api = ClockifyAPI(master_parent=self)
        return self._clockify_api

    def get_global_environments(self):
        return {"CLOCKIFY_WORKSPACE": self.workspace_name}

    def tray_init(self):
        from .widgets import ClockifySettings, MessageWidget

        self.MessageWidgetClass = MessageWidget

        self.message_widget = None
        self.widget_settings = ClockifySettings(self.clockify_api)
        self.widget_settings_required = None

        self.thread_timer_check = None
        # Bools
        self.bool_thread_check_running = False
        self.bool_api_key_set = False
        self.bool_workspace_set = False
        self.bool_timer_run = False
        self.bool_api_key_set = self.clockify_api.set_api()

        # Define itself as TimersManager connector
        self.timers_manager_connector = self

    def tray_start(self):
        if self.bool_api_key_set is False:
            self.show_settings()
            return

        self.bool_workspace_set = self.clockify_api.workspace_id is not None
        if self.bool_workspace_set is False:
            return

        self.start_timer_check()
        self.set_menu_visibility()

    def tray_exit(self, *_a, **_kw):
        return

    def get_plugin_paths(self):
        """Implementation of IPluginPaths to get plugin paths."""
        actions_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "launcher_actions"
        )
        return {"actions": [actions_path]}

    def get_ftrack_event_handler_paths(self):
        """Function for ftrack addon to add ftrack event handler paths."""
        return {
            "user": [CLOCKIFY_FTRACK_USER_PATH],
            "server": [CLOCKIFY_FTRACK_SERVER_PATH],
        }

    def clockify_timer_stopped(self):
        self.bool_timer_run = False
        self.timer_stopped()

    def start_timer_check(self):
        self.bool_thread_check_running = True
        if self.thread_timer_check is None:
            self.thread_timer_check = threading.Thread(
                target=self.check_running
            )
            self.thread_timer_check.daemon = True
            self.thread_timer_check.start()

    def stop_timer_check(self):
        # Stop the check loop before joining the thread
        self.bool_thread_check_running = False
        if self.thread_timer_check is not None:
            self.thread_timer_check.join()
            self.thread_timer_check = None

    def check_running(self):
        while self.bool_thread_check_running is True:
            bool_timer_run = False
            if self.clockify_api.get_in_progress() is not None:
                bool_timer_run = True

            if self.bool_timer_run != bool_timer_run:
                if self.bool_timer_run is True:
                    self.clockify_timer_stopped()
                elif self.bool_timer_run is False:
                    current_timer = self.clockify_api.get_in_progress()
                    if current_timer is None:
                        continue
                    current_proj_id = current_timer.get("projectId")
                    if not current_proj_id:
                        continue

                    project = self.clockify_api.get_project_by_id(
                        current_proj_id
                    )
                    if project and project.get("code") == 501:
                        continue

                    project_name = project.get("name")

                    current_timer_hierarchy = current_timer.get("description")
                    if not current_timer_hierarchy:
                        continue
                    hierarchy_items = current_timer_hierarchy.split("/")
                    # Each pype timer must have at least 2 items!
                    if len(hierarchy_items) < 2:
                        continue

                    task_name = hierarchy_items[-1]
                    hierarchy = hierarchy_items[:-1]

                    data = {
                        "task_name": task_name,
                        "hierarchy": hierarchy,
                        "project_name": project_name,
                    }
                    self.timer_started(data)

            self.bool_timer_run = bool_timer_run
            self.set_menu_visibility()
            time.sleep(5)

    def signed_in(self):
        if not self.timer_manager:
            return

        if not self.timer_manager.last_task:
            return

        if self.timer_manager.is_running:
            self.start_timer_manager(self.timer_manager.last_task)

    def on_message_widget_close(self):
        self.message_widget = None

    # Definition of Tray menu
    def tray_menu(self, parent_menu):
        # Menu for Tray App
        from qtpy import QtWidgets

        menu = QtWidgets.QMenu("Clockify", parent_menu)
        menu.setProperty("submenu", "on")

        # Actions
        action_show_settings = QtWidgets.QAction("Settings", menu)
        action_stop_timer = QtWidgets.QAction("Stop timer", menu)

        menu.addAction(action_show_settings)
        menu.addAction(action_stop_timer)

        action_show_settings.triggered.connect(self.show_settings)
        action_stop_timer.triggered.connect(self.stop_timer)

        self.action_stop_timer = action_stop_timer

        self.set_menu_visibility()

        parent_menu.addMenu(menu)

    def show_settings(self):
        self.widget_settings.input_api_key.setText(
            self.clockify_api.get_api_key()
        )
        self.widget_settings.show()

    def set_menu_visibility(self):
        self.action_stop_timer.setVisible(self.bool_timer_run)

    # --- TimersManager connection methods ---
    def register_timers_manager(self, timer_manager_addon):
        """Store TimersManager for future use."""
        self._timer_manager_addon = timer_manager_addon

    def timer_started(self, data):
        """Tell TimersManager that timer started."""
        if self._timer_manager_addon is not None:
            self._timer_manager_addon.timer_started(self.id, data)

    def timer_stopped(self):
        """Tell TimersManager that timer stopped."""
        if self._timer_manager_addon is not None:
            self._timer_manager_addon.timer_stopped(self.id)

    def stop_timer(self):
        """Called from TimersManager to stop timer."""
        self.clockify_api.finish_time_entry()

    def _verify_project_exists(self, project_name):
        project_id = self.clockify_api.get_project_id(project_name)
        if not project_id:
            self.log.warning(
                'Project "{}" was not found in Clockify. Timer won\'t start.'
                .format(project_name)
            )

            if not self.MessageWidgetClass:
                return

            msg = (
                'Project <b>"{}"</b> is not'
                ' in Clockify Workspace <b>"{}"</b>.'
                "<br><br>Please inform your Project Manager."
            ).format(project_name, str(self.clockify_api.workspace_name))

            self.message_widget = self.MessageWidgetClass(
                msg, "Clockify - Info Message"
            )
            self.message_widget.closed.connect(self.on_message_widget_close)
            self.message_widget.show()
            return False
        return project_id

    def start_timer(self, input_data):
        """Called from TimersManager to start timer."""
        # Skip if no API key is entered
        if not self.clockify_api.get_api_key():
            return

        project_name = input_data.get("project_name")
        folder_path = input_data.get("folder_path")
        task_name = input_data.get("task_name")
        task_type = input_data.get("task_type")
        if not all((project_name, folder_path, task_name, task_type)):
            return

        # Concatenate hierarchy and task to get description
        description = "/".join([folder_path.lstrip("/"), task_name])

        # Check project existence
        project_id = self._verify_project_exists(project_name)
        if not project_id:
            return

        # Setup timer tags
        if not task_type:
            self.log.info("No tag information found for the timer")

        tag_ids = []
        task_tag_id = self.clockify_api.get_tag_id(task_type)
        if task_tag_id is not None:
            tag_ids.append(task_tag_id)

        # Start timer
        self.clockify_api.start_time_entry(
            description,
            project_id,
            tag_ids=tag_ids,
            workspace_id=self.clockify_api.workspace_id,
            user_id=self.clockify_api.user_id,
        )
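
The addon talks to TimersManager only through the connector methods above. A minimal sketch of that handshake with a stand-in manager object (hypothetical, for illustration only):

# Stand-in TimersManager illustrating the connector contract:
# register_timers_manager() stores the manager, after which
# timer_started()/timer_stopped() forward events with the addon id.
class FakeTimersManager:
    def timer_started(self, addon_id, data):
        print("started", addon_id, data)

    def timer_stopped(self, addon_id):
        print("stopped", addon_id)

# addon = ...  # ClockifyAddon instance created by AYON's addon manager
# addon.register_timers_manager(FakeTimersManager())
# addon.timer_started({"task_name": "modeling",
#                      "hierarchy": ["assets", "hero"],
#                      "project_name": "demo"})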

server_addon/clockify/client/ayon_clockify/clockify_api.py (new file, 447 lines)
@@ -0,0 +1,447 @@
import os
import json
import datetime

import requests

from ayon_core.lib.local_settings import AYONSecureRegistry
from ayon_core.lib import Logger

from .constants import (
    CLOCKIFY_ENDPOINT,
    ADMIN_PERMISSION_NAMES,
)


class ClockifyAPI:
    log = Logger.get_logger(__name__)

    def __init__(self, api_key=None, master_parent=None):
        self.workspace_name = None
        self.master_parent = master_parent
        self.api_key = api_key
        self._workspace_id = None
        self._user_id = None
        self._secure_registry = None

    @property
    def secure_registry(self):
        if self._secure_registry is None:
            self._secure_registry = AYONSecureRegistry("clockify")
        return self._secure_registry

    @property
    def headers(self):
        return {"x-api-key": self.api_key}

    @property
    def workspace_id(self):
        return self._workspace_id

    @property
    def user_id(self):
        return self._user_id

    def verify_api(self):
        for key, value in self.headers.items():
            if value is None or value.strip() == "":
                return False
        return True

    def set_api(self, api_key=None):
        if api_key is None:
            api_key = self.get_api_key()

        if api_key is not None and self.validate_api_key(api_key) is True:
            self.api_key = api_key
            self.set_workspace()
            self.set_user_id()
            if self.master_parent:
                self.master_parent.signed_in()
            return True
        return False

    def validate_api_key(self, api_key):
        test_headers = {"x-api-key": api_key}
        action_url = "user"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=test_headers
        )
        if response.status_code != 200:
            return False
        return True

    def validate_workspace_permissions(self, workspace_id=None, user_id=None):
        if user_id is None:
            self.log.info("No user_id found during validation")
            return False
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = f"workspaces/{workspace_id}/users?includeRoles=1"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        data = response.json()
        for user in data:
            if user.get("id") == user_id:
                roles_data = user.get("roles")
                for entities in roles_data:
                    if entities.get("role") in ADMIN_PERMISSION_NAMES:
                        return True
        return False

    def get_user_id(self):
        action_url = "user"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        result = response.json()
        user_id = result.get("id", None)

        return user_id

    def set_workspace(self, name=None):
        if name is None:
            name = os.environ.get("CLOCKIFY_WORKSPACE", None)
        self.workspace_name = name
        if self.workspace_name is None:
            return
        try:
            result = self.validate_workspace()
        except Exception:
            result = False
        if result is not False:
            self._workspace_id = result
            if self.master_parent is not None:
                self.master_parent.start_timer_check()
            return True
        return False

    def validate_workspace(self, name=None):
        if name is None:
            name = self.workspace_name
        all_workspaces = self.get_workspaces()
        if name in all_workspaces:
            return all_workspaces[name]
        return False

    def set_user_id(self):
        try:
            user_id = self.get_user_id()
        except Exception:
            user_id = None
        if user_id is not None:
            self._user_id = user_id

    def get_api_key(self):
        return self.secure_registry.get_item("api_key", None)

    def save_api_key(self, api_key):
        self.secure_registry.set_item("api_key", api_key)

    def get_workspaces(self):
        action_url = "workspaces/"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return {
            workspace["name"]: workspace["id"] for workspace in response.json()
        }

    def get_projects(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = f"workspaces/{workspace_id}/projects"
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        # Returns None when access to the workspace projects is forbidden
        if response.status_code != 403:
            result = response.json()
            return {project["name"]: project["id"] for project in result}

    def get_project_by_id(self, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "workspaces/{}/projects/{}".format(
            workspace_id, project_id
        )
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )

        return response.json()

    def get_tags(self, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "workspaces/{}/tags".format(workspace_id)
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )

        return {tag["name"]: tag["id"] for tag in response.json()}

    def get_tasks(self, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "workspaces/{}/projects/{}/tasks".format(
            workspace_id, project_id
        )
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )

        return {task["name"]: task["id"] for task in response.json()}

    def get_workspace_id(self, workspace_name):
        all_workspaces = self.get_workspaces()
        if workspace_name not in all_workspaces:
            return None
        return all_workspaces[workspace_name]

    def get_project_id(self, project_name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        all_projects = self.get_projects(workspace_id)
        if project_name not in all_projects:
            return None
        return all_projects[project_name]

    def get_tag_id(self, tag_name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        all_tags = self.get_tags(workspace_id)
        if tag_name not in all_tags:
            return None
        return all_tags[tag_name]

    def get_task_id(self, task_name, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        all_tasks = self.get_tasks(project_id, workspace_id)
        if task_name not in all_tasks:
            return None
        return all_tasks[task_name]

    def get_current_time(self):
        return str(datetime.datetime.utcnow().isoformat()) + "Z"

    def start_time_entry(
        self,
        description,
        project_id,
        task_id=None,
        tag_ids=None,
        workspace_id=None,
        user_id=None,
        billable=True,
    ):
        # Workspace
        if workspace_id is None:
            workspace_id = self.workspace_id
        # User ID
        if user_id is None:
            user_id = self._user_id

        # Get the running timer to check if a new one must be started
        current_timer = self.get_in_progress()

        # Do not restart the timer if another timer is already running
        # with the same values for the current task
        if current_timer:
            current_timer_hierarchy = current_timer.get("description")
            current_project_id = current_timer.get("projectId")
            current_task_id = current_timer.get("taskId")
            if (
                description == current_timer_hierarchy
                and project_id == current_project_id
                and task_id == current_task_id
            ):
                self.log.info(
                    "Timer for the current project is already running"
                )
                self.bool_timer_run = True
                return self.bool_timer_run
            self.finish_time_entry()

        # Convert billable to strings
        if billable:
            billable = "true"
        else:
            billable = "false"
        # Rest API Action
        action_url = "workspaces/{}/user/{}/time-entries".format(
            workspace_id, user_id
        )
        start = self.get_current_time()
        body = {
            "start": start,
            "billable": billable,
            "description": description,
            "projectId": project_id,
            "taskId": task_id,
            "tagIds": tag_ids,
        }
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        if response.status_code < 300:
            return True
        return False

    def _get_current_timer_values(self, response):
        if response is None:
            return
        try:
            output = response.json()
        except json.decoder.JSONDecodeError:
            return None
        if output and isinstance(output, list):
            return output[0]
        return None

    def get_in_progress(self, user_id=None, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        if user_id is None:
            user_id = self.user_id

        action_url = (
            f"workspaces/{workspace_id}/user/"
            f"{user_id}/time-entries?in-progress=1"
        )
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return self._get_current_timer_values(response)

    def finish_time_entry(self, workspace_id=None, user_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        if user_id is None:
            user_id = self.user_id
        current_timer = self.get_in_progress()
        if not current_timer:
            return
        action_url = "workspaces/{}/user/{}/time-entries".format(
            workspace_id, user_id
        )
        body = {"end": self.get_current_time()}
        response = requests.patch(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def get_time_entries(self, workspace_id=None, user_id=None, quantity=10):
        if workspace_id is None:
            workspace_id = self.workspace_id
        if user_id is None:
            user_id = self.user_id
        action_url = "workspaces/{}/user/{}/time-entries".format(
            workspace_id, user_id
        )
        response = requests.get(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return response.json()[:quantity]

    def remove_time_entry(self, tid, workspace_id=None, user_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "workspaces/{}/user/{}/time-entries/{}".format(
            workspace_id, user_id, tid
        )
        response = requests.delete(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers
        )
        return response.json()

    def add_project(self, name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "workspaces/{}/projects".format(workspace_id)
        body = {
            "name": name,
            "clientId": "",
            "isPublic": "false",
            "estimate": {"estimate": 0, "type": "AUTO"},
            "color": "#f44336",
            "billable": "true",
        }
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def add_workspace(self, name):
        action_url = "workspaces/"
        body = {"name": name}
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def add_task(self, name, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "workspaces/{}/projects/{}/tasks".format(
            workspace_id, project_id
        )
        body = {"name": name, "projectId": project_id}
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def add_tag(self, name, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "workspaces/{}/tags".format(workspace_id)
        body = {"name": name}
        response = requests.post(
            CLOCKIFY_ENDPOINT + action_url, headers=self.headers, json=body
        )
        return response.json()

    def delete_project(self, project_id, workspace_id=None):
        if workspace_id is None:
            workspace_id = self.workspace_id
        action_url = "/workspaces/{}/projects/{}".format(
            workspace_id, project_id
        )
        response = requests.delete(
            CLOCKIFY_ENDPOINT + action_url,
            headers=self.headers,
        )
        return response.json()

    def convert_input(
        self, entity_id, entity_name, mode="Workspace", project_id=None
    ):
        if entity_id is None:
            error = False
            error_msg = 'Missing information "{}"'
            if mode.lower() == "workspace":
                if entity_id is None and entity_name is None:
                    if self.workspace_id is not None:
                        entity_id = self.workspace_id
                    else:
                        error = True
                else:
                    entity_id = self.get_workspace_id(entity_name)
            else:
                if entity_id is None and entity_name is None:
                    error = True
                elif mode.lower() == "project":
                    entity_id = self.get_project_id(entity_name)
                elif mode.lower() == "task":
                    entity_id = self.get_task_id(
                        task_name=entity_name, project_id=project_id
                    )
                else:
                    raise TypeError("Unknown type")
            # Raise error
            if error:
                raise ValueError(error_msg.format(mode))

        return entity_id
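
A minimal usage sketch of the ClockifyAPI wrapper above (the workspace, project, and tag names are hypothetical; a valid API key must be stored in the secure registry or passed explicitly):

from ayon_clockify.clockify_api import ClockifyAPI

api = ClockifyAPI(api_key="...")      # or rely on the stored key
if api.set_api():                     # validates key, resolves workspace/user
    project_id = api.get_project_id("demo_project")
    if project_id:
        api.start_time_entry(
            "assets/hero/modeling",   # description is the task hierarchy
            project_id,
            tag_ids=[api.get_tag_id("Modeling")],
            workspace_id=api.workspace_id,
            user_id=api.user_id,
        )
    api.finish_time_entry()           # stops the running timer, if any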

server_addon/clockify/client/ayon_clockify/constants.py (new file, 12 lines)
@@ -0,0 +1,12 @@
import os


CLOCKIFY_FTRACK_SERVER_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "ftrack", "server"
)
CLOCKIFY_FTRACK_USER_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "ftrack", "user"
)

ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"]
CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/v1/"

@@ -0,0 +1,146 @@
import os
import json

from ayon_clockify.clockify_api import ClockifyAPI

from ayon_ftrack.lib import ServerAction


class SyncClockifyServer(ServerAction):
    """Synchronise project names and task types."""

    identifier = "clockify.sync.server"
    label = "Sync To Clockify (server)"
    description = "Synchronise data to Clockify workspace"

    role_list = ["Administrator", "project Manager"]

    def __init__(self, *args, **kwargs):
        super(SyncClockifyServer, self).__init__(*args, **kwargs)

        workspace_name = os.environ.get("CLOCKIFY_WORKSPACE")
        api_key = os.environ.get("CLOCKIFY_API_KEY")
        self.clockify_api = ClockifyAPI(api_key)
        self.clockify_api.set_workspace(workspace_name)
        if api_key is None:
            modified_key = "None"
        else:
            # Mask the middle half of the API key before logging it
            str_len = int(len(api_key) / 2)
            start_replace = int(len(api_key) / 4)
            modified_key = ""
            for idx in range(len(api_key)):
                if idx >= start_replace and idx < start_replace + str_len:
                    replacement = "X"
                else:
                    replacement = api_key[idx]
                modified_key += replacement

        self.log.info(
            "Clockify info. Workspace: \"{}\" API key: \"{}\"".format(
                str(workspace_name), str(modified_key)
            )
        )

    def discover(self, session, entities, event):
        if (
            len(entities) != 1
            or entities[0].entity_type.lower() != "project"
        ):
            return False
        return True

    def launch(self, session, entities, event):
        self.clockify_api.set_api()
        if self.clockify_api.workspace_id is None:
            return {
                "success": False,
                "message": "Clockify Workspace or API key are not set!"
            }

        if not self.clockify_api.validate_workspace_permissions(
            self.clockify_api.workspace_id, self.clockify_api.user_id
        ):
            return {
                "success": False,
                "message": "Missing permissions for this action!"
            }

        # JOB SETTINGS
        user_id = event["source"]["user"]["id"]
        user = session.query("User where id is " + user_id).one()

        job = session.create("Job", {
            "user": user,
            "status": "running",
            "data": json.dumps({"description": "Sync Ftrack to Clockify"})
        })
        session.commit()

        project_entity = entities[0]
        if project_entity.entity_type.lower() != "project":
            project_entity = self.get_project_from_entity(project_entity)

        project_name = project_entity["full_name"]
        self.log.info(
            "Synchronization of project \"{}\" to clockify begins.".format(
                project_name
            )
        )
        task_types = (
            project_entity["project_schema"]["_task_type_schema"]["types"]
        )
        task_type_names = [
            task_type["name"] for task_type in task_types
        ]
        try:
            clockify_projects = self.clockify_api.get_projects()
            if project_name not in clockify_projects:
                response = self.clockify_api.add_project(project_name)
                if "id" not in response:
                    self.log.warning(
                        "Project \"{}\" can't be created. Response: {}".format(
                            project_name, response
                        )
                    )
                    return {
                        "success": False,
                        "message": (
                            "Can't create clockify project \"{}\"."
                            " Unexpected error."
                        ).format(project_name)
                    }

            clockify_workspace_tags = self.clockify_api.get_tags()
            for task_type_name in task_type_names:
                if task_type_name in clockify_workspace_tags:
                    self.log.debug(
                        "Task \"{}\" already exists".format(task_type_name)
                    )
                    continue

                response = self.clockify_api.add_tag(task_type_name)
                if "id" not in response:
                    self.log.warning(
                        "Task \"{}\" can't be created. Response: {}".format(
                            task_type_name, response
                        )
                    )

            job["status"] = "done"

        except Exception:
            self.log.warning(
                "Synchronization to clockify failed.",
                exc_info=True
            )

        finally:
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()

        return True


def register(session, **kw):
    SyncClockifyServer(session).register()

@@ -0,0 +1,123 @@
import json

from ayon_clockify.clockify_api import ClockifyAPI
from ayon_ftrack.lib import BaseAction, statics_icon


class SyncClockifyLocal(BaseAction):
    """Synchronise project names and task types."""

    identifier = "clockify.sync.local"
    label = "Sync To Clockify"
    description = "Synchronise data to Clockify workspace"
    role_list = ["Administrator", "project Manager"]
    icon = statics_icon("app_icons", "clockify-white.png")

    def __init__(self, *args, **kwargs):
        super(SyncClockifyLocal, self).__init__(*args, **kwargs)

        self.clockify_api = ClockifyAPI()

    def discover(self, session, entities, event):
        if (
            len(entities) == 1
            and entities[0].entity_type.lower() == "project"
        ):
            return True
        return False

    def launch(self, session, entities, event):
        self.clockify_api.set_api()
        if self.clockify_api.workspace_id is None:
            return {
                "success": False,
                "message": "Clockify Workspace or API key are not set!"
            }

        if (
            self.clockify_api.validate_workspace_permissions(
                self.clockify_api.workspace_id, self.clockify_api.user_id)
            is False
        ):
            return {
                "success": False,
                "message": "Missing permissions for this action!"
            }

        # JOB SETTINGS
        user_id = event["source"]["user"]["id"]
        user = session.query("User where id is " + user_id).one()

        job = session.create("Job", {
            "user": user,
            "status": "running",
            "data": json.dumps({
                "description": "Sync ftrack to Clockify"
            })
        })
        session.commit()

        project_entity = entities[0]
        if project_entity.entity_type.lower() != "project":
            project_entity = self.get_project_from_entity(project_entity)

        project_name = project_entity["full_name"]
        self.log.info(
            "Synchronization of project \"{}\" to clockify begins.".format(
                project_name
            )
        )
        task_types = (
            project_entity["project_schema"]["_task_type_schema"]["types"]
        )
        task_type_names = [
            task_type["name"] for task_type in task_types
        ]
        try:
            clockify_projects = self.clockify_api.get_projects()
            if project_name not in clockify_projects:
                response = self.clockify_api.add_project(project_name)
                if "id" not in response:
                    self.log.warning(
                        "Project \"{}\" can't be created. Response: {}".format(
                            project_name, response
                        )
                    )
                    return {
                        "success": False,
                        "message": (
                            "Can't create clockify project \"{}\"."
                            " Unexpected error."
                        ).format(project_name)
                    }

            clockify_workspace_tags = self.clockify_api.get_tags()
            for task_type_name in task_type_names:
                if task_type_name in clockify_workspace_tags:
                    self.log.debug(
                        "Task \"{}\" already exists".format(task_type_name)
                    )
                    continue

                response = self.clockify_api.add_tag(task_type_name)
                if "id" not in response:
                    self.log.warning(
                        "Task \"{}\" can't be created. Response: {}".format(
                            task_type_name, response
                        )
                    )

            job["status"] = "done"

        except Exception:
            pass

        finally:
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()

        return True


def register(session, **kw):
    SyncClockifyLocal(session).register()

@@ -0,0 +1,49 @@
import ayon_api

from ayon_clockify.clockify_api import ClockifyAPI

from ayon_core.pipeline import LauncherAction


class ClockifyStart(LauncherAction):
    name = "clockify_start_timer"
    label = "Clockify - Start Timer"
    icon = "app_icons/clockify.png"
    order = 500
    clockify_api = ClockifyAPI()

    def is_compatible(self, selection):
        """Return whether the action is compatible with the session."""
        return selection.is_task_selected

    def process(self, selection, **kwargs):
        self.clockify_api.set_api()
        user_id = self.clockify_api.user_id
        workspace_id = self.clockify_api.workspace_id
        project_name = selection.project_name
        folder_path = selection.folder_path
        task_name = selection.task_name
        description = "/".join([folder_path.lstrip("/"), task_name])

        # Fetch folder entity
        folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], task_name
        )

        # Get task type to fill the timer tag
        task_type = task_entity["taskType"]

        project_id = self.clockify_api.get_project_id(
            project_name, workspace_id
        )
        tag_ids = []
        tag_name = task_type
        tag_ids.append(self.clockify_api.get_tag_id(tag_name, workspace_id))
        self.clockify_api.start_time_entry(
            description,
            project_id,
            tag_ids=tag_ids,
            workspace_id=workspace_id,
            user_id=user_id,
        )

@@ -0,0 +1,81 @@
import ayon_api

from ayon_clockify.clockify_api import ClockifyAPI
from ayon_core.pipeline import LauncherAction


class ClockifyPermissionsCheckFailed(Exception):
    """Timer start failed due to user permissions check.

    Message should be self explanatory as traceback won't be shown.
    """
    pass


class ClockifySync(LauncherAction):
    name = "sync_to_clockify"
    label = "Sync to Clockify"
    icon = "app_icons/clockify-white.png"
    order = 500
    clockify_api = ClockifyAPI()

    def is_compatible(self, selection):
        """Check if there are projects to sync."""
        if selection.is_project_selected:
            return True

        try:
            next(ayon_api.get_projects())
            return True
        except StopIteration:
            return False

    def process(self, selection, **kwargs):
        self.clockify_api.set_api()
        workspace_id = self.clockify_api.workspace_id
        user_id = self.clockify_api.user_id
        if not self.clockify_api.validate_workspace_permissions(
            workspace_id, user_id
        ):
            raise ClockifyPermissionsCheckFailed(
                "Current Clockify user is missing permissions"
                " for this action!"
            )

        if selection.is_project_selected:
            projects_to_sync = [selection.project_entity]
        else:
            projects_to_sync = ayon_api.get_projects()

        projects_info = {
            project["name"]: {
                task_type["name"]
                for task_type in project["taskTypes"]
            }
            for project in projects_to_sync
        }

        clockify_projects = self.clockify_api.get_projects(workspace_id)
        for project_name, task_types in projects_info.items():
            if project_name in clockify_projects:
                continue

            response = self.clockify_api.add_project(
                project_name, workspace_id
            )
            if "id" not in response:
                self.log.error(
                    "Project {} can't be created".format(project_name)
                )
                continue

            clockify_workspace_tags = self.clockify_api.get_tags(workspace_id)
            for task_type in task_types:
                if task_type not in clockify_workspace_tags:
                    response = self.clockify_api.add_tag(
                        task_type, workspace_id
                    )
                    if "id" not in response:
                        self.log.error(
                            "Task {} can't be created".format(task_type)
                        )
                        continue

server_addon/clockify/client/ayon_clockify/version.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'clockify' version."""
__version__ = "0.2.0"

server_addon/clockify/client/ayon_clockify/widgets.py (new file, 207 lines)
@@ -0,0 +1,207 @@
from qtpy import QtCore, QtGui, QtWidgets

from ayon_core import resources, style


class MessageWidget(QtWidgets.QWidget):

    SIZE_W = 300
    SIZE_H = 130

    closed = QtCore.Signal()

    def __init__(self, messages, title):
        super(MessageWidget, self).__init__()

        # Icon
        icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
        self.setWindowIcon(icon)

        self.setWindowFlags(
            QtCore.Qt.WindowCloseButtonHint |
            QtCore.Qt.WindowMinimizeButtonHint
        )

        # Size setting
        self.resize(self.SIZE_W, self.SIZE_H)
        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
        self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100))

        # Style
        self.setStyleSheet(style.load_stylesheet())

        self.setLayout(self._ui_layout(messages))
        self.setWindowTitle(title)

    def _ui_layout(self, messages):
        if not messages:
            messages = ["*Missing messages (This is a bug)*"]

        elif not isinstance(messages, (tuple, list)):
            messages = [messages]

        main_layout = QtWidgets.QVBoxLayout(self)

        labels = []
        for message in messages:
            label = QtWidgets.QLabel(message)
            label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
            label.setTextFormat(QtCore.Qt.RichText)
            label.setWordWrap(True)

            labels.append(label)
            main_layout.addWidget(label)

        btn_close = QtWidgets.QPushButton("Close")
        btn_close.setToolTip("Close this window")
        btn_close.clicked.connect(self.on_close_clicked)

        btn_group = QtWidgets.QHBoxLayout()
        btn_group.addStretch(1)
        btn_group.addWidget(btn_close)

        main_layout.addLayout(btn_group)

        self.labels = labels
        self.btn_group = btn_group
        self.btn_close = btn_close
        self.main_layout = main_layout

        return main_layout

    def on_close_clicked(self):
        self.close()

    def close(self, *args, **kwargs):
        self.closed.emit()
        super(MessageWidget, self).close(*args, **kwargs)


class ClockifySettings(QtWidgets.QWidget):
    SIZE_W = 500
    SIZE_H = 130

    loginSignal = QtCore.Signal(object, object, object)

    def __init__(self, clockify_api, optional=True):
        super(ClockifySettings, self).__init__()

        self.clockify_api = clockify_api
        self.optional = optional
        self.validated = False

        # Icon
        icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
        self.setWindowIcon(icon)

        self.setWindowTitle("Clockify settings")
        self.setWindowFlags(
            QtCore.Qt.WindowCloseButtonHint |
            QtCore.Qt.WindowMinimizeButtonHint
        )

        # Size setting
        self.resize(self.SIZE_W, self.SIZE_H)
        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
        self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100))
        self.setStyleSheet(style.load_stylesheet())

        self._ui_init()

    def _ui_init(self):
        label_api_key = QtWidgets.QLabel("Clockify API key:")

        input_api_key = QtWidgets.QLineEdit()
        input_api_key.setFrame(True)
        input_api_key.setPlaceholderText("e.g. XX1XxXX2x3x4xXxx")

        error_label = QtWidgets.QLabel("")
        error_label.setTextFormat(QtCore.Qt.RichText)
        error_label.setWordWrap(True)
        error_label.hide()

        form_layout = QtWidgets.QFormLayout()
        form_layout.setContentsMargins(10, 15, 10, 5)
        form_layout.addRow(label_api_key, input_api_key)
        form_layout.addRow(error_label)

        btn_ok = QtWidgets.QPushButton("Ok")
        btn_ok.setToolTip("Sets Clockify API key so timers can be started and stopped")

        btn_cancel = QtWidgets.QPushButton("Cancel")
        cancel_tooltip = "Application won't start"
        if self.optional:
            cancel_tooltip = "Close this window"
        btn_cancel.setToolTip(cancel_tooltip)

        btn_group = QtWidgets.QHBoxLayout()
        btn_group.addStretch(1)
        btn_group.addWidget(btn_ok)
        btn_group.addWidget(btn_cancel)

        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.addLayout(form_layout)
        main_layout.addLayout(btn_group)

        btn_ok.clicked.connect(self.click_ok)
        btn_cancel.clicked.connect(self._close_widget)

        self.label_api_key = label_api_key
        self.input_api_key = input_api_key
        self.error_label = error_label

        self.btn_ok = btn_ok
        self.btn_cancel = btn_cancel

    def setError(self, msg):
        self.error_label.setText(msg)
        self.error_label.show()

    def invalid_input(self, entity):
        entity.setStyleSheet("border: 1px solid red;")

    def click_ok(self):
        api_key = self.input_api_key.text().strip()
        if self.optional is True and api_key == "":
            self.clockify_api.save_api_key(None)
            self.clockify_api.set_api(api_key)
            self.validated = False
            self._close_widget()
            return

        validation = self.clockify_api.validate_api_key(api_key)

        if validation:
            self.clockify_api.save_api_key(api_key)
            self.clockify_api.set_api(api_key)
            self.validated = True
            self._close_widget()
        else:
            self.invalid_input(self.input_api_key)
            self.validated = False
            self.setError(
                "Entered invalid API key"
            )

    def showEvent(self, event):
        super(ClockifySettings, self).showEvent(event)

        # Make both buttons the same width
        max_width = max(
            self.btn_ok.sizeHint().width(),
            self.btn_cancel.sizeHint().width()
        )
        self.btn_ok.setMinimumWidth(max_width)
        self.btn_cancel.setMinimumWidth(max_width)

    def closeEvent(self, event):
        if self.optional is True:
            event.ignore()
            self._close_widget()
        else:
            self.validated = False

    def _close_widget(self):
        if self.optional is True:
            self.hide()
        else:
            self.close()

@@ -1,3 +1,9 @@
 name = "clockify"
 title = "Clockify"
-version = "0.1.1"
+version = "0.2.0"
+client_dir = "ayon_clockify"
+
+ayon_required_addons = {
+    "core": ">0.3.2",
+}
+ayon_compatible_addons = {}

@@ -47,7 +47,7 @@ plugin_for = ["ayon_server"]
 """

 CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*-
-"""Package declaring AYON core addon version."""
+"""Package declaring AYON addon '{}' version."""
 __version__ = "{}"
 '''

@@ -183,6 +183,7 @@ def create_addon_zip(


 def prepare_client_code(
+    addon_name: str,
     addon_dir: Path,
     addon_output_dir: Path,
     addon_version: str

@@ -211,7 +212,9 @@ def prepare_client_code(
     version_path = subpath / "version.py"
     if version_path.exists():
         with open(version_path, "w") as stream:
-            stream.write(CLIENT_VERSION_CONTENT.format(addon_version))
+            stream.write(
+                CLIENT_VERSION_CONTENT.format(addon_name, addon_version)
+            )

     zip_filepath = private_dir / "client.zip"
     with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf:
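
With the added addon_name argument, the rendered version.py matches the client files shipped in this commit; for example, using the clockify values shown earlier:

# Rendering CLIENT_VERSION_CONTENT (defined above) for the clockify addon:
rendered = CLIENT_VERSION_CONTENT.format("clockify", "0.2.0")
# rendered now equals:
# # -*- coding: utf-8 -*-
# """Package declaring AYON addon 'clockify' version."""
# __version__ = "0.2.0"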

@@ -262,7 +265,9 @@ def create_addon_package(
         server_dir, addon_output_dir / "server", dirs_exist_ok=True
     )

-    prepare_client_code(addon_dir, addon_output_dir, addon_version)
+    prepare_client_code(
+        package.name, addon_dir, addon_output_dir, addon_version
+    )

     if create_zip:
         create_addon_zip(

@@ -1,3 +1,3 @@
 name = "deadline"
 title = "Deadline"
-version = "0.1.11"
+version = "0.1.12"

@@ -2,11 +2,13 @@ from typing import Type

 from ayon_server.addons import BaseServerAddon

-from .settings import DeadlineSettings, DEFAULT_VALUES
+from .settings import DeadlineSettings, DEFAULT_VALUES, DeadlineSiteSettings


 class Deadline(BaseServerAddon):
     settings_model: Type[DeadlineSettings] = DeadlineSettings
+    site_settings_model: Type[DeadlineSiteSettings] = DeadlineSiteSettings

     async def get_default_settings(self):
         settings_model_cls = self.get_settings_model()

@@ -2,9 +2,11 @@ from .main import (
     DeadlineSettings,
     DEFAULT_VALUES,
 )
+from .site_settings import DeadlineSiteSettings


 __all__ = (
     "DeadlineSettings",
+    "DeadlineSiteSettings",
     "DEFAULT_VALUES",
 )

@@ -15,12 +15,6 @@ from .publish_plugins import (
 )


-class ServerListSubmodel(BaseSettingsModel):
-    _layout = "compact"
-    name: str = SettingsField(title="Name")
-    value: str = SettingsField(title="Value")
-
-
 async def defined_deadline_ws_name_enum_resolver(
     addon: "BaseServerAddon",
     settings_variant: str = "production",

@@ -32,25 +26,39 @@ async def defined_deadline_ws_name_enum_resolver(

     settings = await addon.get_studio_settings(variant=settings_variant)

-    ws_urls = []
+    ws_server_name = []
     for deadline_url_item in settings.deadline_urls:
-        ws_urls.append(deadline_url_item.name)
+        ws_server_name.append(deadline_url_item.name)

-    return ws_urls
+    return ws_server_name
+
+
+class ServerItemSubmodel(BaseSettingsModel):
+    """Connection info about configured DL servers."""
+    _layout = "compact"
+    name: str = SettingsField(title="Name")
+    value: str = SettingsField(title="Url")
+    require_authentication: bool = SettingsField(
+        False, title="Require authentication")
+    not_verify_ssl: bool = SettingsField(
+        False, title="Don't verify SSL")


 class DeadlineSettings(BaseSettingsModel):
-    deadline_urls: list[ServerListSubmodel] = SettingsField(
+    # configured DL servers
+    deadline_urls: list[ServerItemSubmodel] = SettingsField(
         default_factory=list,
-        title="System Deadline Webservice URLs",
+        title="System Deadline Webservice Info",
         scope=["studio"],
     )
+    # name(key) of selected server for project
     deadline_server: str = SettingsField(
-        title="Project deadline server",
+        title="Project Deadline server name",
         section="---",
         scope=["project"],
         enum_resolver=defined_deadline_ws_name_enum_resolver
     )
+
     publish: PublishPluginsModel = SettingsField(
         default_factory=PublishPluginsModel,
         title="Publish Plugins",

@@ -62,11 +70,14 @@ class DeadlineSettings(BaseSettingsModel):
         return value


 DEFAULT_VALUES = {
     "deadline_urls": [
         {
             "name": "default",
-            "value": "http://127.0.0.1:8082"
+            "value": "http://127.0.0.1:8082",
+            "require_authentication": False,
+            "not_verify_ssl": False
         }
     ],
     "deadline_server": "default",

server_addon/deadline/server/settings/site_settings.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
)
from .main import defined_deadline_ws_name_enum_resolver


class CredentialPerServerModel(BaseSettingsModel):
    """Provide credentials for configured DL servers."""
    _layout = "expanded"
    server_name: str = SettingsField(
        "",
        title="DL server name",
        enum_resolver=defined_deadline_ws_name_enum_resolver)
    username: str = SettingsField("", title="Username")
    password: str = SettingsField("", title="Password")


class DeadlineSiteSettings(BaseSettingsModel):
    local_settings: list[CredentialPerServerModel] = SettingsField(
        default_factory=list,
        title="Local setting",
        description="Please provide credentials for configured Deadline servers",
    )

@@ -1,3 +1,3 @@
 name = "harmony"
 title = "Harmony"
-version = "0.1.2"
+version = "0.1.3"

@@ -45,11 +45,6 @@ DEFAULT_HARMONY_SETTING = {
         "optional": True,
         "active": True
     },
-    "ValidateContainers": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
     "ValidateSceneSettings": {
         "enabled": True,
         "optional": True,

@@ -18,14 +18,6 @@ class ValidateAudioPlugin(BaseSettingsModel):
     active: bool = SettingsField(True, title="Active")


-class ValidateContainersPlugin(BaseSettingsModel):
-    """Check if loaded containers in scene are latest versions."""
-    _isGroup = True
-    enabled: bool = True
-    optional: bool = SettingsField(False, title="Optional")
-    active: bool = SettingsField(True, title="Active")
-
-
 class ValidateSceneSettingsPlugin(BaseSettingsModel):
     """Validate if FrameStart, FrameEnd and Resolution match shot data in DB.
     Use regular expressions to limit validations only on particular asset

@@ -63,11 +55,6 @@ class HarmonyPublishPlugins(BaseSettingsModel):
         default_factory=ValidateAudioPlugin,
     )

-    ValidateContainers: ValidateContainersPlugin = SettingsField(
-        title="Validate Containers",
-        default_factory=ValidateContainersPlugin,
-    )
-
     ValidateSceneSettings: ValidateSceneSettingsPlugin = SettingsField(
         title="Validate Scene Settings",
         default_factory=ValidateSceneSettingsPlugin,

@@ -1,3 +1,3 @@
 name = "houdini"
 title = "Houdini"
-version = "0.2.13"
+version = "0.2.15"

@@ -57,6 +57,9 @@ class CreatePluginsModel(BaseSettingsModel):
     CreateMantraROP: CreatorModel = SettingsField(
         default_factory=CreatorModel,
         title="Create Mantra ROP")
+    CreateModel: CreatorModel = SettingsField(
+        default_factory=CreatorModel,
+        title="Create Model")
     CreatePointCache: CreatorModel = SettingsField(
         default_factory=CreatorModel,
         title="Create PointCache (Abc)")

@@ -124,6 +127,10 @@ DEFAULT_HOUDINI_CREATE_SETTINGS = {
         "enabled": True,
         "default_variants": ["Main"]
     },
+    "CreateModel": {
+        "enabled": True,
+        "default_variants": ["Main"]
+    },
     "CreatePointCache": {
         "enabled": True,
         "default_variants": ["Main"]

@@ -1,4 +1,7 @@
-from ayon_server.settings import BaseSettingsModel, SettingsField
+from ayon_server.settings import (
+    BaseSettingsModel,
+    SettingsField
+)


 # Publish Plugins

@@ -20,6 +23,27 @@ class CollectChunkSizeModel(BaseSettingsModel):
         title="Frames Per Task")


+class AOVFilterSubmodel(BaseSettingsModel):
+    """You should use the same host name you are using for Houdini."""
+    host_name: str = SettingsField("", title="Houdini Host name")
+    value: list[str] = SettingsField(
+        default_factory=list,
+        title="AOV regex"
+    )
+
+
+class CollectLocalRenderInstancesModel(BaseSettingsModel):
+
+    use_deadline_aov_filter: bool = SettingsField(
+        False,
+        title="Use Deadline AOV Filter"
+    )
+
+    aov_filter: AOVFilterSubmodel = SettingsField(
+        default_factory=AOVFilterSubmodel,
+        title="Reviewable products filter"
+    )
+
+
 class ValidateWorkfilePathsModel(BaseSettingsModel):
     enabled: bool = SettingsField(title="Enabled")
     optional: bool = SettingsField(title="Optional")
|
||||
|
|
@@ -49,10 +73,10 @@ class PublishPluginsModel(BaseSettingsModel):
        default_factory=CollectChunkSizeModel,
        title="Collect Chunk Size."
    )
    ValidateContainers: BasicValidateModel = SettingsField(
        default_factory=BasicValidateModel,
        title="Validate Latest Containers.",
        section="Validators")
    CollectLocalRenderInstances: CollectLocalRenderInstancesModel = SettingsField(
        default_factory=CollectLocalRenderInstancesModel,
        title="Collect Local Render Instances."
    )
    ValidateInstanceInContextHoudini: BasicValidateModel = SettingsField(
        default_factory=BasicValidateModel,
        title="Validate Instance is in same Context.")
@@ -82,10 +106,14 @@ DEFAULT_HOUDINI_PUBLISH_SETTINGS = {
        "optional": True,
        "chunk_size": 999999
    },
    "ValidateContainers": {
        "enabled": True,
        "optional": True,
        "active": True
    "CollectLocalRenderInstances": {
        "use_deadline_aov_filter": False,
        "aov_filter": {
            "host_name": "houdini",
            "value": [
                ".*([Bb]eauty).*"
            ]
        }
    },
    "ValidateInstanceInContextHoudini": {
        "enabled": True,
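The default `.*([Bb]eauty).*` filter above is a plain Python regular expression matched against AOV names; a quick hedged sketch (the AOV names are made up for illustration):

import re

# Hypothetical AOV names; only those matching the filter would be
# treated as reviewable products.
aov_filter = ".*([Bb]eauty).*"
for aov in ["beauty", "karma_Beauty_01", "depth"]:
    print(aov, bool(re.match(aov_filter, aov)))
# beauty True / karma_Beauty_01 True / depth False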
@@ -1,3 +1,3 @@
name = "maya"
title = "Maya"
version = "0.1.17"
version = "0.1.20"
@@ -46,7 +46,6 @@ def extract_alembic_overrides_enum():
    return [
        {"label": "Custom Attributes", "value": "attr"},
        {"label": "Custom Attributes Prefix", "value": "attrPrefix"},
        {"label": "Auto Subd", "value": "autoSubd"},
        {"label": "Data Format", "value": "dataFormat"},
        {"label": "Euler Filter", "value": "eulerFilter"},
        {"label": "Mel Per Frame Callback", "value": "melPerFrameCallback"},
@@ -347,17 +346,6 @@ class ExtractAlembicModel(BaseSettingsModel):
    families: list[str] = SettingsField(
        default_factory=list,
        title="Families")
    autoSubd: bool = SettingsField(
        title="Auto Subd",
        description=(
            "If this flag is present and the mesh has crease edges, crease "
            "vertices or holes, the mesh (OPolyMesh) would now be written out "
            "as an OSubD and crease info will be stored in the Alembic file. "
            "Otherwise, creases info won't be preserved in Alembic file unless"
            " a custom Boolean attribute SubDivisionMesh has been added to "
            "mesh node and its value is true."
        )
    )
    eulerFilter: bool = SettingsField(
        title="Euler Filter",
        description="Apply Euler filter while sampling rotations."
@@ -409,6 +397,10 @@ class ExtractAlembicModel(BaseSettingsModel):
        title="Write Color Sets",
        description="Write vertex colors with the geometry."
    )
    writeCreases: bool = SettingsField(
        title="Write Creases",
        description="Write the geometry's edge and vertex crease information."
    )
    writeFaceSets: bool = SettingsField(
        title="Write Face Sets",
        description="Write face sets with the geometry."
@@ -642,10 +634,6 @@ class PublishersModel(BaseSettingsModel):
        title="Validate Instance In Context",
        section="Validators"
    )
    ValidateContainers: BasicValidateModel = SettingsField(
        default_factory=BasicValidateModel,
        title="Validate Containers"
    )
    ValidateFrameRange: ValidateFrameRangeModel = SettingsField(
        default_factory=ValidateFrameRangeModel,
        title="Validate Frame Range"
@@ -925,10 +913,6 @@ class PublishersModel(BaseSettingsModel):
        default_factory=BasicValidateModel,
        title="Validate Rig Controllers",
    )
    ValidateAnimatedReferenceRig: BasicValidateModel = SettingsField(
        default_factory=BasicValidateModel,
        title="Validate Animated Reference Rig",
    )
    ValidateAnimationContent: BasicValidateModel = SettingsField(
        default_factory=BasicValidateModel,
        title="Validate Animation Content",
@@ -1071,11 +1055,6 @@ DEFAULT_PUBLISH_SETTINGS = {
        "optional": True,
        "active": True
    },
    "ValidateContainers": {
        "enabled": True,
        "optional": True,
        "active": True
    },
    "ValidateFrameRange": {
        "enabled": True,
        "optional": True,
@@ -1455,11 +1434,6 @@ DEFAULT_PUBLISH_SETTINGS = {
        "optional": True,
        "active": True
    },
    "ValidateAnimatedReferenceRig": {
        "enabled": True,
        "optional": False,
        "active": True
    },
    "ValidateAnimationContent": {
        "enabled": True,
        "optional": False,
@@ -1617,7 +1591,6 @@ DEFAULT_PUBLISH_SETTINGS = {
        ],
        "attr": "",
        "attrPrefix": "",
        "autoSubd": False,
        "bake_attributes": [],
        "bake_attribute_prefixes": [],
        "dataFormat": "ogawa",
@@ -1641,7 +1614,7 @@ DEFAULT_PUBLISH_SETTINGS = {
        "renderableOnly": False,
        "stripNamespaces": True,
        "uvsOnly": False,
        "uvWrite": False,
        "uvWrite": True,
        "userAttr": "",
        "userAttrPrefix": "",
        "verbose": False,
@@ -1649,6 +1622,7 @@ DEFAULT_PUBLISH_SETTINGS = {
        "wholeFrameGeo": False,
        "worldSpace": True,
        "writeColorSets": False,
        "writeCreases": False,
        "writeFaceSets": False,
        "writeNormals": True,
        "writeUVSets": False,
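These settings mirror flags of Maya's AbcExport job string; a hedged sketch of how the defaults above might surface at export time (the frame range, root node and output path are made up, and the exact flag set the plugin builds is an assumption):

# Hypothetical Maya-side job string reflecting the defaults above
# (uvWrite now on, writeCreases off); run inside Maya with the
# AbcExport plugin loaded.
from maya import cmds

job = (
    "-frameRange 1001 1050 -uvWrite -worldSpace -stripNamespaces "
    "-dataFormat ogawa -root |char_GRP -file /tmp/out.abc"
)
cmds.AbcExport(j=job)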
10 server_addon/nuke/client/ayon_nuke/__init__.py Normal file
@@ -0,0 +1,10 @@
from .addon import (
    NUKE_ROOT_DIR,
    NukeAddon,
)


__all__ = (
    "NUKE_ROOT_DIR",
    "NukeAddon",
)
71 server_addon/nuke/client/ayon_nuke/addon.py Normal file
@@ -0,0 +1,71 @@
import os
import platform
from ayon_core.addon import AYONAddon, IHostAddon

NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))


class NukeAddon(AYONAddon, IHostAddon):
    name = "nuke"
    host_name = "nuke"

    def add_implementation_envs(self, env, _app):
        # Add requirements to NUKE_PATH
        new_nuke_paths = [
            os.path.join(NUKE_ROOT_DIR, "startup")
        ]
        old_nuke_path = env.get("NUKE_PATH") or ""
        for path in old_nuke_path.split(os.pathsep):
            if not path:
                continue

            norm_path = os.path.normpath(path)
            if norm_path not in new_nuke_paths:
                new_nuke_paths.append(norm_path)

        env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths)
        # Remove auto screen scale factor for Qt
        # - let Nuke decide its value
        env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
        # Remove tkinter library paths if they are set
        env.pop("TK_LIBRARY", None)
        env.pop("TCL_LIBRARY", None)

        # Add vendor to PYTHONPATH
        python_path = env["PYTHONPATH"]
        python_path_parts = []
        if python_path:
            python_path_parts = python_path.split(os.pathsep)
        vendor_path = os.path.join(NUKE_ROOT_DIR, "vendor")
        python_path_parts.insert(0, vendor_path)
        env["PYTHONPATH"] = os.pathsep.join(python_path_parts)

        # Set default values if they are not already set via settings
        defaults = {
            "LOGLEVEL": "DEBUG"
        }
        for key, value in defaults.items():
            if not env.get(key):
                env[key] = value

        # Try to add QuickTime to PATH
        # NOTE: platform.system() returns "Windows", so compare
        # case-insensitively (the original lowercase compare never matched).
        quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem"
        if (platform.system().lower() == "windows"
                and os.path.exists(quick_time_path)):
            path_value = env.get("PATH") or ""
            path_paths = [
                path
                for path in path_value.split(os.pathsep)
                if path
            ]
            path_paths.append(quick_time_path)
            env["PATH"] = os.pathsep.join(path_paths)

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(NUKE_ROOT_DIR, "hooks")
        ]

    def get_workfile_extensions(self):
        return [".nk"]
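A small hedged sketch of the NUKE_PATH manipulation `add_implementation_envs` performs, written standalone so it runs outside the addon (the input paths are illustrative):

import os

# Hypothetical input environment.
env = {"NUKE_PATH": "/studio/nuke/tools", "PYTHONPATH": "/studio/python"}

# Same prepend-and-dedupe that add_implementation_envs performs:
new_paths = ["/addons/nuke/startup"]  # stands in for NUKE_ROOT_DIR/startup
for path in (env.get("NUKE_PATH") or "").split(os.pathsep):
    if path and os.path.normpath(path) not in new_paths:
        new_paths.append(os.path.normpath(path))
env["NUKE_PATH"] = os.pathsep.join(new_paths)
print(env["NUKE_PATH"])  # /addons/nuke/startup:/studio/nuke/tools (on POSIX)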
104 server_addon/nuke/client/ayon_nuke/api/__init__.py Normal file
@@ -0,0 +1,104 @@
from .workio import (
    file_extensions,
    has_unsaved_changes,
    save_file,
    open_file,
    current_file,
    work_root,
)
from .command import (
    viewer_update_and_undo_stop
)
from .plugin import (
    NukeCreator,
    NukeWriteCreator,
    NukeCreatorError,
    get_instance_group_node_childs,
    get_colorspace_from_node
)
from .pipeline import (
    NukeHost,

    ls,

    list_instances,
    remove_instance,
    select_instance,

    containerise,
    parse_container,
    update_container,
)
from .lib import (
    INSTANCE_DATA_KNOB,
    ROOT_DATA_KNOB,
    maintained_selection,
    reset_selection,
    select_nodes,
    get_view_process_node,
    duplicate_node,
    convert_knob_value_to_correct_type,
    get_node_data,
    set_node_data,
    update_node_data,
    create_write_node,
    link_knobs
)
from .utils import (
    colorspace_exists_on_node,
    get_colorspace_list
)

from .actions import (
    SelectInvalidAction,
    SelectInstanceNodeAction
)

__all__ = (
    "file_extensions",
    "has_unsaved_changes",
    "save_file",
    "open_file",
    "current_file",
    "work_root",

    "viewer_update_and_undo_stop",

    "NukeCreator",
    "NukeWriteCreator",
    "NukeCreatorError",
    "NukeHost",
    "get_instance_group_node_childs",
    "get_colorspace_from_node",

    "ls",

    "list_instances",
    "remove_instance",
    "select_instance",

    "containerise",
    "parse_container",
    "update_container",

    "INSTANCE_DATA_KNOB",
    "ROOT_DATA_KNOB",
    "maintained_selection",
    "reset_selection",
    "select_nodes",
    "get_view_process_node",
    "duplicate_node",
    "convert_knob_value_to_correct_type",
    "get_node_data",
    "set_node_data",
    "update_node_data",
    "create_write_node",
    "link_knobs",

    "colorspace_exists_on_node",
    "get_colorspace_list",

    "SelectInvalidAction",
    "SelectInstanceNodeAction"
)
77 server_addon/nuke/client/ayon_nuke/api/actions.py Normal file
@@ -0,0 +1,77 @@
import pyblish.api

from ayon_core.pipeline.publish import get_errored_instances_from_context
from .lib import (
    reset_selection,
    select_nodes
)


class SelectInvalidAction(pyblish.api.Action):
    """Select invalid nodes in Nuke when plug-in failed.

    To retrieve the invalid nodes this assumes a static `get_invalid()`
    method is available on the plugin.

    """
    label = "Select invalid nodes"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):

        errored_instances = get_errored_instances_from_context(context,
                                                               plugin=plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes..")
        invalid = set()
        for instance in errored_instances:
            invalid_nodes = plugin.get_invalid(instance)

            if invalid_nodes:
                if isinstance(invalid_nodes, (list, tuple)):
                    invalid.update(invalid_nodes)
                else:
                    self.log.warning("Plug-in returned to be invalid, "
                                     "but has no selectable nodes.")

        if invalid:
            self.log.info("Selecting invalid nodes: {}".format(invalid))
            reset_selection()
            select_nodes(invalid)
        else:
            self.log.info("No invalid nodes found.")


class SelectInstanceNodeAction(pyblish.api.Action):
    """Select instance node for failed plugin."""
    label = "Select instance node"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "mdi.cursor-default-click"

    def process(self, context, plugin):

        # Get the errored instances for the plug-in
        errored_instances = get_errored_instances_from_context(
            context, plugin)

        # Get the instance nodes for the plug-ins
        self.log.info("Finding instance nodes..")
        nodes = set()
        for instance in errored_instances:
            instance_node = instance.data.get("transientData", {}).get("node")
            if not instance_node:
                raise RuntimeError(
                    "No transientData['node'] found on instance: {}".format(
                        instance
                    )
                )
            nodes.add(instance_node)

        if nodes:
            self.log.info("Selecting instance nodes: {}".format(nodes))
            reset_selection()
            select_nodes(nodes)
        else:
            self.log.info("No instance nodes found.")
21 server_addon/nuke/client/ayon_nuke/api/command.py Normal file
@@ -0,0 +1,21 @@
import logging
import contextlib
import nuke

log = logging.getLogger(__name__)


@contextlib.contextmanager
def viewer_update_and_undo_stop():
    """Lock viewer from updating and stop recording undo steps"""
    try:
        # stop active viewer to update any change
        viewer = nuke.activeViewer()
        if viewer:
            viewer.stop()
        else:
            log.warning("No available active Viewer")
        nuke.Undo.disable()
        yield
    finally:
        nuke.Undo.enable()
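A minimal usage sketch for the context manager above; it assumes it runs inside a Nuke session and the batch edit is illustrative:

# Hypothetical usage: batch knob edits without flooding the undo stack
# or re-rendering the viewer per change.
import nuke
from ayon_nuke.api import viewer_update_and_undo_stop

with viewer_update_and_undo_stop():
    for node in nuke.allNodes("Write"):
        node["disable"].setValue(True)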
4 server_addon/nuke/client/ayon_nuke/api/constants.py Normal file
@@ -0,0 +1,4 @@
import os


ASSIST = bool(os.getenv("NUKEASSIST"))
92 server_addon/nuke/client/ayon_nuke/api/gizmo_menu.py Normal file
@@ -0,0 +1,92 @@
import os
import re
import nuke

from ayon_core.lib import Logger

log = Logger.get_logger(__name__)


class GizmoMenu():
    def __init__(self, title, icon=None):

        self.toolbar = self._create_toolbar_menu(
            title,
            icon=icon
        )

        self._script_actions = []

    def _create_toolbar_menu(self, name, icon=None):
        nuke_node_menu = nuke.menu("Nodes")
        return nuke_node_menu.addMenu(
            name,
            icon=icon
        )

    def _make_menu_path(self, path, icon=None):
        parent = self.toolbar
        for folder in re.split(r"/|\\", path):
            if not folder:
                continue
            existing_menu = parent.findItem(folder)
            if existing_menu:
                parent = existing_menu
            else:
                parent = parent.addMenu(folder, icon=icon)

        return parent

    def build_from_configuration(self, configuration):
        for menu in configuration:
            # Construct parent path else parent is toolbar
            parent = self.toolbar
            gizmo_toolbar_path = menu.get("gizmo_toolbar_path")
            if gizmo_toolbar_path:
                parent = self._make_menu_path(gizmo_toolbar_path)

            for item in menu["sub_gizmo_list"]:
                assert isinstance(item, dict), "Configuration is wrong!"

                if not item.get("title"):
                    continue

                item_type = item.get("sourcetype")

                if item_type == "python":
                    parent.addCommand(
                        item["title"],
                        command=str(item["command"]),
                        icon=item.get("icon"),
                        shortcut=item.get("shortcut")
                    )
                elif item_type == "file":
                    parent.addCommand(
                        item['title'],
                        "nuke.createNode('{}')".format(item.get('file_name')),
                        shortcut=item.get('shortcut')
                    )

                # add separator
                # Special behavior for separators
                elif item_type == "separator":
                    parent.addSeparator()

                # add submenu
                # items should hold a collection of submenu items (dict)
                elif item_type == "menu":
                    # assert "items" in item, "Menu is missing 'items' key"
                    parent.addMenu(
                        item['title'],
                        icon=item.get('icon')
                    )

    def add_gizmo_path(self, gizmo_paths):
        for gizmo_path in gizmo_paths:
            if os.path.isdir(gizmo_path):
                for folder in os.listdir(gizmo_path):
                    if os.path.isdir(os.path.join(gizmo_path, folder)):
                        nuke.pluginAddPath(os.path.join(gizmo_path, folder))
                nuke.pluginAddPath(gizmo_path)
            else:
                log.warning("This path doesn't exist: {}".format(gizmo_path))
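The configuration consumed by `build_from_configuration` is a list of menu definitions; a hedged sketch inferred from the keys the method reads (the paths, titles and gizmo names here are made up):

# Hypothetical configuration matching the keys read above
# ("gizmo_toolbar_path", "sub_gizmo_list", "sourcetype", ...).
menu = GizmoMenu(title="AYON Gizmos", icon="ayon_icon.png")
menu.add_gizmo_path(["/studio/nuke/gizmos"])
menu.build_from_configuration([
    {
        "gizmo_toolbar_path": "Studio/Keying",
        "sub_gizmo_list": [
            {"sourcetype": "file", "title": "Denoise",
             "file_name": "StudioDenoise"},
            {"sourcetype": "separator", "title": "_"},
            {"sourcetype": "python", "title": "Reload",
             "command": "print('reload')"},
        ],
    }
])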
2990 server_addon/nuke/client/ayon_nuke/api/lib.py Normal file
File diff suppressed because it is too large
640 server_addon/nuke/client/ayon_nuke/api/pipeline.py Normal file
@@ -0,0 +1,640 @@
import nuke

import os
import importlib
from collections import OrderedDict, defaultdict

import pyblish.api

from ayon_core.host import (
    HostBase,
    IWorkfileHost,
    ILoadHost,
    IPublishHost
)
from ayon_core.settings import get_current_project_settings
from ayon_core.lib import register_event_callback, Logger
from ayon_core.pipeline import (
    register_loader_plugin_path,
    register_creator_plugin_path,
    register_inventory_action_path,
    register_workfile_build_plugin_path,
    AYON_INSTANCE_ID,
    AVALON_INSTANCE_ID,
    AVALON_CONTAINER_ID,
    get_current_folder_path,
    get_current_task_name,
    registered_host,
)
from ayon_core.pipeline.workfile import BuildWorkfile
from ayon_core.tools.utils import host_tools
from ayon_nuke import NUKE_ROOT_DIR
from ayon_core.tools.workfile_template_build import open_template_ui

from .lib import (
    Context,
    ROOT_DATA_KNOB,
    INSTANCE_DATA_KNOB,
    get_main_window,
    WorkfileSettings,
    # TODO: remove this once workfile builder is removed
    process_workfile_builder,
    start_workfile_template_builder,
    launch_workfiles_app,
    check_inventory_versions,
    set_avalon_knob_data,
    read_avalon_data,
    on_script_load,
    dirmap_file_name_filter,
    add_scripts_menu,
    add_scripts_gizmo,
    get_node_data,
    set_node_data,
    MENU_LABEL,
)
from .workfile_template_builder import (
    build_workfile_template,
    create_placeholder,
    update_placeholder,
    NukeTemplateBuilder,
)
from .workio import (
    open_file,
    save_file,
    file_extensions,
    has_unsaved_changes,
    work_root,
    current_file
)
from .constants import ASSIST

log = Logger.get_logger(__name__)

PLUGINS_DIR = os.path.join(NUKE_ROOT_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")

# registering pyblish gui regarding settings in presets
if os.getenv("PYBLISH_GUI", None):
    pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None))


class NukeHost(
    HostBase, IWorkfileHost, ILoadHost, IPublishHost
):
    name = "nuke"

    def open_workfile(self, filepath):
        return open_file(filepath)

    def save_workfile(self, filepath=None):
        return save_file(filepath)

    def work_root(self, session):
        return work_root(session)

    def get_current_workfile(self):
        return current_file()

    def workfile_has_unsaved_changes(self):
        return has_unsaved_changes()

    def get_workfile_extensions(self):
        return file_extensions()

    def get_containers(self):
        return ls()

    def install(self):
        """Installing all requirements for Nuke host"""

        pyblish.api.register_host("nuke")

        self.log.info("Registering Nuke plug-ins..")
        pyblish.api.register_plugin_path(PUBLISH_PATH)
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)
        register_inventory_action_path(INVENTORY_PATH)
        register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

        # Register AYON event for workfiles loading.
        register_event_callback("workio.open_file", check_inventory_versions)
        register_event_callback("taskChanged", change_context_label)

        _install_menu()

        # add script menu
        add_scripts_menu()
        add_scripts_gizmo()

        add_nuke_callbacks()

        launch_workfiles_app()

    def get_context_data(self):
        root_node = nuke.root()
        return get_node_data(root_node, ROOT_DATA_KNOB)

    def update_context_data(self, data, changes):
        root_node = nuke.root()
        set_node_data(root_node, ROOT_DATA_KNOB, data)


def add_nuke_callbacks():
    """Add all available Nuke callbacks."""
    nuke_settings = get_current_project_settings()["nuke"]
    workfile_settings = WorkfileSettings()

    # Set context settings.
    nuke.addOnCreate(
        workfile_settings.set_context_settings, nodeClass="Root")

    # adding favorites to file browser
    nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")

    # template builder callbacks
    nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root")

    # TODO: remove this callback once workfile builder is removed
    nuke.addOnCreate(process_workfile_builder, nodeClass="Root")

    # fix ffmpeg settings on script
    nuke.addOnScriptLoad(on_script_load)

    # set checker for last versions on loaded containers
    nuke.addOnScriptLoad(check_inventory_versions)
    nuke.addOnScriptSave(check_inventory_versions)

    # set apply all workfile settings on script load and save
    nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)

    if nuke_settings["dirmap"]["enabled"]:
        log.info("Added Nuke's dir-mapping callback ...")
        # Add dirmap for file paths.
        nuke.addFilenameFilter(dirmap_file_name_filter)

    log.info("Added Nuke callbacks ...")


def reload_config():
    """Attempt to reload pipeline at run-time.

    CAUTION: This is primarily for development and debugging purposes.

    """

    for module in (
        "ayon_nuke.api.actions",
        "ayon_nuke.api.menu",
        "ayon_nuke.api.plugin",
        "ayon_nuke.api.lib",
    ):
        log.info("Reloading module: {}...".format(module))

        module = importlib.import_module(module)

        try:
            importlib.reload(module)
        except AttributeError as e:
            from importlib import reload
            log.warning("Cannot reload module: {}".format(e))
            reload(module)


def _show_workfiles():
    # Make sure parent is not set
    # - this makes the Workfiles tool a separate window, which
    #   avoids issues with reopening
    # - it is possible to explicitly change the on-top flag of the tool
    host_tools.show_workfiles(parent=None, on_top=False)


def get_context_label():
    return "{0}, {1}".format(
        get_current_folder_path(),
        get_current_task_name()
    )


def _install_menu():
    """Install AYON menu into Nuke's main menu bar."""

    main_window = get_main_window()
    menubar = nuke.menu("Nuke")
    menu = menubar.addMenu(MENU_LABEL)

    if not ASSIST:
        label = get_context_label()
        context_action_item = menu.addCommand("Context")
        context_action_item.setEnabled(False)

        Context.context_action_item = context_action_item

        context_action = context_action_item.action()
        context_action.setText(label)

        # add separator after context label
        menu.addSeparator()

    menu.addCommand(
        "Work Files...",
        _show_workfiles
    )

    menu.addSeparator()
    if not ASSIST:
        # only add parent if nuke version is 14 or higher
        # known issue with no solution yet
        menu.addCommand(
            "Create...",
            lambda: host_tools.show_publisher(
                parent=main_window,
                tab="create"
            )
        )
        # only add parent if nuke version is 14 or higher
        # known issue with no solution yet
        menu.addCommand(
            "Publish...",
            lambda: host_tools.show_publisher(
                parent=main_window,
                tab="publish"
            )
        )

    menu.addCommand(
        "Load...",
        lambda: host_tools.show_loader(
            parent=main_window,
            use_context=True
        )
    )
    menu.addCommand(
        "Manage...",
        lambda: host_tools.show_scene_inventory(parent=main_window)
    )
    menu.addSeparator()
    menu.addCommand(
        "Library...",
        lambda: host_tools.show_library_loader(
            parent=main_window
        )
    )
    menu.addSeparator()
    menu.addCommand(
        "Set Resolution",
        lambda: WorkfileSettings().reset_resolution()
    )
    menu.addCommand(
        "Set Frame Range",
        lambda: WorkfileSettings().reset_frame_range_handles()
    )
    menu.addCommand(
        "Set Colorspace",
        lambda: WorkfileSettings().set_colorspace()
    )
    menu.addCommand(
        "Apply All Settings",
        lambda: WorkfileSettings().set_context_settings()
    )

    menu.addSeparator()
    menu.addCommand(
        "Build Workfile",
        lambda: BuildWorkfile().process()
    )

    menu_template = menu.addMenu("Template Builder")
    menu_template.addCommand(
        "Build Workfile from template",
        lambda: build_workfile_template()
    )

    if not ASSIST:
        menu_template.addSeparator()
        menu_template.addCommand(
            "Open template",
            lambda: open_template_ui(
                NukeTemplateBuilder(registered_host()), get_main_window()
            )
        )
        menu_template.addCommand(
            "Create Place Holder",
            lambda: create_placeholder()
        )
        menu_template.addCommand(
            "Update Place Holder",
            lambda: update_placeholder()
        )

    menu.addSeparator()
    menu.addCommand(
        "Experimental tools...",
        lambda: host_tools.show_experimental_tools_dialog(parent=main_window)
    )
    menu.addSeparator()
    # add reload pipeline only in debug mode
    if bool(os.getenv("NUKE_DEBUG")):
        menu.addSeparator()
        menu.addCommand("Reload Pipeline", reload_config)

    # adding shortcuts
    add_shortcuts_from_presets()


def change_context_label():
    if ASSIST:
        return

    context_action_item = Context.context_action_item
    if context_action_item is None:
        return
    context_action = context_action_item.action()

    old_label = context_action.text()
    new_label = get_context_label()

    context_action.setText(new_label)

    log.info("Task label changed from `{}` to `{}`".format(
        old_label, new_label))


def add_shortcuts_from_presets():
    menubar = nuke.menu("Nuke")
    nuke_presets = get_current_project_settings()["nuke"]["general"]

    if nuke_presets.get("menu"):
        menu_label_mapping = {
            "create": "Create...",
            "manage": "Manage...",
            "load": "Load...",
            "build_workfile": "Build Workfile",
            "publish": "Publish..."
        }

        for command_name, shortcut_str in nuke_presets.get("menu").items():
            log.info("menu_name `{}` | menu_label `{}`".format(
                command_name, MENU_LABEL
            ))
            log.info("Adding Shortcut `{}` to `{}`".format(
                shortcut_str, command_name
            ))
            try:
                menu = menubar.findItem(MENU_LABEL)
                item_label = menu_label_mapping[command_name]
                menuitem = menu.findItem(item_label)
                menuitem.setShortcut(shortcut_str)
            except (AttributeError, KeyError) as e:
                log.error(e)


def containerise(node,
                 name,
                 namespace,
                 context,
                 loader=None,
                 data=None):
    """Bundle `node` into an assembly and imprint it with metadata

    Containerisation enables tracking of version, author and origin
    for loaded assets.

    Arguments:
        node (nuke.Node): Nuke's node object to imprint as container
        name (str): Name of resulting assembly
        namespace (str): Namespace under which to host container
        context (dict): Asset information
        loader (str, optional): Name of node used to produce this container.

    Returns:
        node (nuke.Node): containerised nuke's node object

    """
    data = OrderedDict(
        [
            ("schema", "openpype:container-2.0"),
            ("id", AVALON_CONTAINER_ID),
            ("name", name),
            ("namespace", namespace),
            ("loader", str(loader)),
            ("representation", context["representation"]["id"]),
        ],

        **data or dict()
    )

    set_avalon_knob_data(node, data)

    # set tab to first native
    node.setTab(0)

    return node


def parse_container(node):
    """Returns containerised data of a node

    Reads the imprinted data from `containerise`.

    Arguments:
        node (nuke.Node): Nuke's node object to read imprinted data

    Returns:
        dict: The container schema data for this container node.

    """
    data = read_avalon_data(node)

    # If any required key is missing, return no container
    required = ["schema", "id", "name",
                "namespace", "loader", "representation"]
    if not all(key in data for key in required):
        return

    # Store the node's name
    data.update({
        "objectName": node.fullName(),
        "node": node,
    })

    return data


def update_container(node, keys=None):
    """Returns node with updated container data

    Arguments:
        node (nuke.Node): The node in Nuke to imprint as container,
        keys (dict, optional): data which should be updated

    Returns:
        node (nuke.Node): nuke node with updated container data

    Raises:
        TypeError: if given an invalid container node

    """
    keys = keys or dict()

    container = parse_container(node)
    if not container:
        raise TypeError("Not a valid container node.")

    container.update(keys)
    node = set_avalon_knob_data(node, container)

    return node


def ls():
    """List available containers.

    This function is used by the Container Manager in Nuke. You'll
    need to implement a for-loop that then *yields* one Container at
    a time.
    """
    all_nodes = nuke.allNodes(recurseGroups=False)

    nodes = [n for n in all_nodes]

    for n in nodes:
        container = parse_container(n)
        if container:
            yield container


def list_instances(creator_id=None):
    """List all created instances to publish from current workfile.

    For SubsetManager

    Args:
        creator_id (Optional[str]): creator identifier

    Returns:
        (list) of dictionaries matching instances format
    """
    instances_by_order = defaultdict(list)
    product_instances = []
    instance_ids = set()

    for node in nuke.allNodes(recurseGroups=True):

        if node.Class() in ["Viewer", "Dot"]:
            continue

        try:
            if node["disable"].value():
                continue
        except NameError:
            # pass if disable knob doesn't exist
            pass

        # get data from avalon knob
        instance_data = get_node_data(
            node, INSTANCE_DATA_KNOB)

        if not instance_data:
            continue

        if instance_data["id"] not in {
            AYON_INSTANCE_ID, AVALON_INSTANCE_ID
        }:
            continue

        if creator_id and instance_data["creator_identifier"] != creator_id:
            continue

        instance_id = instance_data.get("instance_id")
        if not instance_id:
            pass
        elif instance_id in instance_ids:
            instance_data.pop("instance_id")
        else:
            instance_ids.add(instance_id)

        # node name could change, so update product name data
        _update_product_name_data(instance_data, node)

        if "render_order" not in node.knobs():
            product_instances.append((node, instance_data))
            continue

        order = int(node["render_order"].value())
        instances_by_order[order].append((node, instance_data))

    # Sort instances based on order attribute or product name.
    # TODO: remove in future Publisher enhanced with sorting
    ordered_instances = []
    for key in sorted(instances_by_order.keys()):
        instances_by_product = defaultdict(list)
        for node, data_ in instances_by_order[key]:
            product_name = data_.get("productName")
            if product_name is None:
                product_name = data_.get("subset")
            instances_by_product[product_name].append((node, data_))
        for subkey in sorted(instances_by_product.keys()):
            ordered_instances.extend(instances_by_product[subkey])

    instances_by_product = defaultdict(list)
    for node, data_ in product_instances:
        product_name = data_.get("productName")
        if product_name is None:
            product_name = data_.get("subset")
        instances_by_product[product_name].append((node, data_))
    for key in sorted(instances_by_product.keys()):
        ordered_instances.extend(instances_by_product[key])

    return ordered_instances


def _update_product_name_data(instance_data, node):
    """Update product name data in instance data.

    Args:
        instance_data (dict): instance creator data
        node (nuke.Node): nuke node
    """
    # make sure node name is product name
    old_product_name = instance_data.get("productName")
    if old_product_name is None:
        old_product_name = instance_data.get("subset")
    old_variant = instance_data["variant"]
    product_name_root = old_product_name.replace(old_variant, "")

    new_product_name = node.name()
    new_variant = new_product_name.replace(product_name_root, "")

    instance_data["productName"] = new_product_name
    instance_data["variant"] = new_variant


def remove_instance(instance):
    """Remove instance from current workfile metadata.

    For SubsetManager

    Args:
        instance (dict): instance representation from subsetmanager model
    """
    instance_node = instance.transient_data["node"]
    instance_knob = instance_node.knobs()[INSTANCE_DATA_KNOB]
    instance_node.removeKnob(instance_knob)
    nuke.delete(instance_node)


def select_instance(instance):
    """
    Select instance in Node View

    Args:
        instance (dict): instance representation from subsetmanager model
    """
    instance_node = instance.transient_data["node"]
    instance_node["selected"].setValue(True)
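A hedged sketch of how a loader might use `containerise` and `parse_container` above; the node, names and context values are made up for illustration:

# Hypothetical loader snippet; the Read node and context dict are made up.
import nuke
from ayon_nuke.api import containerise, parse_container

read_node = nuke.createNode("Read")
context = {"representation": {"id": "0123456789abcdef"}}

containerise(
    read_node,
    name="plateMain",
    namespace="sh010",
    context=context,
    loader="LoadClip",
)

# Later, the imprinted metadata can be read back:
container = parse_container(read_node)
print(container["loader"], container["representation"])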
1193 server_addon/nuke/client/ayon_nuke/api/plugin.py Normal file
File diff suppressed because it is too large
224 server_addon/nuke/client/ayon_nuke/api/utils.py Normal file
@@ -0,0 +1,224 @@
import os
import re

import nuke

import pyblish.util
import pyblish.api
from qtpy import QtWidgets

from ayon_core import resources
from ayon_core.pipeline import registered_host
from ayon_core.tools.utils import show_message_dialog
from ayon_core.pipeline.create import CreateContext


def set_context_favorites(favorites=None):
    """Add favorite folders to Nuke's file browser

    Arguments:
        favorites (dict): couples of {name: path}
    """
    favorites = favorites or {}
    icon_path = resources.get_resource("icons", "folder-favorite.png")
    for name, path in favorites.items():
        nuke.addFavoriteDir(
            name,
            path,
            nuke.IMAGE | nuke.SCRIPT | nuke.GEO,
            icon=icon_path)


def get_node_outputs(node):
    '''
    Return a dictionary of the nodes and pipes that are connected to node
    '''
    dep_dict = {}
    dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS)
    for d in dependencies:
        dep_dict[d] = []
        for i in range(d.inputs()):
            if d.input(i) == node:
                dep_dict[d].append(i)
    return dep_dict


def is_node_gizmo(node):
    '''
    Return True if node is a gizmo
    '''
    return 'gizmo_file' in node.knobs()


def gizmo_is_nuke_default(gizmo):
    '''Check if gizmo is in the default install path'''
    plug_dir = os.path.join(os.path.dirname(
        nuke.env['ExecutablePath']), 'plugins')
    return gizmo.filename().startswith(plug_dir)


def bake_gizmos_recursively(in_group=None):
    """Convert gizmos to groups

    Arguments:
        in_group (nuke.Node)[optional]: group node or all nodes
    """
    from .lib import maintained_selection
    if in_group is None:
        in_group = nuke.Root()
    # preserve selection after all is done
    with maintained_selection():
        # jump to the group
        with in_group:
            for node in nuke.allNodes():
                if is_node_gizmo(node) and not gizmo_is_nuke_default(node):
                    with node:
                        outputs = get_node_outputs(node)
                        group = node.makeGroup()
                        # Reconnect inputs and outputs if any
                        if outputs:
                            for n, pipes in outputs.items():
                                for i in pipes:
                                    n.setInput(i, group)
                        for i in range(node.inputs()):
                            group.setInput(i, node.input(i))
                        # set node position and name
                        group.setXYpos(node.xpos(), node.ypos())
                        name = node.name()
                        nuke.delete(node)
                        group.setName(name)
                        node = group

                if node.Class() == "Group":
                    bake_gizmos_recursively(node)


def colorspace_exists_on_node(node, colorspace_name):
    """Check if colorspace exists on node

    Look through all options in the colorspace knob, and see if we have an
    exact match to one of the items.

    Args:
        node (nuke.Node): nuke node object
        colorspace_name (str): color profile name

    Returns:
        bool: True if exists
    """
    try:
        colorspace_knob = node['colorspace']
    except ValueError:
        # knob is not available on input node
        return False

    return colorspace_name in get_colorspace_list(colorspace_knob)


def get_colorspace_list(colorspace_knob):
    """Get available colorspace profile names

    Args:
        colorspace_knob (nuke.Knob): nuke knob object

    Returns:
        list: list of strings names of profiles
    """
    results = []

    # This pattern matches roles, which use an indentation and
    # parentheses with the original colorspace. The value returned from the
    # colorspace is the string before the indentation, so we'll need to
    # convert the values to match the value returned from the knob,
    # i.e. knob.value().
    pattern = r".*\t.* \(.*\)"
    for colorspace in nuke.getColorspaceList(colorspace_knob):
        match = re.search(pattern, colorspace)
        if match:
            results.append(colorspace.split("\t", 1)[0])
        else:
            results.append(colorspace)

    return results


def is_headless():
    """
    Returns:
        bool: headless
    """
    return QtWidgets.QApplication.instance() is None


def submit_render_on_farm(node):
    # Ensure code is executed in root context.
    if nuke.root() == nuke.thisNode():
        _submit_render_on_farm(node)
    else:
        # If not in root context, move to the root context and then execute
        # the code.
        with nuke.root():
            _submit_render_on_farm(node)


def _submit_render_on_farm(node):
    """Render on farm submission

    This function prepares the context for farm submission, validates it,
    extracts relevant data, copies the current workfile to a timestamped
    copy, and submits the job to the farm.

    Args:
        node (Node): The node for which the farm submission is being made.
    """

    host = registered_host()
    create_context = CreateContext(host)

    # Ensure CreateInstance is enabled.
    for instance in create_context.instances:
        if node.name() != instance.transient_data["node"].name():
            continue

        instance.data["active"] = True

    context = pyblish.api.Context()
    context.data["create_context"] = create_context
    # Used in pyblish plugin to determine which instance to publish.
    context.data["node_name"] = node.name()
    # Used in pyblish plugins to determine whether to run or not.
    context.data["render_on_farm"] = True

    # Since we need to bypass version validation and incrementing, we need to
    # remove the plugins from the list that are responsible for these tasks.
    plugins = pyblish.api.discover()
    blacklist = ["IncrementScriptVersion", "ValidateVersion"]
    plugins = [
        plugin
        for plugin in plugins
        if plugin.__name__ not in blacklist
    ]

    context = pyblish.util.publish(context, plugins=plugins)

    error_message = ""
    success = True
    for result in context.data["results"]:
        if result["success"]:
            continue

        success = False

        err = result["error"]
        error_message += "\n"
        error_message += err.formatted_traceback

    if not success:
        show_message_dialog(
            "Publish Errors", error_message, level="critical"
        )
        return

    show_message_dialog(
        "Submission Successful", "Submission to the farm was successful."
    )
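A small hedged sketch of the colorspace helpers above in use; the node and profile name are illustrative:

# Hypothetical check before setting a Read node's colorspace.
import nuke
from ayon_nuke.api import colorspace_exists_on_node

read_node = nuke.createNode("Read")
if colorspace_exists_on_node(read_node, "ACES - ACEScg"):
    read_node["colorspace"].setValue("ACES - ACEScg")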
@@ -0,0 +1,156 @@
import collections
import nuke

from ayon_core.pipeline import registered_host
from ayon_core.pipeline.workfile.workfile_template_builder import (
    AbstractTemplateBuilder,
    PlaceholderPlugin,
)
from ayon_core.tools.workfile_template_build import (
    WorkfileBuildPlaceholderDialog,
)
from .lib import (
    imprint,
    reset_selection,
    get_main_window,
    WorkfileSettings,
)

PLACEHOLDER_SET = "PLACEHOLDERS_SET"


class NukeTemplateBuilder(AbstractTemplateBuilder):
    """Concrete implementation of AbstractTemplateBuilder for nuke"""

    def import_template(self, path):
        """Import template into current scene.
        Block if a template is already loaded.

        Args:
            path (str): A path to current template (usually given by
                get_template_preset implementation)

        Returns:
            bool: Whether the template was successfully imported or not
        """

        # TODO check if the template is already imported

        nuke.nodePaste(path)
        reset_selection()

        return True


class NukePlaceholderPlugin(PlaceholderPlugin):
    node_color = 4278190335

    def _collect_scene_placeholders(self):
        # Cache placeholder data to shared data
        placeholder_nodes = self.builder.get_shared_populate_data(
            "placeholder_nodes"
        )
        if placeholder_nodes is None:
            placeholder_nodes = {}
            all_groups = collections.deque()
            all_groups.append(nuke.thisGroup())
            while all_groups:
                group = all_groups.popleft()
                for node in group.nodes():
                    if isinstance(node, nuke.Group):
                        all_groups.append(node)

                    node_knobs = node.knobs()
                    if (
                        "is_placeholder" not in node_knobs
                        or not node.knob("is_placeholder").value()
                    ):
                        continue

                    if "empty" in node_knobs and node.knob("empty").value():
                        continue

                    placeholder_nodes[node.fullName()] = node

            self.builder.set_shared_populate_data(
                "placeholder_nodes", placeholder_nodes
            )
        return placeholder_nodes

    def create_placeholder(self, placeholder_data):
        placeholder_data["plugin_identifier"] = self.identifier

        placeholder = nuke.nodes.NoOp()
        placeholder.setName("PLACEHOLDER")
        placeholder.knob("tile_color").setValue(self.node_color)

        imprint(placeholder, placeholder_data)
        imprint(placeholder, {"is_placeholder": True})
        placeholder.knob("is_placeholder").setVisible(False)

    def update_placeholder(self, placeholder_item, placeholder_data):
        node = nuke.toNode(placeholder_item.scene_identifier)
        imprint(node, placeholder_data)

    def _parse_placeholder_node_data(self, node):
        placeholder_data = {}
        for key in self.get_placeholder_keys():
            knob = node.knob(key)
            value = None
            if knob is not None:
                value = knob.getValue()
            placeholder_data[key] = value
        return placeholder_data

    def delete_placeholder(self, placeholder):
        """Remove placeholder if building was successful"""
        placeholder_node = nuke.toNode(placeholder.scene_identifier)
        nuke.delete(placeholder_node)


def build_workfile_template(*args, **kwargs):
    builder = NukeTemplateBuilder(registered_host())
    builder.build_template(*args, **kwargs)

    # set all settings to shot context default
    WorkfileSettings().set_context_settings()


def update_workfile_template(*args):
    builder = NukeTemplateBuilder(registered_host())
    builder.rebuild_template()


def create_placeholder(*args):
    host = registered_host()
    builder = NukeTemplateBuilder(host)
    window = WorkfileBuildPlaceholderDialog(host, builder,
                                            parent=get_main_window())
    window.show()


def update_placeholder(*args):
    host = registered_host()
    builder = NukeTemplateBuilder(host)
    placeholder_items_by_id = {
        placeholder_item.scene_identifier: placeholder_item
        for placeholder_item in builder.get_placeholders()
    }
    placeholder_items = []
    for node in nuke.selectedNodes():
        node_name = node.fullName()
        if node_name in placeholder_items_by_id:
            placeholder_items.append(placeholder_items_by_id[node_name])

    # TODO show UI at least
    if len(placeholder_items) == 0:
        raise ValueError("No node selected")

    if len(placeholder_items) > 1:
        raise ValueError("Too many selected nodes")

    placeholder_item = placeholder_items[0]
    window = WorkfileBuildPlaceholderDialog(host, builder,
                                            parent=get_main_window())
    window.set_update_mode(placeholder_item)
    window.exec_()
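A hedged sketch of driving the builder above from Nuke's script editor; it assumes an active AYON Nuke session with a registered host (the module path matches the import used in pipeline.py):

# Hypothetical script-editor usage; assumes an active AYON Nuke session.
from ayon_nuke.api.workfile_template_builder import (
    build_workfile_template,
    create_placeholder,
)

# Open the placeholder dialog, then rebuild the workfile from the template.
create_placeholder()
build_workfile_template()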
78 server_addon/nuke/client/ayon_nuke/api/workio.py Normal file
@@ -0,0 +1,78 @@
"""Host API required by the Work Files tool"""
import os
import nuke
import shutil
from .utils import is_headless


def file_extensions():
    return [".nk"]


def has_unsaved_changes():
    return nuke.root().modified()


def save_file(filepath):
    path = filepath.replace("\\", "/")
    nuke.scriptSaveAs(path, overwrite=1)
    nuke.Root()["name"].setValue(path)
    nuke.Root()["project_directory"].setValue(os.path.dirname(path))
    nuke.Root().setModified(False)


def open_file(filepath):

    def read_script(nuke_script):
        nuke.scriptClear()
        nuke.scriptReadFile(nuke_script)
        nuke.Root()["name"].setValue(nuke_script)
        nuke.Root()["project_directory"].setValue(
            os.path.dirname(nuke_script))
        nuke.Root().setModified(False)

    filepath = filepath.replace("\\", "/")

    # To remain in the same window, we have to clear the script and read
    # in the contents of the workfile.
    # Nuke Preferences can be read after the script is read.
    read_script(filepath)

    if not is_headless():
        autosave = nuke.toNode("preferences")["AutoSaveName"].evaluate()
        autosave_prmpt = "Autosave detected.\n" \
                         "Would you like to load the autosave file?"  # noqa
        if os.path.isfile(autosave) and nuke.ask(autosave_prmpt):
            try:
                # Overwrite the filepath with autosave
                shutil.copy(autosave, filepath)
                # Now read the (auto-saved) script again
                read_script(filepath)
            except shutil.Error as err:
                nuke.message(
                    "Detected autosave file could not be used.\n{}"
                    .format(err))

    return True


def current_file():
    current_file = nuke.root().name()

    # Unsaved current file
    if current_file == 'Root':
        return None

    return os.path.normpath(current_file).replace("\\", "/")


def work_root(session):

    work_dir = session["AYON_WORKDIR"]
    scene_dir = session.get("AVALON_SCENEDIR")
    if scene_dir:
        path = os.path.join(work_dir, scene_dir)
    else:
        path = work_dir

    return os.path.normpath(path).replace("\\", "/")
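A brief hedged sketch of the Work Files API above; the paths are illustrative:

# Hypothetical calls, e.g. from the Workfiles tool; the paths are made up.
from ayon_nuke.api import open_file, save_file, current_file

open_file("/proj/sh010/work/comp_v001.nk")
save_file("/proj/sh010/work/comp_v002.nk")
print(current_file())  # -> /proj/sh010/work/comp_v002.nk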
@@ -0,0 +1,12 @@
from ayon_applications import PreLaunchHook


class PrelaunchNukeAssistHook(PreLaunchHook):
    """
    Add flag when launching NukeAssist
    """
    app_groups = {"nukeassist"}
    launch_types = set()

    def execute(self):
        self.launch_context.env["NUKEASSIST"] = "1"
0 server_addon/nuke/client/ayon_nuke/plugins/__init__.py Normal file
@@ -0,0 +1,55 @@
from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin
from ayon_nuke.api.lib import (
    INSTANCE_DATA_KNOB,
    get_node_data,
    get_avalon_knob_data,
    NODE_TAB_NAME,
)
from ayon_nuke.api.plugin import convert_to_valid_instaces

import nuke


class LegacyConverted(ProductConvertorPlugin):
    identifier = "legacy.converter"

    def find_instances(self):

        legacy_found = False
        # search for first available legacy item
        for node in nuke.allNodes(recurseGroups=True):
            if node.Class() in ["Viewer", "Dot"]:
                continue

            if get_node_data(node, INSTANCE_DATA_KNOB):
                continue

            if NODE_TAB_NAME not in node.knobs():
                continue

            # get data from avalon knob
            avalon_knob_data = get_avalon_knob_data(
                node, ["avalon:", "ak:"], create=False)

            if not avalon_knob_data:
                continue

            if avalon_knob_data["id"] not in {
                AYON_INSTANCE_ID, AVALON_INSTANCE_ID
            }:
                continue

            # catch and break
            legacy_found = True
            break

        if legacy_found:
            # add convertor item only if a legacy instance was found
            self.add_convertor_item("Convert legacy instances")

    def convert(self):
        # loop all instances and convert them
        convert_to_valid_instaces()
        # remove legacy item if all is fine
        self.remove_convertor_item()
@@ -0,0 +1,53 @@
from nukescripts import autoBackdrop

from ayon_nuke.api import (
    NukeCreator,
    maintained_selection,
    select_nodes
)


class CreateBackdrop(NukeCreator):
    """Add Publishable Backdrop"""

    settings_category = "nuke"

    identifier = "create_backdrop"
    label = "Nukenodes (backdrop)"
    product_type = "nukenodes"
    icon = "file-archive-o"
    maintain_selection = True

    # plugin attributes
    node_color = "0xdfea5dff"

    def create_instance_node(
        self,
        node_name,
        knobs=None,
        parent=None,
        node_type=None
    ):
        with maintained_selection():
            if len(self.selected_nodes) >= 1:
                select_nodes(self.selected_nodes)

            created_node = autoBackdrop()
            created_node["name"].setValue(node_name)
            created_node["tile_color"].setValue(int(self.node_color, 16))
            created_node["note_font_size"].setValue(24)
            created_node["label"].setValue("[{}]".format(node_name))

        return created_node

    def create(self, product_name, instance_data, pre_create_data):
        # make sure product name is unique
        self.check_existing_product(product_name)

        instance = super(CreateBackdrop, self).create(
            product_name,
            instance_data,
            pre_create_data
        )

        return instance
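The creators in this group store `node_color` as a hex string and convert it with `int(value, 16)`; a small sketch of that conversion (the hex values are copied from the plugins):

# Nuke tile_color knobs take a 32-bit 0xRRGGBBAA integer; the creators
# keep it as a hex string in settings and convert it on use.
backdrop_color = int("0xdfea5dff", 16)  # CreateBackdrop
camera_color = int("0xff9100ff", 16)    # CreateCamera
print(backdrop_color)  # 3756678655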
@@ -0,0 +1,71 @@
import nuke
from ayon_nuke.api import (
    NukeCreator,
    NukeCreatorError,
    maintained_selection
)
from ayon_nuke.api.lib import (
    create_camera_node_by_version
)


class CreateCamera(NukeCreator):
    """Add Publishable Camera"""

    settings_category = "nuke"

    identifier = "create_camera"
    label = "Camera (3d)"
    product_type = "camera"
    icon = "camera"

    # plugin attributes
    node_color = "0xff9100ff"

    def create_instance_node(
        self,
        node_name,
        knobs=None,
        parent=None,
        node_type=None
    ):
        with maintained_selection():
            if self.selected_nodes:
                node = self.selected_nodes[0]
                if node.Class() != "Camera3":
                    raise NukeCreatorError(
                        "Creator error: Select only camera node type")
                created_node = self.selected_nodes[0]
            else:
                created_node = create_camera_node_by_version()

            created_node["tile_color"].setValue(
                int(self.node_color, 16))

            created_node["name"].setValue(node_name)

        return created_node

    def create(self, product_name, instance_data, pre_create_data):
        # make sure product name is unique
        self.check_existing_product(product_name)

        instance = super(CreateCamera, self).create(
            product_name,
            instance_data,
            pre_create_data
        )

        return instance

    def set_selected_nodes(self, pre_create_data):
        if pre_create_data.get("use_selection"):
            self.selected_nodes = nuke.selectedNodes()
            if self.selected_nodes == []:
                raise NukeCreatorError(
                    "Creator error: No active selection")
            elif len(self.selected_nodes) > 1:
                raise NukeCreatorError(
                    "Creator error: Select only one camera node")
        else:
            self.selected_nodes = []
@ -0,0 +1,67 @@
import nuke
from ayon_nuke.api import (
    NukeCreator,
    NukeCreatorError,
    maintained_selection
)


class CreateGizmo(NukeCreator):
    """Add Publishable Group as gizmo"""

    settings_category = "nuke"

    identifier = "create_gizmo"
    label = "Gizmo (group)"
    product_type = "gizmo"
    icon = "file-archive-o"
    default_variants = ["ViewerInput", "Lut", "Effect"]

    # plugin attributes
    node_color = "0x7533c1ff"

    def create_instance_node(
        self,
        node_name,
        knobs=None,
        parent=None,
        node_type=None
    ):
        with maintained_selection():
            if self.selected_nodes:
                node = self.selected_nodes[0]
                if node.Class() != "Group":
                    raise NukeCreatorError(
                        "Creator error: Select only 'Group' node type")
                created_node = node
            else:
                created_node = nuke.collapseToGroup()

            created_node["tile_color"].setValue(
                int(self.node_color, 16))

            created_node["name"].setValue(node_name)

        return created_node

    def create(self, product_name, instance_data, pre_create_data):
        # make sure product name is unique
        self.check_existing_product(product_name)

        instance = super(CreateGizmo, self).create(
            product_name,
            instance_data,
            pre_create_data
        )

        return instance

    def set_selected_nodes(self, pre_create_data):
        if pre_create_data.get("use_selection"):
            self.selected_nodes = nuke.selectedNodes()
            if self.selected_nodes == []:
                raise NukeCreatorError("Creator error: No active selection")
            elif len(self.selected_nodes) > 1:
                raise NukeCreatorError(
                    "Creator error: Select only one 'Group' node")
        else:
            self.selected_nodes = []
@ -0,0 +1,67 @@
import nuke
from ayon_nuke.api import (
    NukeCreator,
    NukeCreatorError,
    maintained_selection
)


class CreateModel(NukeCreator):
    """Add Publishable Model"""

    settings_category = "nuke"

    identifier = "create_model"
    label = "Model (3d)"
    product_type = "model"
    icon = "cube"
    default_variants = ["Main"]

    # plugin attributes
    node_color = "0xff3200ff"

    def create_instance_node(
        self,
        node_name,
        knobs=None,
        parent=None,
        node_type=None
    ):
        with maintained_selection():
            if self.selected_nodes:
                node = self.selected_nodes[0]
                if node.Class() != "Scene":
                    raise NukeCreatorError(
                        "Creator error: Select only 'Scene' node type")
                created_node = node
            else:
                created_node = nuke.createNode("Scene")

            created_node["tile_color"].setValue(
                int(self.node_color, 16))

            created_node["name"].setValue(node_name)

        return created_node

    def create(self, product_name, instance_data, pre_create_data):
        # make sure product name is unique
        self.check_existing_product(product_name)

        instance = super(CreateModel, self).create(
            product_name,
            instance_data,
            pre_create_data
        )

        return instance

    def set_selected_nodes(self, pre_create_data):
        if pre_create_data.get("use_selection"):
            self.selected_nodes = nuke.selectedNodes()
            if self.selected_nodes == []:
                raise NukeCreatorError("Creator error: No active selection")
            elif len(self.selected_nodes) > 1:
                raise NukeCreatorError(
                    "Creator error: Select only one 'Scene' node")
        else:
            self.selected_nodes = []
@ -0,0 +1,90 @@
import nuke
import six
import sys
from ayon_nuke.api import (
    INSTANCE_DATA_KNOB,
    NukeCreator,
    NukeCreatorError,
    set_node_data
)
from ayon_core.pipeline import (
    CreatedInstance
)


class CreateSource(NukeCreator):
    """Add Publishable Read with source"""

    settings_category = "nuke"

    identifier = "create_source"
    label = "Source (read)"
    product_type = "source"
    icon = "film"
    default_variants = ["Effect", "Backplate", "Fire", "Smoke"]

    # plugin attributes
    node_color = "0xff9100ff"

    def create_instance_node(
        self,
        node_name,
        read_node
    ):
        read_node["tile_color"].setValue(
            int(self.node_color, 16))
        read_node["name"].setValue(node_name)

        return read_node

    def create(self, product_name, instance_data, pre_create_data):

        # make sure selected nodes are added
        self.set_selected_nodes(pre_create_data)

        try:
            for read_node in self.selected_nodes:
                if read_node.Class() != 'Read':
                    continue

                node_name = read_node.name()
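                # one instance is created per selected Read node; the node
                # name is appended so each product name stays unique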
                _product_name = product_name + node_name

                # make sure product name is unique
                self.check_existing_product(_product_name)

                instance_node = self.create_instance_node(
                    _product_name,
                    read_node
                )
                instance = CreatedInstance(
                    self.product_type,
                    _product_name,
                    instance_data,
                    self
                )

                instance.transient_data["node"] = instance_node

                self._add_instance_to_context(instance)

                set_node_data(
                    instance_node,
                    INSTANCE_DATA_KNOB,
                    instance.data_to_store()
                )

        except Exception as er:
            six.reraise(
                NukeCreatorError,
                NukeCreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2])

    def set_selected_nodes(self, pre_create_data):
        if pre_create_data.get("use_selection"):
            self.selected_nodes = nuke.selectedNodes()
            if self.selected_nodes == []:
                raise NukeCreatorError("Creator error: No active selection")
        else:
            raise NukeCreatorError(
                "Creator error: only supported with active selection")
@ -0,0 +1,174 @@
import nuke
import sys
import six

from ayon_core.pipeline import (
    CreatedInstance
)
from ayon_core.lib import (
    BoolDef,
    NumberDef,
    UISeparatorDef,
    EnumDef
)
from ayon_nuke import api as napi
from ayon_nuke.api.plugin import exposed_write_knobs


class CreateWriteImage(napi.NukeWriteCreator):

    settings_category = "nuke"

    identifier = "create_write_image"
    label = "Image (write)"
    product_type = "image"
    icon = "sign-out"

    instance_attributes = [
        "use_range_limit"
    ]
    default_variants = [
        "StillFrame",
        "MPFrame",
        "LayoutFrame"
    ]
    temp_rendering_path_template = (
        "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")

    def get_pre_create_attr_defs(self):
        attr_defs = [
            BoolDef(
                "use_selection",
                default=not self.create_context.headless,
                label="Use selection"
            ),
            self._get_render_target_enum(),
            UISeparatorDef(),
            self._get_frame_source_number()
        ]
        return attr_defs

    def _get_render_target_enum(self):
        rendering_targets = {
            "local": "Local machine rendering",
            "frames": "Use existing frames"
        }

        return EnumDef(
            "render_target",
            items=rendering_targets,
            label="Render target"
        )

    def _get_frame_source_number(self):
        return NumberDef(
            "active_frame",
            label="Active frame",
            default=nuke.frame()
        )

    def create_instance_node(self, product_name, instance_data):
        settings = self.project_settings["nuke"]["create"]["CreateWriteImage"]

        # add fpath_template
        write_data = {
            "creator": self.__class__.__name__,
            "productName": product_name,
            "fpath_template": self.temp_rendering_path_template,
            "render_on_farm": (
                "render_on_farm" in settings["instance_attributes"]
            )
        }
        write_data.update(instance_data)

        created_node = napi.create_write_node(
            product_name,
            write_data,
            input=self.selected_node,
            prenodes=self.prenodes,
            linked_knobs=self.get_linked_knobs(),
            **{
                "frame": nuke.frame()
            }
        )

        self._add_frame_range_limit(created_node, instance_data)

        self.integrate_links(created_node, outputs=True)

        return created_node

    def create(self, product_name, instance_data, pre_create_data):
        product_name = product_name.format(**pre_create_data)

        # pass values from precreate to instance
        self.pass_pre_attributes_to_instance(
            instance_data,
            pre_create_data,
            [
                "active_frame",
                "render_target"
            ]
        )

        # make sure selected nodes are added
        self.set_selected_nodes(pre_create_data)

        # make sure product name is unique
        self.check_existing_product(product_name)

        instance_node = self.create_instance_node(
            product_name,
            instance_data,
        )

        try:
            instance = CreatedInstance(
                self.product_type,
                product_name,
                instance_data,
                self
            )

            instance.transient_data["node"] = instance_node

            self._add_instance_to_context(instance)

            napi.set_node_data(
                instance_node,
                napi.INSTANCE_DATA_KNOB,
                instance.data_to_store()
            )

            exposed_write_knobs(
                self.project_settings, self.__class__.__name__, instance_node
            )

            return instance

        except Exception as er:
            six.reraise(
                napi.NukeCreatorError,
                napi.NukeCreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2]
            )

    def _add_frame_range_limit(self, write_node, instance_data):
        if "use_range_limit" not in self.instance_attributes:
            return

        active_frame = (
            instance_data["creator_attributes"].get("active_frame"))

        write_node.begin()
        for n in nuke.allNodes():
            # get write node
            if n.Class() in "Write":
                w_node = n
        write_node.end()

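        # limit the internal Write node to a single frame: "last" follows
        # "first" via expression, so only the active frame is rendered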
        w_node["use_limit"].setValue(True)
        w_node["first"].setValue(active_frame or nuke.frame())
        w_node["last"].setExpression("first")

        return write_node
@ -0,0 +1,160 @@
import nuke
import sys
import six

from ayon_core.pipeline import (
    CreatedInstance
)
from ayon_core.lib import (
    BoolDef
)
from ayon_nuke import api as napi
from ayon_nuke.api.plugin import exposed_write_knobs


class CreateWritePrerender(napi.NukeWriteCreator):

    settings_category = "nuke"

    identifier = "create_write_prerender"
    label = "Prerender (write)"
    product_type = "prerender"
    icon = "sign-out"

    instance_attributes = [
        "use_range_limit"
    ]
    default_variants = [
        "Key01",
        "Bg01",
        "Fg01",
        "Branch01",
        "Part01"
    ]
    temp_rendering_path_template = (
        "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")

    # Before write node render.
    order = 90

    def get_pre_create_attr_defs(self):
        attr_defs = [
            BoolDef(
                "use_selection",
                default=not self.create_context.headless,
                label="Use selection"
            ),
            self._get_render_target_enum()
        ]
        return attr_defs

    def create_instance_node(self, product_name, instance_data):
        settings = self.project_settings["nuke"]["create"]
        settings = settings["CreateWritePrerender"]

        # add fpath_template
        write_data = {
            "creator": self.__class__.__name__,
            "productName": product_name,
            "fpath_template": self.temp_rendering_path_template,
            "render_on_farm": (
                "render_on_farm" in settings["instance_attributes"]
            )
        }

        write_data.update(instance_data)

        # get width and height
        if self.selected_node:
            width, height = (
                self.selected_node.width(), self.selected_node.height())
        else:
            actual_format = nuke.root().knob('format').value()
            width, height = (actual_format.width(), actual_format.height())

        created_node = napi.create_write_node(
            product_name,
            write_data,
            input=self.selected_node,
            prenodes=self.prenodes,
            linked_knobs=self.get_linked_knobs(),
            **{
                "width": width,
                "height": height
            }
        )

        self._add_frame_range_limit(created_node)

        self.integrate_links(created_node, outputs=True)

        return created_node

    def create(self, product_name, instance_data, pre_create_data):
        # pass values from precreate to instance
        self.pass_pre_attributes_to_instance(
            instance_data,
            pre_create_data,
            [
                "render_target"
            ]
        )

        # make sure selected nodes are added
        self.set_selected_nodes(pre_create_data)

        # make sure product name is unique
        self.check_existing_product(product_name)

        instance_node = self.create_instance_node(
            product_name,
            instance_data
        )

        try:
            instance = CreatedInstance(
                self.product_type,
                product_name,
                instance_data,
                self
            )

            instance.transient_data["node"] = instance_node

            self._add_instance_to_context(instance)

            napi.set_node_data(
                instance_node,
                napi.INSTANCE_DATA_KNOB,
                instance.data_to_store()
            )

            exposed_write_knobs(
                self.project_settings, self.__class__.__name__, instance_node
            )

            return instance

        except Exception as er:
            six.reraise(
                napi.NukeCreatorError,
                napi.NukeCreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2]
            )

    def _add_frame_range_limit(self, write_node):
        if "use_range_limit" not in self.instance_attributes:
            return

        write_node.begin()
        for n in nuke.allNodes():
            # get write node
            if n.Class() in "Write":
                w_node = n
        write_node.end()

        w_node["use_limit"].setValue(True)
        w_node["first"].setValue(nuke.root()["first_frame"].value())
        w_node["last"].setValue(nuke.root()["last_frame"].value())

        return write_node
@ -0,0 +1,136 @@
import nuke
import sys
import six

from ayon_core.pipeline import (
    CreatedInstance
)
from ayon_core.lib import (
    BoolDef
)
from ayon_nuke import api as napi
from ayon_nuke.api.plugin import exposed_write_knobs


class CreateWriteRender(napi.NukeWriteCreator):

    settings_category = "nuke"

    identifier = "create_write_render"
    label = "Render (write)"
    product_type = "render"
    icon = "sign-out"

    instance_attributes = [
        "reviewable"
    ]
    default_variants = [
        "Main",
        "Mask"
    ]
    temp_rendering_path_template = (
        "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")

    def get_pre_create_attr_defs(self):
        attr_defs = [
            BoolDef(
                "use_selection",
                default=not self.create_context.headless,
                label="Use selection"
            ),
            self._get_render_target_enum()
        ]
        return attr_defs

    def create_instance_node(self, product_name, instance_data):
        settings = self.project_settings["nuke"]["create"]["CreateWriteRender"]

        # add fpath_template
        write_data = {
            "creator": self.__class__.__name__,
            "productName": product_name,
            "fpath_template": self.temp_rendering_path_template,
            "render_on_farm": (
                "render_on_farm" in settings["instance_attributes"]
            )
        }

        write_data.update(instance_data)

        # get width and height
        if self.selected_node:
            width, height = (
                self.selected_node.width(), self.selected_node.height())
        else:
            actual_format = nuke.root().knob('format').value()
            width, height = (actual_format.width(), actual_format.height())

        self.log.debug("instance_attributes: {}".format(
            self.instance_attributes))
        self.log.debug("linked_knobs: {}".format(self.get_linked_knobs()))

        created_node = napi.create_write_node(
            product_name,
            write_data,
            input=self.selected_node,
            prenodes=self.prenodes,
            linked_knobs=self.get_linked_knobs(),
            **{
                "width": width,
                "height": height
            }
        )

        self.integrate_links(created_node, outputs=False)

        return created_node

    def create(self, product_name, instance_data, pre_create_data):
        # pass values from precreate to instance
        self.pass_pre_attributes_to_instance(
            instance_data,
            pre_create_data,
            [
                "render_target"
            ]
        )
        # make sure selected nodes are added
        self.set_selected_nodes(pre_create_data)

        # make sure product name is unique
        self.check_existing_product(product_name)

        instance_node = self.create_instance_node(
            product_name,
            instance_data
        )

        try:
            instance = CreatedInstance(
                self.product_type,
                product_name,
                instance_data,
                self
            )

            instance.transient_data["node"] = instance_node

            self._add_instance_to_context(instance)

            napi.set_node_data(
                instance_node,
                napi.INSTANCE_DATA_KNOB,
                instance.data_to_store()
            )

            exposed_write_knobs(
                self.project_settings, self.__class__.__name__, instance_node
            )

            return instance

        except Exception as er:
            six.reraise(
                napi.NukeCreatorError,
                napi.NukeCreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2]
            )
@ -0,0 +1,84 @@
import ayon_api

import ayon_nuke.api as api
from ayon_core.pipeline import (
    AutoCreator,
    CreatedInstance,
)
from ayon_nuke.api import (
    INSTANCE_DATA_KNOB,
    set_node_data
)
import nuke


class WorkfileCreator(AutoCreator):

    settings_category = "nuke"

    identifier = "workfile"
    product_type = "workfile"

    default_variant = "Main"

    def get_instance_attr_defs(self):
        return []

    def collect_instances(self):
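        # the workfile instance lives on the script's root node, where its
        # data is persisted in the AYON instance data knob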
        root_node = nuke.root()
        instance_data = api.get_node_data(
            root_node, api.INSTANCE_DATA_KNOB
        )

        project_name = self.create_context.get_current_project_name()
        folder_path = self.create_context.get_current_folder_path()
        task_name = self.create_context.get_current_task_name()
        host_name = self.create_context.host_name

        folder_entity = ayon_api.get_folder_by_path(
            project_name, folder_path
        )
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], task_name
        )
        product_name = self.get_product_name(
            project_name,
            folder_entity,
            task_entity,
            self.default_variant,
            host_name,
        )
        instance_data.update({
            "folderPath": folder_path,
            "task": task_name,
            "variant": self.default_variant
        })
        instance_data.update(self.get_dynamic_data(
            project_name,
            folder_entity,
            task_entity,
            self.default_variant,
            host_name,
            instance_data
        ))

        instance = CreatedInstance(
            self.product_type, product_name, instance_data, self
        )
        instance.transient_data["node"] = root_node
        self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        for created_inst, _changes in update_list:
            instance_node = created_inst.transient_data["node"]

            set_node_data(
                instance_node,
                INSTANCE_DATA_KNOB,
                created_inst.data_to_store()
            )

    def create(self, options=None):
        # no need to create if it is created
        # in `collect_instances`
        pass
@ -0,0 +1,36 @@
from ayon_core.lib import Logger
from ayon_core.pipeline import InventoryAction
from ayon_nuke.api.lib import set_avalon_knob_data


class RepairOldLoaders(InventoryAction):

    label = "Repair Old Loaders"
    icon = "gears"
    color = "#cc0000"

    log = Logger.get_logger(__name__)

    def process(self, containers):
        import nuke
        new_loader = "LoadClip"

        for cdata in containers:
            orig_loader = cdata["loader"]
            orig_name = cdata["objectName"]
            if orig_loader not in ["LoadSequence", "LoadMov"]:
                self.log.warning(
                    "This repair action is only working on "
                    "`LoadSequence` and `LoadMov` Loaders")
                continue

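            # retarget the container to the new loader and rename the node
            # so it keeps matching the stored objectName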
            new_name = orig_name.replace(orig_loader, new_loader)
            node = nuke.toNode(cdata["objectName"])

            cdata.update({
                "loader": new_loader,
                "objectName": new_name
            })
            node["name"].setValue(new_name)
            # write the updated container data back to the avalon knob
            set_avalon_knob_data(node, cdata)
@ -0,0 +1,21 @@
from ayon_core.pipeline import InventoryAction
from ayon_nuke.api.command import viewer_update_and_undo_stop


class SelectContainers(InventoryAction):

    label = "Select Containers"
    icon = "mouse-pointer"
    color = "#d8d8d8"

    def process(self, containers):
        import nuke

        nodes = [nuke.toNode(i["objectName"]) for i in containers]

        with viewer_update_and_undo_stop():
            # clear any previous selection on the container nodes
            [n['selected'].setValue(False) for n in nodes]
            # select the container nodes
            for node in nodes:
                node["selected"].setValue(True)
77 server_addon/nuke/client/ayon_nuke/plugins/load/actions.py Normal file

@ -0,0 +1,77 @@
"""A module containing generic loader actions that will display in the Loader.

"""

from ayon_core.lib import Logger
from ayon_core.pipeline import load
from ayon_nuke.api import lib

log = Logger.get_logger(__name__)


class SetFrameRangeLoader(load.LoaderPlugin):
    """Set frame range excluding pre- and post-handles"""

    product_types = {
        "animation",
        "camera",
        "write",
        "yeticache",
        "pointcache",
    }
    representations = {"*"}
    extensions = {"*"}

    label = "Set frame range"
    order = 11
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):
        version_entity = context["version"]
        version_attributes = version_entity["attrib"]

        start = version_attributes.get("frameStart")
        end = version_attributes.get("frameEnd")

        log.info("start: {}, end: {}".format(start, end))
        if start is None or end is None:
            log.info("Skipping setting frame range because start or "
                     "end frame data is missing..")
            return

        lib.update_frame_range(start, end)


class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):
    """Set frame range including pre- and post-handles"""

    product_types = {
        "animation",
        "camera",
        "write",
        "yeticache",
        "pointcache",
    }
    representations = {"*"}

    label = "Set frame range (with handles)"
    order = 12
    icon = "clock-o"
    color = "white"

    def load(self, context, name, namespace, data):
        version_attributes = context["version"]["attrib"]
        start = version_attributes.get("frameStart")
        end = version_attributes.get("frameEnd")

        if start is None or end is None:
            log.info("Skipping setting frame range because start or "
                     "end frame data is missing..")
            return

        # Include handles
        start -= version_attributes.get("handleStart", 0)
        end += version_attributes.get("handleEnd", 0)

        lib.update_frame_range(start, end)
255 server_addon/nuke/client/ayon_nuke/plugins/load/load_backdrop.py Normal file

@ -0,0 +1,255 @@
import nuke
import nukescripts
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api.lib import (
    find_free_space_to_paste_nodes,
    maintained_selection,
    reset_selection,
    select_nodes,
    get_avalon_knob_data,
    set_avalon_knob_data
)
from ayon_nuke.api.command import viewer_update_and_undo_stop
from ayon_nuke.api import containerise, update_container


class LoadBackdropNodes(load.LoaderPlugin):
    """Loading Published Backdrop nodes (workfile, nukenodes)"""

    product_types = {"workfile", "nukenodes"}
    representations = {"*"}
    extensions = {"nk"}

    settings_category = "nuke"

    label = "Import Nuke Nodes"
    order = 0
    icon = "eye"
    color = "white"
    node_color = "0x7533c1ff"

    def load(self, context, name, namespace, data):
        """
        Loading function to import .nk file into script and wrap
        it on backdrop

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): namespace name
            data (dict): compulsory attribute > not used

        Returns:
            nuke node: containerised nuke node object
        """

        # get main variables
        namespace = namespace or context["folder"]["name"]
        version_entity = context["version"]

        version_attributes = version_entity["attrib"]
        colorspace = version_attributes.get("colorSpace")

        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        data_imprint = {
            "version": version_entity["version"],
            "colorspaceInput": colorspace
        }

        # add attributes from the version to imprint to metadata knob
        for k in ["source", "fps"]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = self.filepath_from_context(context).replace("\\", "/")

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        # Get mouse position
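        # a freshly created node lands at the DAG pointer position, so a
        # throwaway NoOp is created just to read that position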
        n = nuke.createNode("NoOp")
        xcursor, ycursor = (n.xpos(), n.ypos())
        reset_selection()
        nuke.delete(n)

        bdn_frame = 50

        with maintained_selection():

            # add group from nk
            nuke.nodePaste(file)

            # get all pasted nodes
            new_nodes = list()
            nodes = nuke.selectedNodes()

            # get pointer position in DAG
            xpointer, ypointer = find_free_space_to_paste_nodes(
                nodes, direction="right", offset=200 + bdn_frame
            )

            # reset position to all nodes and replace inputs and output
            for n in nodes:
                reset_selection()
                xpos = (n.xpos() - xcursor) + xpointer
                ypos = (n.ypos() - ycursor) + ypointer
                n.setXYpos(xpos, ypos)

                # replace Input nodes for dots
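                # Input/Output nodes are only meaningful inside a Group, so
                # pasted ones are swapped for Dot nodes to keep the wiring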
                if n.Class() in "Input":
                    dot = nuke.createNode("Dot")
                    new_name = n.name().replace("INP", "DOT")
                    dot.setName(new_name)
                    dot["label"].setValue(new_name)
                    dot.setXYpos(xpos, ypos)
                    new_nodes.append(dot)

                    # rewire
                    dep = n.dependent()
                    for d in dep:
                        index = next((i for i, dpcy in enumerate(
                            d.dependencies())
                            if n is dpcy), 0)
                        d.setInput(index, dot)

                    # remove Input node
                    reset_selection()
                    nuke.delete(n)
                    continue

                # replace Output nodes with dots
                elif n.Class() in "Output":
                    dot = nuke.createNode("Dot")
                    new_name = n.name() + "_DOT"
                    dot.setName(new_name)
                    dot["label"].setValue(new_name)
                    dot.setXYpos(xpos, ypos)
                    new_nodes.append(dot)

                    # rewire
                    dep = next((d for d in n.dependencies()), None)
                    if dep:
                        dot.setInput(0, dep)

                    # remove Output node
                    reset_selection()
                    nuke.delete(n)
                    continue
                else:
                    new_nodes.append(n)

            # reselect nodes with new Dot instead of Inputs and Output
            reset_selection()
            select_nodes(new_nodes)
            # place on backdrop
            bdn = nukescripts.autoBackdrop()

            # add frame offset
            xpos = bdn.xpos() - bdn_frame
            ypos = bdn.ypos() - bdn_frame
            bdwidth = bdn["bdwidth"].value() + (bdn_frame*2)
            bdheight = bdn["bdheight"].value() + (bdn_frame*2)

            bdn["xpos"].setValue(xpos)
            bdn["ypos"].setValue(ypos)
            bdn["bdwidth"].setValue(bdwidth)
            bdn["bdheight"].setValue(bdheight)

            bdn["name"].setValue(object_name)
            bdn["label"].setValue(
                "Version tracked frame: \n`{}`\n\n"
                "PLEASE DO NOT REMOVE OR MOVE \n"
                "ANYTHING FROM THIS FRAME!".format(object_name))
            bdn["note_font_size"].setValue(20)

        return containerise(
            node=bdn,
            name=name,
            namespace=namespace,
            context=context,
            loader=self.__class__.__name__,
            data=data_imprint)

    def update(self, container, context):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs.
        """

        # get main variables
        # Get version from io
        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        # get corresponding node
        GN = container["node"]

        file = get_representation_path(repre_entity).replace("\\", "/")

        name = container["name"]
        namespace = container["namespace"]
        object_name = "{}_{}".format(name, namespace)

        version_attributes = version_entity["attrib"]
        colorspace = version_attributes.get("colorSpace")

        data_imprint = {
            "representation": repre_entity["id"],
            "version": version_entity["version"],
            "colorspaceInput": colorspace,
        }

        for k in ["source", "fps"]:
            data_imprint[k] = version_attributes[k]

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        with maintained_selection():
            xpos = GN.xpos()
            ypos = GN.ypos()
            avalon_data = get_avalon_knob_data(GN)
            nuke.delete(GN)
            # add group from nk
            nuke.nodePaste(file)

            GN = nuke.selectedNode()
            set_avalon_knob_data(GN, avalon_data)
            GN.setXYpos(xpos, ypos)
            GN["name"].setValue(object_name)

        # get all versions in list
        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = self.node_color
        else:
            color_value = "0xd88467ff"
        GN["tile_color"].setValue(int(color_value, 16))

        self.log.info(
            "updated to version: {}".format(version_entity["version"])
        )

        return update_container(GN, data_imprint)

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        node = container["node"]
        with viewer_update_and_undo_stop():
            nuke.delete(node)
@ -0,0 +1,198 @@
import nuke
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)
from ayon_nuke.api.lib import (
    maintained_selection
)


class AlembicCameraLoader(load.LoaderPlugin):
    """
    This will load alembic camera into script.
    """

    product_types = {"camera"}
    representations = {"*"}
    extensions = {"abc"}

    settings_category = "nuke"

    label = "Load Alembic Camera"
    icon = "camera"
    color = "orange"
    node_color = "0x3469ffff"

    def load(self, context, name, namespace, data):
        # get main variables
        version_entity = context["version"]

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        fps = version_attributes.get("fps") or nuke.root()["fps"].getValue()

        namespace = namespace or context["folder"]["name"]
        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        # add additional metadata from the version to imprint to metadata knob
        data_imprint = {
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
        }
        for k in ["source", "fps"]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = self.filepath_from_context(context).replace("\\", "/")

        with maintained_selection():
            camera_node = nuke.createNode(
                "Camera2",
                "name {} file {} read_from_file True".format(
                    object_name, file),
                inpanel=False
            )

            camera_node.forceValidate()
            camera_node["frame_rate"].setValue(float(fps))

            # workaround because nuke's bug is not adding
            # animation keys properly
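            # (copy/paste through the clipboard presumably forces Nuke to
            # re-evaluate the file knob and bake the animation keys)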
            xpos = camera_node.xpos()
            ypos = camera_node.ypos()
            nuke.nodeCopy("%clipboard%")
            nuke.delete(camera_node)
            nuke.nodePaste("%clipboard%")
            camera_node = nuke.toNode(object_name)
            camera_node.setXYpos(xpos, ypos)

        # color the node according to its version status
        self.node_version_color(
            context["project"]["name"], version_entity, camera_node
        )

        return containerise(
            node=camera_node,
            name=name,
            namespace=namespace,
            context=context,
            loader=self.__class__.__name__,
            data=data_imprint)

    def update(self, container, context):
        """
        Called by Scene Inventory when the loaded camera should be
        updated to the current version.

        Args:
            container (dict): container data of the loaded camera node
            context (dict): representation context used to resolve the
                new file path and version data

        Returns:
            None
        """
        # Get version from io
        version_entity = context["version"]
        repre_entity = context["representation"]

        # get main variables
        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        fps = version_attributes.get("fps") or nuke.root()["fps"].getValue()

        # prepare data for imprinting
        data_imprint = {
            "representation": repre_entity["id"],
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"]
        }

        # add attributes from the version to imprint to metadata knob
        for k in ["source", "fps"]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = get_representation_path(repre_entity).replace("\\", "/")

        with maintained_selection():
            camera_node = container["node"]
            camera_node['selected'].setValue(True)

            # collect input output dependencies
            dependencies = camera_node.dependencies()
            dependent = camera_node.dependent()

            camera_node["frame_rate"].setValue(float(fps))
            camera_node["file"].setValue(file)

            # workaround because nuke's bug is
            # not adding animation keys properly
            xpos = camera_node.xpos()
            ypos = camera_node.ypos()
            nuke.nodeCopy("%clipboard%")
            camera_name = camera_node.name()
            nuke.delete(camera_node)
            nuke.nodePaste("%clipboard%")
            camera_node = nuke.toNode(camera_name)
            camera_node.setXYpos(xpos, ypos)

            # link to original input nodes
            for i, input in enumerate(dependencies):
                camera_node.setInput(i, input)
            # link to original output nodes
            for d in dependent:
                index = next((i for i, dpcy in enumerate(
                    d.dependencies())
                    if camera_node is dpcy), 0)
                d.setInput(index, camera_node)

        # color the node according to its version status
        self.node_version_color(
            context["project"]["name"], version_entity, camera_node
        )

        self.log.info(
            "updated to version: {}".format(version_entity["version"])
        )

        return update_container(camera_node, data_imprint)

    def node_version_color(self, project_name, version_entity, node):
        """Color the node by whether it points to the latest version."""
        # get all versions in list
        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = self.node_color
        else:
            color_value = "0xd88467ff"
        node["tile_color"].setValue(int(color_value, 16))

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        node = container["node"]
        with viewer_update_and_undo_stop():
            nuke.delete(node)
584 server_addon/nuke/client/ayon_nuke/plugins/load/load_clip.py Normal file

@ -0,0 +1,584 @@
from copy import deepcopy

import nuke
import qargparse
import ayon_api

from ayon_core.lib import Logger
from ayon_core.pipeline import (
    get_representation_path,
)
from ayon_core.pipeline.colorspace import (
    get_imageio_file_rules_colorspace_from_filepath,
    get_current_context_imageio_config_preset,
)
from ayon_nuke.api.lib import (
    get_imageio_input_colorspace,
    maintained_selection
)
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop,
    colorspace_exists_on_node
)
from ayon_core.lib.transcoding import (
    VIDEO_EXTENSIONS,
    IMAGE_EXTENSIONS
)
from ayon_nuke.api import plugin


class LoadClip(plugin.NukeLoader):
    """Load clip into Nuke

    Either it is image sequence or video file.
    """
    log = Logger.get_logger(__name__)

    product_types = {
        "source",
        "plate",
        "render",
        "prerender",
        "review",
    }
    representations = {"*"}
    extensions = set(
        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
    )

    settings_category = "nuke"

    label = "Load Clip"
    order = -20
    icon = "file-video-o"
    color = "white"

    # Loaded from settings
    representations_include = []

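    # note: evaluated once, when this plugin module is imported, so it
    # captures the workfile start frame at that moment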
    script_start = int(nuke.root()["first_frame"].value())

    # option gui
    options_defaults = {
        "start_at_workfile": True,
        "add_retime": True,
        "deep_exr": False
    }

    node_name_template = "{class_name}_{ext}"

    @classmethod
    def get_options(cls, *args):
        return [
            qargparse.Boolean(
                "start_at_workfile",
                help="Load at workfile start frame",
                default=cls.options_defaults["start_at_workfile"]
            ),
            qargparse.Boolean(
                "add_retime",
                help="Load with retime",
                default=cls.options_defaults["add_retime"]
            ),
            qargparse.Boolean(
                "deep_exr",
                help="Read with deep exr",
                default=cls.options_defaults["deep_exr"]
            )
        ]

    @classmethod
    def get_representations(cls):
        return cls.representations_include or cls.representations

    def load(self, context, name, namespace, options):
        """Load asset via database."""
        project_name = context["project"]["name"]
        repre_entity = context["representation"]
        version_entity = context["version"]
        version_attributes = version_entity["attrib"]
        version_data = version_entity["data"]

        # reset container id so it is always unique for each instance
        self.reset_container_id()

        is_sequence = len(repre_entity["files"]) > 1

        if is_sequence:
            context["representation"] = (
                self._representation_with_hash_in_frame(repre_entity)
            )

        filepath = self.filepath_from_context(context)
        filepath = filepath.replace("\\", "/")
        self.log.debug("_ filepath: {}".format(filepath))

        start_at_workfile = options.get(
            "start_at_workfile", self.options_defaults["start_at_workfile"])

        add_retime = options.get(
            "add_retime", self.options_defaults["add_retime"])

        deep_exr = options.get(
            "deep_exr", self.options_defaults["deep_exr"])

        repre_id = repre_entity["id"]

        self.log.debug(
            "Representation id `{}` ".format(repre_id))

        self.handle_start = version_attributes.get("handleStart", 0)
        self.handle_end = version_attributes.get("handleEnd", 0)

        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        first -= self.handle_start
        last += self.handle_end

        if not is_sequence:
            duration = last - first
            first = 1
            last = first + duration

        # If a slate is present, the frame range is 1 frame longer for
        # movies, but for file sequences it is the first frame that is
        # 1 frame lower.
        slate_frames = repre_entity["data"].get("slateFrames", 0)
        extension = "." + repre_entity["context"]["ext"]

        if extension in VIDEO_EXTENSIONS:
            last += slate_frames

        files_count = len(repre_entity["files"])
        if extension in IMAGE_EXTENSIONS and files_count != 1:
            first -= slate_frames

        # Fallback to folder name when namespace is None
        if namespace is None:
            namespace = context["folder"]["name"]

        if not filepath:
            self.log.warning(
                "Representation id `{}` is failing to load".format(repre_id))
            return

        read_name = self._get_node_name(context)
        read_node = None
        if deep_exr:
            # Create the Loader with the filename path set
            read_node = nuke.createNode(
                "DeepRead",
                "name {}".format(read_name),
                inpanel=False
            )
        else:
            # Create the Loader with the filename path set
            read_node = nuke.createNode(
                "Read",
                "name {}".format(read_name),
                inpanel=False
            )

        # get colorspace
        colorspace = (
            repre_entity["data"].get("colorspace")
            or version_attributes.get("colorSpace")
        )

        # to avoid multiple undo steps for rest of process
        # we will switch off undo-ing
        with viewer_update_and_undo_stop():
            read_node["file"].setValue(filepath)
            if read_node.Class() == "Read":
                self.set_colorspace_to_node(
                    read_node,
                    filepath,
                    project_name,
                    version_entity,
                    repre_entity
                )

            self._set_range_to_node(
                read_node, first, last, start_at_workfile, slate_frames
            )

            version_name = version_entity["version"]
            if version_name < 0:
                version_name = "hero"

            data_imprint = {
                "version": version_name,
                "db_colorspace": colorspace
            }

            # add attributes from the version to imprint metadata knob
            for key in [
                "frameStart",
                "frameEnd",
                "source",
                "fps",
                "handleStart",
                "handleEnd",
            ]:
                value = version_attributes.get(key, str(None))
                if isinstance(value, str):
                    value = value.replace("\\", "/")
                data_imprint[key] = value

            if add_retime and version_data.get("retime"):
                data_imprint["addRetime"] = True

            read_node["tile_color"].setValue(int("0x4ecd25ff", 16))

            container = containerise(
                read_node,
                name=name,
                namespace=namespace,
                context=context,
                loader=self.__class__.__name__,
                data=data_imprint)

        if add_retime and version_data.get("retime"):
            self._make_retimes(read_node, version_data)

        self.set_as_member(read_node)

        return container

    def switch(self, container, context):
        self.update(container, context)

    def _representation_with_hash_in_frame(self, repre_entity):
        """Convert frame key value to padded hash

        Args:
            repre_entity (dict): Representation entity.

        Returns:
            dict: altered representation data

        """
        new_repre_entity = deepcopy(repre_entity)
        context = new_repre_entity["context"]

        # Get the frame from the context and hash it
        frame = context["frame"]
        hashed_frame = "#" * len(str(frame))
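        # e.g. frame "1001" becomes "####" so Nuke resolves the sequence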
        # Replace the frame with the hash in the originalBasename
        if (
            "{originalBasename}" in new_repre_entity["attrib"]["template"]
        ):
            origin_basename = context["originalBasename"]
            context["originalBasename"] = origin_basename.replace(
                frame, hashed_frame
            )

        # Replace the frame with the hash in the frame
        new_repre_entity["context"]["frame"] = hashed_frame
        return new_repre_entity

    def update(self, container, context):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs.
        """

        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        version_attributes = version_entity["attrib"]
        version_data = version_entity["data"]

        is_sequence = len(repre_entity["files"]) > 1

        read_node = container["node"]

        if is_sequence:
            repre_entity = self._representation_with_hash_in_frame(
                repre_entity
            )

        filepath = (
            get_representation_path(repre_entity)
        ).replace("\\", "/")
        self.log.debug("_ filepath: {}".format(filepath))

        start_at_workfile = "start at" in read_node['frame_mode'].value()

        add_retime = [
            key for key in read_node.knobs().keys()
            if "addRetime" in key
        ]

        repre_id = repre_entity["id"]

        # colorspace profile
        colorspace = (
            repre_entity["data"].get("colorspace")
            or version_attributes.get("colorSpace")
        )

        self.handle_start = version_attributes.get("handleStart", 0)
        self.handle_end = version_attributes.get("handleEnd", 0)

        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        first -= self.handle_start
        last += self.handle_end

        if not is_sequence:
            duration = last - first
            first = 1
            last = first + duration

        if not filepath:
            self.log.warning(
                "Representation id `{}` is failing to load".format(repre_id))
            return

        read_node["file"].setValue(filepath)

        # to avoid multiple undo steps for rest of process
        # we will switch off undo-ing
        with viewer_update_and_undo_stop():
            if read_node.Class() == "Read":
                self.set_colorspace_to_node(
                    read_node,
                    filepath,
                    project_name,
                    version_entity,
                    repre_entity
                )

            self._set_range_to_node(read_node, first, last, start_at_workfile)

            updated_dict = {
                "representation": repre_entity["id"],
                "frameStart": str(first),
                "frameEnd": str(last),
                "version": str(version_entity["version"]),
                "db_colorspace": colorspace,
                "source": version_attributes.get("source"),
                "handleStart": str(self.handle_start),
                "handleEnd": str(self.handle_end),
                "fps": str(version_attributes.get("fps"))
            }

            last_version_entity = ayon_api.get_last_version_by_product_id(
                project_name, version_entity["productId"], fields={"id"}
            )
            # change color of read_node
            if version_entity["id"] == last_version_entity["id"]:
                color_value = "0x4ecd25ff"
            else:
                color_value = "0xd84f20ff"
            read_node["tile_color"].setValue(int(color_value, 16))

            # Update the imprinted representation
            update_container(read_node, updated_dict)
            self.log.info(
                "updated to version: {}".format(version_entity["version"])
            )

        if add_retime and version_data.get("retime"):
            self._make_retimes(read_node, version_data)
        else:
            self.clear_members(read_node)

        self.set_as_member(read_node)

    def set_colorspace_to_node(
        self,
        read_node,
        filepath,
        project_name,
        version_entity,
        repre_entity,
    ):
        """Set colorspace to read node.

        Sets colorspace with available names validation.

        Args:
            read_node (nuke.Node): The nuke's read node
            filepath (str): File path.
            project_name (str): Project name.
            version_entity (dict): Version entity.
            repre_entity (dict): Representation entity.

        """
        used_colorspace = self._get_colorspace_data(
            project_name, version_entity, repre_entity, filepath
        )
        if (
            used_colorspace
            and colorspace_exists_on_node(read_node, used_colorspace)
        ):
            self.log.info(f"Used colorspace: {used_colorspace}")
            read_node["colorspace"].setValue(used_colorspace)
        else:
            self.log.info("Colorspace not set...")

    def remove(self, container):
        read_node = container["node"]
        assert read_node.Class() in ("Read", "DeepRead"), (
            "Must be Read or DeepRead")

        with viewer_update_and_undo_stop():
            members = self.get_members(read_node)
            nuke.delete(read_node)
            for member in members:
                nuke.delete(member)

    def _set_range_to_node(
        self, read_node, first, last, start_at_workfile, slate_frames=0
    ):
        read_node['origfirst'].setValue(int(first))
        read_node['first'].setValue(int(first))
        read_node['origlast'].setValue(int(last))
        read_node['last'].setValue(int(last))

        # set start frame depending on workfile or version
        if start_at_workfile:
            read_node['frame_mode'].setValue("start at")

            start_frame = self.script_start - slate_frames
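            # shift the start back so any slate frames land just before
            # the workfile start frame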
            read_node['frame'].setValue(str(start_frame))

    def _make_retimes(self, parent_node, version_data):
        ''' Create all retime and timewarping nodes with copied animation '''
        speed = version_data.get('speed', 1)
        time_warp_nodes = version_data.get('timewarps', [])
        last_node = None
        source_id = self.get_container_id(parent_node)
        self.log.debug("__ source_id: {}".format(source_id))
        self.log.debug("__ members: {}".format(
            self.get_members(parent_node)))

        dependent_nodes = self.clear_members(parent_node)

        with maintained_selection():
            parent_node['selected'].setValue(True)

            if speed != 1:
                rtn = nuke.createNode(
                    "Retime",
                    "speed {}".format(speed))

                rtn["before"].setValue("continue")
                rtn["after"].setValue("continue")
                rtn["input.first_lock"].setValue(True)
                rtn["input.first"].setValue(
                    self.script_start
                )
                self.set_as_member(rtn)
                last_node = rtn

            if time_warp_nodes != []:
                start_anim = self.script_start + (self.handle_start / speed)
                for timewarp in time_warp_nodes:
                    twn = nuke.createNode(
                        timewarp["Class"],
                        "name {}".format(timewarp["name"])
                    )
                    if isinstance(timewarp["lookup"], list):
                        # if array for animation
                        twn["lookup"].setAnimated()
                        for i, value in enumerate(timewarp["lookup"]):
                            twn["lookup"].setValueAt(
                                (start_anim + i) + value,
                                (start_anim + i))
                    else:
                        # if static value `int`
                        twn["lookup"].setValue(timewarp["lookup"])

                    self.set_as_member(twn)
                    last_node = twn

            if dependent_nodes:
                # connect to original inputs
                for i, n in enumerate(dependent_nodes):
                    last_node.setInput(i, n)

    def _get_node_name(self, context):
        folder_entity = context["folder"]
        product_name = context["product"]["name"]
        repre_entity = context["representation"]

        folder_name = folder_entity["name"]
        repre_cont = repre_entity["context"]
        name_data = {
            "folder": {
                "name": folder_name,
            },
            "product": {
                "name": product_name,
            },
            "asset": folder_name,
            "subset": product_name,
            "representation": repre_entity["name"],
            "ext": repre_cont["representation"],
            "id": repre_entity["id"],
            "class_name": self.__class__.__name__
        }

        return self.node_name_template.format(**name_data)

    def _get_colorspace_data(
        self, project_name, version_entity, repre_entity, filepath
    ):
        """Get colorspace data from version and representation documents

        Args:
            project_name (str): Project name.
            version_entity (dict): Version entity.
            repre_entity (dict): Representation entity.
            filepath (str): File path.

        Returns:
            Any[str,None]: colorspace name or None
        """
        # Get backward compatible colorspace key.
        colorspace = repre_entity["data"].get("colorspace")
        self.log.debug(
            f"Colorspace from representation colorspace: {colorspace}"
        )

        # Get backward compatible version data key if colorspace is not found.
        if not colorspace:
            colorspace = version_entity["attrib"].get("colorSpace")
            self.log.debug(
                f"Colorspace from version colorspace: {colorspace}"
            )

        # Get colorspace from representation colorspaceData if colorspace is
        # not found.
        if not colorspace:
            colorspace_data = repre_entity["data"].get("colorspaceData", {})
            colorspace = colorspace_data.get("colorspace")
            self.log.debug(
                f"Colorspace from representation colorspaceData: {colorspace}"
            )

        config_data = get_current_context_imageio_config_preset()
        # try to resolve a colorspace from the imageio file rules
        new_parsed_colorspace = get_imageio_file_rules_colorspace_from_filepath(  # noqa
            filepath, "nuke", project_name, config_data=config_data
        )
        self.log.debug(f"Colorspace new filerules: {new_parsed_colorspace}")

        # colorspace from `project_settings/nuke/imageio/regexInputs`
        old_parsed_colorspace = get_imageio_input_colorspace(filepath)
        self.log.debug(f"Colorspace old filerules: {old_parsed_colorspace}")

return (
|
||||
new_parsed_colorspace
|
||||
or old_parsed_colorspace
|
||||
or colorspace
|
||||
)
|
||||
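The return expression above gives file-rule parsing priority over entity
metadata: the new file rules win, then the legacy `regexInputs` rules, and
only then the colorspace stored on the representation or version. A minimal
standalone sketch of that precedence, with hypothetical values:

    # Hypothetical values, for illustration only.
    new_parsed_colorspace = None              # new file rules matched nothing
    old_parsed_colorspace = "ACES - ACEScg"   # legacy regexInputs matched
    colorspace = "sRGB"                       # stored on the entities

    resolved = (
        new_parsed_colorspace
        or old_parsed_colorspace
        or colorspace
    )
    assert resolved == "ACES - ACEScg"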
server_addon/nuke/client/ayon_nuke/plugins/load/load_effects.py (new file)
@ -0,0 +1,361 @@
import json
from collections import OrderedDict
import nuke
import six
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)


class LoadEffects(load.LoaderPlugin):
    """Loading colorspace soft effect exported from nukestudio"""

    product_types = {"effect"}
    representations = {"*"}
    extensions = {"json"}

    settings_category = "nuke"

    label = "Load Effects - nodes"
    order = 0
    icon = "cc"
    color = "white"
    ignore_attr = ["useLifetime"]

    def load(self, context, name, namespace, data):
        """
        Loading function to get the soft effects to a particular read node

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): namespace name
            data (dict): compulsory attribute > not used

        Returns:
            nuke node: containerised nuke node object
        """
        # get main variables
        version_entity = context["version"]

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        workfile_first_frame = int(nuke.root()["first_frame"].getValue())
        namespace = namespace or context["folder"]["name"]
        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        data_imprint = {
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace,
        }

        # add additional metadata from the version to imprint to Avalon knob
        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = self.filepath_from_context(context).replace("\\", "/")

        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        GN = nuke.createNode(
            "Group",
            "name {}_1".format(object_name),
            inpanel=False
        )

        # adding content to the group node
        with GN:
            pre_node = nuke.createNode("Input")
            pre_node["name"].setValue("rgb")

            for ef_name, ef_val in nodes_order.items():
                node = nuke.createNode(ef_val["class"])
                for k, v in ef_val["node"].items():
                    if k in self.ignore_attr:
                        continue

                    try:
                        node[k].value()
                    except NameError as e:
                        self.log.warning(e)
                        continue

                    if isinstance(v, list) and len(v) > 4:
                        node[k].setAnimated()
                        for i, value in enumerate(v):
                            if isinstance(value, list):
                                for ci, cv in enumerate(value):
                                    node[k].setValueAt(
                                        cv,
                                        (workfile_first_frame + i),
                                        ci)
                            else:
                                node[k].setValueAt(
                                    value,
                                    (workfile_first_frame + i))
                    else:
                        node[k].setValue(v)
                node.setInput(0, pre_node)
                pre_node = node

            output = nuke.createNode("Output")
            output.setInput(0, pre_node)

        # try to find parent read node
        self.connect_read_node(GN, namespace, json_f["assignTo"])

        GN["tile_color"].setValue(int("0x3469ffff", 16))

        self.log.info("Loaded lut setup: `{}`".format(GN["name"].value()))

        return containerise(
            node=GN,
            name=name,
            namespace=namespace,
            context=context,
            loader=self.__class__.__name__,
            data=data_imprint)

    def update(self, container, context):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        """
        # get main variables
        # Get version from io
        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        # get corresponding node
        GN = container["node"]

        file = get_representation_path(repre_entity).replace("\\", "/")

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        workfile_first_frame = int(nuke.root()["first_frame"].getValue())
        namespace = container["namespace"]

        data_imprint = {
            "representation": repre_entity["id"],
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace
        }

        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps",
        ]:
            data_imprint[k] = version_attributes[k]

        # Update the imprinted representation
        update_container(
            GN,
            data_imprint
        )

        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        # adding content to the group node
        with GN:
            # first remove all nodes
            [nuke.delete(n) for n in nuke.allNodes()]

            # create input node
            pre_node = nuke.createNode("Input")
            pre_node["name"].setValue("rgb")

            for _, ef_val in nodes_order.items():
                node = nuke.createNode(ef_val["class"])
                for k, v in ef_val["node"].items():
                    if k in self.ignore_attr:
                        continue

                    try:
                        node[k].value()
                    except NameError as e:
                        self.log.warning(e)
                        continue

                    if isinstance(v, list) and len(v) > 4:
                        node[k].setAnimated()
                        for i, value in enumerate(v):
                            if isinstance(value, list):
                                for ci, cv in enumerate(value):
                                    node[k].setValueAt(
                                        cv,
                                        (workfile_first_frame + i),
                                        ci)
                            else:
                                node[k].setValueAt(
                                    value,
                                    (workfile_first_frame + i))
                    else:
                        node[k].setValue(v)
                node.setInput(0, pre_node)
                pre_node = node

            # create output node
            output = nuke.createNode("Output")
            output.setInput(0, pre_node)

        # try to find parent read node
        self.connect_read_node(GN, namespace, json_f["assignTo"])

        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = "0x3469ffff"
        else:
            color_value = "0xd84f20ff"

        GN["tile_color"].setValue(int(color_value, 16))

        self.log.info(
            "updated to version: {}".format(version_entity["version"])
        )

    def connect_read_node(self, group_node, namespace, product_name):
        """
        Finds a read node and selects it

        Arguments:
            group_node (nuke node): the effects group node
            namespace (str): namespace name
            product_name (str): product name to search for

        Returns:
            nuke node: node is selected
            None: if nothing found
        """
        search_name = "{0}_{1}".format(namespace, product_name)

        node = [
            n for n in nuke.allNodes(filter="Read")
            if search_name in n["file"].value()
        ]
        if len(node) > 0:
            rn = node[0]
        else:
            rn = None

        # Parent read node has been found
        # solving connections
        if rn:
            dep_nodes = rn.dependent()

            if len(dep_nodes) > 0:
                for dn in dep_nodes:
                    dn.setInput(0, group_node)

            group_node.setInput(0, rn)
            group_node.autoplace()

    def reorder_nodes(self, data):
        new_order = OrderedDict()
        trackNums = [v["trackIndex"] for k, v in data.items()
                     if isinstance(v, dict)]
        subTrackNums = [v["subTrackIndex"] for k, v in data.items()
                        if isinstance(v, dict)]

        for trackIndex in range(
                min(trackNums), max(trackNums) + 1):
            for subTrackIndex in range(
                    min(subTrackNums), max(subTrackNums) + 1):
                item = self.get_item(data, trackIndex, subTrackIndex)
                if item:
                    new_order.update(item)
        return new_order

    def get_item(self, data, trackIndex, subTrackIndex):
        return {key: val for key, val in data.items()
                if isinstance(val, dict)
                if subTrackIndex == val["subTrackIndex"]
                if trackIndex == val["trackIndex"]}

    def byteify(self, input):
        """
        Converts unicode strings to str, recursively walking
        through dictionaries and lists.

        Arguments:
            input (dict/str): input

        Returns:
            dict: with fixed values and keys

        """

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, six.text_type):
            return str(input)
        else:
            return input

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        node = container["node"]
        with viewer_update_and_undo_stop():
            nuke.delete(node)
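For orientation, a small self-contained sketch of what `reorder_nodes` and
`get_item` above do with the exported JSON. Only the `trackIndex` and
`subTrackIndex` keys mirror the real payload; the effect names and empty
knob dicts are made up:

    from collections import OrderedDict

    data = {
        "assignTo": "plateMain",  # non-dict entry, skipped by isinstance()
        "grade": {"trackIndex": 1, "subTrackIndex": 0,
                  "class": "Grade", "node": {}},
        "cc": {"trackIndex": 0, "subTrackIndex": 0,
               "class": "ColorCorrect", "node": {}},
    }

    items = {k: v for k, v in data.items() if isinstance(v, dict)}
    new_order = OrderedDict()
    for track in range(0, 2):
        for subtrack in range(0, 1):
            new_order.update({
                k: v for k, v in items.items()
                if v["trackIndex"] == track
                and v["subTrackIndex"] == subtrack
            })

    # "cc" (track 0) comes first, so it is wired closest to the Input node.
    assert list(new_order) == ["cc", "grade"]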
@ -0,0 +1,372 @@
import json
from collections import OrderedDict
import six
import nuke
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api import lib
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)


class LoadEffectsInputProcess(load.LoaderPlugin):
    """Loading colorspace soft effect exported from nukestudio"""

    product_types = {"effect"}
    representations = {"*"}
    extensions = {"json"}

    settings_category = "nuke"

    label = "Load Effects - Input Process"
    order = 0
    icon = "eye"
    color = "#cc0000"
    ignore_attr = ["useLifetime"]

    def load(self, context, name, namespace, data):
        """
        Loading function to get the soft effects to a particular read node

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): namespace name
            data (dict): compulsory attribute > not used

        Returns:
            nuke node: containerised nuke node object
        """

        # get main variables
        version_entity = context["version"]

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        workfile_first_frame = int(nuke.root()["first_frame"].getValue())
        namespace = namespace or context["folder"]["name"]
        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        data_imprint = {
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace,
        }
        # add additional metadata from the version to imprint to Avalon knob
        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = self.filepath_from_context(context).replace("\\", "/")

        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        GN = nuke.createNode(
            "Group",
            "name {}_1".format(object_name),
            inpanel=False
        )

        # adding content to the group node
        with GN:
            pre_node = nuke.createNode("Input")
            pre_node["name"].setValue("rgb")

            for _, ef_val in nodes_order.items():
                node = nuke.createNode(ef_val["class"])
                for k, v in ef_val["node"].items():
                    if k in self.ignore_attr:
                        continue

                    try:
                        node[k].value()
                    except NameError as e:
                        self.log.warning(e)
                        continue

                    if isinstance(v, list) and len(v) > 4:
                        node[k].setAnimated()
                        for i, value in enumerate(v):
                            if isinstance(value, list):
                                for ci, cv in enumerate(value):
                                    node[k].setValueAt(
                                        cv,
                                        (workfile_first_frame + i),
                                        ci)
                            else:
                                node[k].setValueAt(
                                    value,
                                    (workfile_first_frame + i))
                    else:
                        node[k].setValue(v)

                node.setInput(0, pre_node)
                pre_node = node

            output = nuke.createNode("Output")
            output.setInput(0, pre_node)

        # try to place it under Viewer1
        if not self.connect_active_viewer(GN):
            nuke.delete(GN)
            return

        GN["tile_color"].setValue(int("0x3469ffff", 16))

        self.log.info("Loaded lut setup: `{}`".format(GN["name"].value()))

        return containerise(
            node=GN,
            name=name,
            namespace=namespace,
            context=context,
            loader=self.__class__.__name__,
            data=data_imprint)

    def update(self, container, context):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        """

        # get main variables
        # Get version from io
        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        # get corresponding node
        GN = container["node"]

        file = get_representation_path(repre_entity).replace("\\", "/")

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        workfile_first_frame = int(nuke.root()["first_frame"].getValue())

        data_imprint = {
            "representation": repre_entity["id"],
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace,
        }

        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # Update the imprinted representation
        update_container(
            GN,
            data_imprint
        )

        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        # adding content to the group node
        with GN:
            # first remove all nodes
            [nuke.delete(n) for n in nuke.allNodes()]

            # create input node
            pre_node = nuke.createNode("Input")
            pre_node["name"].setValue("rgb")

            for _, ef_val in nodes_order.items():
                node = nuke.createNode(ef_val["class"])
                for k, v in ef_val["node"].items():
                    if k in self.ignore_attr:
                        continue

                    try:
                        node[k].value()
                    except NameError as e:
                        self.log.warning(e)
                        continue

                    if isinstance(v, list) and len(v) > 4:
                        node[k].setAnimated()
                        for i, value in enumerate(v):
                            if isinstance(value, list):
                                for ci, cv in enumerate(value):
                                    node[k].setValueAt(
                                        cv,
                                        (workfile_first_frame + i),
                                        ci)
                            else:
                                node[k].setValueAt(
                                    value,
                                    (workfile_first_frame + i))
                    else:
                        node[k].setValue(v)
                node.setInput(0, pre_node)
                pre_node = node

            # create output node
            output = nuke.createNode("Output")
            output.setInput(0, pre_node)

        # get all versions in list
        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = "0x3469ffff"
        else:
            color_value = "0xd84f20ff"
        GN["tile_color"].setValue(int(color_value, 16))

        self.log.info("updated to version: {}".format(version_entity["name"]))

    def connect_active_viewer(self, group_node):
        """
        Finds the active viewer, places the node under it and adds
        the name of the group into the viewer's Input Process

        Arguments:
            group_node (nuke node): nuke group node object

        """
        group_node_name = group_node["name"].value()

        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
        if len(viewer) > 0:
            viewer = viewer[0]
        else:
            msg = str("Please create Viewer node before you "
                      "run this action again")
            self.log.error(msg)
            nuke.message(msg)
            return None

        # get coordinates of Viewer1
        xpos = viewer["xpos"].value()
        ypos = viewer["ypos"].value()

        ypos += 150

        viewer["ypos"].setValue(ypos)

        # set coordinates to group node
        group_node["xpos"].setValue(xpos)
        group_node["ypos"].setValue(ypos + 50)

        # add group node name to Viewer Input Process
        viewer["input_process_node"].setValue(group_node_name)

        # put backdrop under
        lib.create_backdrop(
            label="Input Process",
            layer=2,
            nodes=[viewer, group_node],
            color="0x7c7faaff")

        return True

    def reorder_nodes(self, data):
        new_order = OrderedDict()
        trackNums = [v["trackIndex"] for k, v in data.items()
                     if isinstance(v, dict)]
        subTrackNums = [v["subTrackIndex"] for k, v in data.items()
                        if isinstance(v, dict)]

        for trackIndex in range(
                min(trackNums), max(trackNums) + 1):
            for subTrackIndex in range(
                    min(subTrackNums), max(subTrackNums) + 1):
                item = self.get_item(data, trackIndex, subTrackIndex)
                if item:
                    new_order.update(item)
        return new_order

    def get_item(self, data, trackIndex, subTrackIndex):
        return {key: val for key, val in data.items()
                if isinstance(val, dict)
                if subTrackIndex == val["subTrackIndex"]
                if trackIndex == val["trackIndex"]}

    def byteify(self, input):
        """
        Converts unicode strings to str, recursively walking
        through dictionaries and lists.

        Arguments:
            input (dict/str): input

        Returns:
            dict: with fixed values and keys

        """

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, six.text_type):
            return str(input)
        else:
            return input

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        node = container["node"]
        with viewer_update_and_undo_stop():
            nuke.delete(node)
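The viewer wiring in `connect_active_viewer` above boils down to one knob:
the group is registered as the viewer's Input Process by name. A hedged
sketch of just that wiring (Nuke-only API, node names hypothetical):

    import nuke

    viewer = nuke.toNode("Viewer1")
    group_node = nuke.toNode("effectMain_sh010_1")

    # The viewer applies the named node to everything it displays.
    viewer["input_process_node"].setValue(group_node["name"].value())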
server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo.py (new file)
@ -0,0 +1,190 @@
import nuke
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api.lib import (
    maintained_selection,
    get_avalon_knob_data,
    set_avalon_knob_data,
    swap_node_with_dependency,
)
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)


class LoadGizmo(load.LoaderPlugin):
    """Loading nuke Gizmo"""

    product_types = {"gizmo"}
    representations = {"*"}
    extensions = {"nk"}

    settings_category = "nuke"

    label = "Load Gizmo"
    order = 0
    icon = "dropbox"
    color = "white"
    node_color = "0x75338eff"

    def load(self, context, name, namespace, data):
        """
        Loading function to get Gizmo into node graph

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): namespace name
            data (dict): compulsory attribute > not used

        Returns:
            nuke node: containerized nuke node object
        """

        # get main variables
        version_entity = context["version"]
        version_attributes = version_entity["attrib"]

        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        namespace = namespace or context["folder"]["name"]
        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        data_imprint = {
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace
        }

        # add attributes from the version to imprint to metadata knob
        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = self.filepath_from_context(context).replace("\\", "/")

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        with maintained_selection():
            # add group from nk
            nuke.nodePaste(file)

            group_node = nuke.selectedNode()

            group_node["name"].setValue(object_name)

            return containerise(
                node=group_node,
                name=name,
                namespace=namespace,
                context=context,
                loader=self.__class__.__name__,
                data=data_imprint)

    def update(self, container, context):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        """

        # get main variables
        # Get version from io
        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        version_attributes = version_entity["attrib"]

        # get corresponding node
        group_node = container["node"]

        file = get_representation_path(repre_entity).replace("\\", "/")

        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        data_imprint = {
            "representation": repre_entity["id"],
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace
        }

        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # capture pipeline metadata
        avalon_data = get_avalon_knob_data(group_node)

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        with maintained_selection([group_node]):
            # insert nuke script to the script
            nuke.nodePaste(file)
            # convert imported to selected node
            new_group_node = nuke.selectedNode()
            # swap nodes with maintained connections
            with swap_node_with_dependency(
                    group_node, new_group_node) as node_name:
                new_group_node["name"].setValue(node_name)
                # set updated pipeline metadata
                set_avalon_knob_data(new_group_node, avalon_data)

        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = self.node_color
        else:
            color_value = "0xd88467ff"

        new_group_node["tile_color"].setValue(int(color_value, 16))

        self.log.info(
            "updated to version: {}".format(version_entity["name"])
        )

        return update_container(new_group_node, data_imprint)

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        node = container["node"]
        with viewer_update_and_undo_stop():
            nuke.delete(node)
server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo_ip.py (new file)
@ -0,0 +1,270 @@
import nuke
import six
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api.lib import (
    maintained_selection,
    create_backdrop,
    get_avalon_knob_data,
    set_avalon_knob_data,
    swap_node_with_dependency,
)
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)


class LoadGizmoInputProcess(load.LoaderPlugin):
    """Loading nuke Gizmo to the viewer's Input Process"""

    product_types = {"gizmo"}
    representations = {"*"}
    extensions = {"nk"}

    settings_category = "nuke"

    label = "Load Gizmo - Input Process"
    order = 0
    icon = "eye"
    color = "#cc0000"
    node_color = "0x7533c1ff"

    def load(self, context, name, namespace, data):
        """
        Loading function to get Gizmo as Input Process on viewer

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): namespace name
            data (dict): compulsory attribute > not used

        Returns:
            nuke node: containerized nuke node object
        """

        # get main variables
        version_entity = context["version"]

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        namespace = namespace or context["folder"]["name"]
        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        # add additional metadata from the version to imprint to metadata knob
        data_imprint = {
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace
        }

        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = self.filepath_from_context(context).replace("\\", "/")

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        with maintained_selection():
            # add group from nk
            nuke.nodePaste(file)

            group_node = nuke.selectedNode()

            group_node["name"].setValue(object_name)

            # try to place it under Viewer1
            if not self.connect_active_viewer(group_node):
                nuke.delete(group_node)
                return

            return containerise(
                node=group_node,
                name=name,
                namespace=namespace,
                context=context,
                loader=self.__class__.__name__,
                data=data_imprint)

    def update(self, container, context):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        """

        # get main variables
        # Get version from io
        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        # get corresponding node
        group_node = container["node"]

        file = get_representation_path(repre_entity).replace("\\", "/")

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        data_imprint = {
            "representation": repre_entity["id"],
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"],
            "colorspaceInput": colorspace
        }

        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # capture pipeline metadata
        avalon_data = get_avalon_knob_data(group_node)

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        with maintained_selection([group_node]):
            # insert nuke script to the script
            nuke.nodePaste(file)
            # convert imported to selected node
            new_group_node = nuke.selectedNode()
            # swap nodes with maintained connections
            with swap_node_with_dependency(
                    group_node, new_group_node) as node_name:
                new_group_node["name"].setValue(node_name)
                # set updated pipeline metadata
                set_avalon_knob_data(new_group_node, avalon_data)

        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = self.node_color
        else:
            color_value = "0xd88467ff"
        new_group_node["tile_color"].setValue(int(color_value, 16))

        self.log.info(
            "updated to version: {}".format(version_entity["version"])
        )

        return update_container(new_group_node, data_imprint)

    def connect_active_viewer(self, group_node):
        """
        Finds the active viewer, places the node under it and adds
        the name of the group into the viewer's Input Process

        Arguments:
            group_node (nuke node): nuke group node object

        """
        group_node_name = group_node["name"].value()

        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
        if len(viewer) > 0:
            viewer = viewer[0]
        else:
            msg = str("Please create Viewer node before you "
                      "run this action again")
            self.log.error(msg)
            nuke.message(msg)
            return None

        # get coordinates of Viewer1
        xpos = viewer["xpos"].value()
        ypos = viewer["ypos"].value()

        ypos += 150

        viewer["ypos"].setValue(ypos)

        # set coordinates to group node
        group_node["xpos"].setValue(xpos)
        group_node["ypos"].setValue(ypos + 50)

        # add group node name to Viewer Input Process
        viewer["input_process_node"].setValue(group_node_name)

        # put backdrop under
        create_backdrop(
            label="Input Process",
            layer=2,
            nodes=[viewer, group_node],
            color="0x7c7faaff"
        )

        return True

    def get_item(self, data, trackIndex, subTrackIndex):
        return {key: val for key, val in data.items()
                if isinstance(val, dict)
                if subTrackIndex == val["subTrackIndex"]
                if trackIndex == val["trackIndex"]}

    def byteify(self, input):
        """
        Converts unicode strings to str, recursively walking
        through dictionaries and lists.

        Arguments:
            input (dict/str): input

        Returns:
            dict: with fixed values and keys

        """

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, six.text_type):
            return str(input)
        else:
            return input

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        node = container["node"]
        with viewer_update_and_undo_stop():
            nuke.delete(node)
server_addon/nuke/client/ayon_nuke/plugins/load/load_image.py (new file)
@ -0,0 +1,254 @@
import nuke

import qargparse
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api.lib import (
    get_imageio_input_colorspace
)
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)
from ayon_core.lib.transcoding import (
    IMAGE_EXTENSIONS
)


class LoadImage(load.LoaderPlugin):
    """Load still image into Nuke"""

    product_types = {
        "render2d",
        "source",
        "plate",
        "render",
        "prerender",
        "review",
        "image",
    }
    representations = {"*"}
    extensions = set(ext.lstrip(".") for ext in IMAGE_EXTENSIONS)

    settings_category = "nuke"

    label = "Load Image"
    order = -10
    icon = "image"
    color = "white"

    # Loaded from settings
    representations_include = []

    node_name_template = "{class_name}_{ext}"

    options = [
        qargparse.Integer(
            "frame_number",
            label="Frame Number",
            default=int(nuke.root()["first_frame"].getValue()),
            min=1,
            max=999999,
            help="Which frame to read from"
        )
    ]

    @classmethod
    def get_representations(cls):
        return cls.representations_include or cls.representations

    def load(self, context, name, namespace, options):
        self.log.info("__ options: `{}`".format(options))
        frame_number = options.get(
            "frame_number", int(nuke.root()["first_frame"].getValue())
        )

        version_entity = context["version"]
        version_attributes = version_entity["attrib"]
        repre_entity = context["representation"]
        repre_id = repre_entity["id"]

        self.log.debug(
            "Representation id `{}` ".format(repre_id))

        last = first = int(frame_number)

        # Fallback to folder name when namespace is None
        if namespace is None:
            namespace = context["folder"]["name"]

        file = self.filepath_from_context(context)

        if not file:
            self.log.warning(
                "Representation id `{}` is failing to load".format(repre_id))
            return

        file = file.replace("\\", "/")

        frame = repre_entity["context"].get("frame")
        if frame:
            padding = len(frame)
            file = file.replace(
                frame,
                format(frame_number, "0{}".format(padding)))

        read_name = self._get_node_name(context)

        # Create the Loader with the filename path set
        with viewer_update_and_undo_stop():
            r = nuke.createNode(
                "Read",
                "name {}".format(read_name),
                inpanel=False
            )

            r["file"].setValue(file)

            # Set colorspace defined in version data
            colorspace = version_entity["attrib"].get("colorSpace")
            if colorspace:
                r["colorspace"].setValue(str(colorspace))

            preset_clrsp = get_imageio_input_colorspace(file)

            if preset_clrsp is not None:
                r["colorspace"].setValue(preset_clrsp)

            r["origfirst"].setValue(first)
            r["first"].setValue(first)
            r["origlast"].setValue(last)
            r["last"].setValue(last)

            # add attributes from the version to imprint metadata knob
            colorspace = version_attributes["colorSpace"]
            data_imprint = {
                "frameStart": first,
                "frameEnd": last,
                "version": version_entity["version"],
                "colorspace": colorspace,
            }
            for k in ["source", "fps"]:
                data_imprint[k] = version_attributes.get(k, str(None))

            r["tile_color"].setValue(int("0x4ecd25ff", 16))

            return containerise(r,
                                name=name,
                                namespace=namespace,
                                context=context,
                                loader=self.__class__.__name__,
                                data=data_imprint)

    def switch(self, container, context):
        self.update(container, context)

    def update(self, container, context):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        """
        node = container["node"]
        frame_number = node["first"].value()

        assert node.Class() == "Read", "Must be Read"

        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        repr_cont = repre_entity["context"]

        file = get_representation_path(repre_entity)

        if not file:
            repre_id = repre_entity["id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repre_id))
            return

        file = file.replace("\\", "/")

        frame = repr_cont.get("frame")
        if frame:
            padding = len(frame)
            file = file.replace(
                frame,
                format(frame_number, "0{}".format(padding)))

        # Get start frame from version data
        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        last = first = int(frame_number)

        # Set the global in to the start frame of the sequence
        node["file"].setValue(file)
        node["origfirst"].setValue(first)
        node["first"].setValue(first)
        node["origlast"].setValue(last)
        node["last"].setValue(last)

        version_attributes = version_entity["attrib"]
        updated_dict = {
            "representation": repre_entity["id"],
            "frameStart": str(first),
            "frameEnd": str(last),
            "version": str(version_entity["version"]),
            "colorspace": version_attributes.get("colorSpace"),
            "source": version_attributes.get("source"),
            "fps": str(version_attributes.get("fps")),
        }

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = "0x4ecd25ff"
        else:
            color_value = "0xd84f20ff"
        node["tile_color"].setValue(int(color_value, 16))

        # Update the imprinted representation
        update_container(node, updated_dict)
        self.log.info("updated to version: {}".format(
            version_entity["version"]
        ))

    def remove(self, container):
        node = container["node"]
        assert node.Class() == "Read", "Must be Read"

        with viewer_update_and_undo_stop():
            nuke.delete(node)

    def _get_node_name(self, context):
        folder_entity = context["folder"]
        product_name = context["product"]["name"]
        repre_entity = context["representation"]

        folder_name = folder_entity["name"]
        repre_cont = repre_entity["context"]
        name_data = {
            "folder": {
                "name": folder_name,
            },
            "product": {
                "name": product_name,
            },
            "asset": folder_name,
            "subset": product_name,
            "representation": repre_entity["name"],
            "ext": repre_cont["representation"],
            "id": repre_entity["id"],
            "class_name": self.__class__.__name__
        }

        return self.node_name_template.format(**name_data)
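The frame substitution used by both `load` and `update` above keeps the
zero padding of the published sequence. A standalone sketch of the string
replacement (the path is hypothetical):

    file = "/proj/sh010/plate_v001.1001.exr"
    frame = "1001"      # repre_entity["context"]["frame"], a padded string
    frame_number = 42

    padding = len(frame)
    file = file.replace(frame, format(frame_number, "0{}".format(padding)))
    assert file == "/proj/sh010/plate_v001.0042.exr"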
@ -0,0 +1,32 @@
import nuke
from ayon_core.pipeline import load


class MatchmoveLoader(load.LoaderPlugin):
    """
    This will run a matchmove script to create a track in the Nuke script.
    """

    product_types = {"matchmove"}
    representations = {"*"}
    extensions = {"py"}

    settings_category = "nuke"

    defaults = ["Camera", "Object"]

    label = "Run matchmove script"
    icon = "empire"
    color = "orange"

    def load(self, context, name, namespace, data):
        path = self.filepath_from_context(context)
        if path.lower().endswith(".py"):
            exec(open(path).read())

        else:
            msg = "Unsupported script type"
            self.log.error(msg)
            nuke.message(msg)

        return True
server_addon/nuke/client/ayon_nuke/plugins/load/load_model.py (new file)
@ -0,0 +1,207 @@
import nuke
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api.lib import maintained_selection
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)


class AlembicModelLoader(load.LoaderPlugin):
    """
    This will load an alembic model or anim into the script.
    """

    product_types = {"model", "pointcache", "animation"}
    representations = {"*"}
    extensions = {"abc"}

    settings_category = "nuke"

    label = "Load Alembic"
    icon = "cube"
    color = "orange"
    node_color = "0x4ecd91ff"

    def load(self, context, name, namespace, data):
        # get main variables
        project_name = context["project"]["name"]
        version_entity = context["version"]

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        fps = version_attributes.get("fps") or nuke.root()["fps"].getValue()

        namespace = namespace or context["folder"]["name"]
        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        data_imprint = {
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"]
        }
        # add attributes from the version to imprint to metadata knob
        for k in ["source", "fps"]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = self.filepath_from_context(context).replace("\\", "/")

        with maintained_selection():
            model_node = nuke.createNode(
                "ReadGeo2",
                "name {} file {} ".format(
                    object_name, file),
                inpanel=False
            )

            model_node.forceValidate()

            # Ensure all items are imported and selected.
            scene_view = model_node.knob('scene_view')
            scene_view.setImportedItems(scene_view.getAllItems())
            scene_view.setSelectedItems(scene_view.getAllItems())

            model_node["frame_rate"].setValue(float(fps))

            # workaround because nuke's bug is not adding
            # animation keys properly
            xpos = model_node.xpos()
            ypos = model_node.ypos()
            nuke.nodeCopy("%clipboard%")
            nuke.delete(model_node)
            nuke.nodePaste("%clipboard%")
            model_node = nuke.toNode(object_name)
            model_node.setXYpos(xpos, ypos)

        # color node by correct color by actual version
        self.node_version_color(project_name, version_entity, model_node)

        return containerise(
            node=model_node,
            name=name,
            namespace=namespace,
            context=context,
            loader=self.__class__.__name__,
            data=data_imprint)

    def update(self, container, context):
        """
        Called by Scene Inventory when the loaded alembic should be updated
        to the current version.

        Args:
            container: object that holds the node to be updated
            context (dict): relationship data to get the proper
                representation from the DB and persisted data
        Returns:
            None
        """
        # Get version from io
        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        # get corresponding node
        model_node = container["node"]

        # get main variables
        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        fps = version_attributes.get("fps") or nuke.root()["fps"].getValue()

        # prepare data for imprinting
        data_imprint = {
            "representation": repre_entity["id"],
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"]
        }

        # add additional metadata from the version to imprint to Avalon knob
        for k in ["source", "fps"]:
            data_imprint[k] = version_attributes[k]

        # getting file path
        file = get_representation_path(repre_entity).replace("\\", "/")

        with maintained_selection():
            model_node['selected'].setValue(True)

            # collect input output dependencies
            dependencies = model_node.dependencies()
            dependent = model_node.dependent()

            model_node["frame_rate"].setValue(float(fps))
            model_node["file"].setValue(file)

            # Ensure all items are imported and selected.
            scene_view = model_node.knob('scene_view')
            scene_view.setImportedItems(scene_view.getAllItems())
            scene_view.setSelectedItems(scene_view.getAllItems())

            # workaround because nuke's bug is
            # not adding animation keys properly
            xpos = model_node.xpos()
            ypos = model_node.ypos()
            nuke.nodeCopy("%clipboard%")
            nuke.delete(model_node)

            # paste the node back and set the position
            nuke.nodePaste("%clipboard%")
            model_node = nuke.selectedNode()
            model_node.setXYpos(xpos, ypos)

            # link to original input nodes
            for i, input in enumerate(dependencies):
                model_node.setInput(i, input)
            # link to original output nodes
            for d in dependent:
                index = next((i for i, dpcy in enumerate(
                              d.dependencies())
                              if model_node is dpcy), 0)
                d.setInput(index, model_node)

        # color node by correct color by actual version
        self.node_version_color(project_name, version_entity, model_node)

        self.log.info(
            "updated to version: {}".format(version_entity["version"])
        )

        return update_container(model_node, data_imprint)

    def node_version_color(self, project_name, version_entity, node):
        """ Coloring a node by correct color by actual version"""

        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )

        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = self.node_color
        else:
            color_value = "0xd88467ff"
        node["tile_color"].setValue(int(color_value, 16))

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        node = nuke.toNode(container['objectName'])
        with viewer_update_and_undo_stop():
            nuke.delete(node)
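The clipboard round-trip in both methods above is the workaround the
comments mention: serializing the ReadGeo2 node out and pasting it back
forces Nuke to rebuild it so the alembic animation keys load correctly.
Condensed, the pattern is:

    # inside a Nuke session, with the ReadGeo2 node as `model_node`
    xpos, ypos = model_node.xpos(), model_node.ypos()
    nuke.nodeCopy("%clipboard%")      # serialize the node to the clipboard
    nuke.delete(model_node)           # drop the stale instance
    nuke.nodePaste("%clipboard%")     # re-create it; paste selects the copy
    model_node = nuke.selectedNode()
    model_node.setXYpos(xpos, ypos)   # restore the original graph position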
server_addon/nuke/client/ayon_nuke/plugins/load/load_ociolook.py (new file)
@ -0,0 +1,349 @@
import os
import json
import secrets

import nuke
import six
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api import (
    containerise,
    viewer_update_and_undo_stop,
    update_container,
)


class LoadOcioLookNodes(load.LoaderPlugin):
    """Loading Ocio look to the nuke.Node graph"""

    product_types = {"ociolook"}
    representations = {"*"}
    extensions = {"json"}

    settings_category = "nuke"

    label = "Load OcioLook [nodes]"
    order = 0
    icon = "cc"
    color = "white"
    ignore_attr = ["useLifetime"]

    # plugin attributes
    current_node_color = "0x4ecd91ff"
    old_node_color = "0xd88467ff"

    # json file variables
    schema_version = 1

    def load(self, context, name, namespace, data):
        """
        Loading function to get the OCIO look nodes into the node graph

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): namespace name
            data (dict): compulsory attribute > not used

        Returns:
            nuke.Node: containerized nuke.Node object
        """
        namespace = namespace or context["folder"]["name"]
        suffix = secrets.token_hex(nbytes=4)
        node_name = "{}_{}_{}".format(
            name, namespace, suffix)

        # getting file path
        filepath = self.filepath_from_context(context)

        json_f = self._load_json_data(filepath)

        group_node = self._create_group_node(
            filepath, json_f["data"])
        # renaming group node
        group_node["name"].setValue(node_name)

        self._node_version_color(
            context["project"]["name"],
            context["version"],
            group_node
        )

        self.log.info(
            "Loaded lut setup: `{}`".format(group_node["name"].value()))

        return containerise(
            node=group_node,
            name=name,
            namespace=namespace,
            context=context,
            loader=self.__class__.__name__
        )

    def _create_group_node(
        self,
        filepath,
        data,
        group_node=None
    ):
        """Creates group node with all the nodes inside.

        Creating mainly `OCIOFileTransform` nodes with `OCIOColorSpace` nodes
        in between - in case those are needed.

        Arguments:
            filepath (str): path to json file
            data (dict): data from json file
            group_node (Optional[nuke.Node]): group node or None

        Returns:
            nuke.Node: group node with all the nodes inside
        """
        # get corresponding node

        root_working_colorspace = nuke.root()["workingSpaceLUT"].value()

        dir_path = os.path.dirname(filepath)
        all_files = os.listdir(dir_path)

        ocio_working_colorspace = _colorspace_name_by_type(
            data["ocioLookWorkingSpace"])

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        input_node = None
        output_node = None
        if group_node:
            # remove all nodes between Input and Output nodes
            for node in group_node.nodes():
                if node.Class() not in ["Input", "Output"]:
                    nuke.delete(node)
                elif node.Class() == "Input":
                    input_node = node
                elif node.Class() == "Output":
                    output_node = node
        else:
            group_node = nuke.createNode(
                "Group",
                inpanel=False
            )

        # adding content to the group node
        with group_node:
            pre_colorspace = root_working_colorspace

            # reusing input node if it exists during update
            if input_node:
                pre_node = input_node
            else:
                pre_node = nuke.createNode("Input")
                pre_node["name"].setValue("rgb")

            # Compare script working colorspace with ocio working colorspace
            # found in json file and convert to json's if needed
            if pre_colorspace != ocio_working_colorspace:
                pre_node = _add_ocio_colorspace_node(
                    pre_node,
                    pre_colorspace,
                    ocio_working_colorspace
                )
                pre_colorspace = ocio_working_colorspace

            for ocio_item in data["ocioLookItems"]:
                input_space = _colorspace_name_by_type(
                    ocio_item["input_colorspace"])
                output_space = _colorspace_name_by_type(
                    ocio_item["output_colorspace"])

                # making sure we are set to correct colorspace for otio item
                if pre_colorspace != input_space:
                    pre_node = _add_ocio_colorspace_node(
                        pre_node,
                        pre_colorspace,
                        input_space
                    )

                node = nuke.createNode("OCIOFileTransform")

                # file path from lut representation
                extension = ocio_item["ext"]
                item_name = ocio_item["name"]

                item_lut_file = next(
                    (
                        file for file in all_files
                        if file.endswith(extension)
                    ),
                    None
                )
                if not item_lut_file:
                    raise ValueError(
                        "File with extension '{}' not "
                        "found in directory".format(extension)
                    )

                item_lut_path = os.path.join(
                    dir_path, item_lut_file).replace("\\", "/")
                node["file"].setValue(item_lut_path)
                node["name"].setValue(item_name)
                node["direction"].setValue(ocio_item["direction"])
                node["interpolation"].setValue(ocio_item["interpolation"])
                node["working_space"].setValue(input_space)

                pre_node.autoplace()
                node.setInput(0, pre_node)
                node.autoplace()
                # pass output space into pre_colorspace for next iteration
                # or for output node comparison
                pre_colorspace = output_space
                pre_node = node

            # making sure we are back in script working colorspace
            if pre_colorspace != root_working_colorspace:
                pre_node = _add_ocio_colorspace_node(
                    pre_node,
                    pre_colorspace,
                    root_working_colorspace
                )

            # reusing output node if it exists during update
            if not output_node:
                output = nuke.createNode("Output")
            else:
                output = output_node

            output.setInput(0, pre_node)

        return group_node

    def update(self, container, context):
        repre_entity = context["representation"]

        group_node = container["node"]

        filepath = get_representation_path(repre_entity)

        json_f = self._load_json_data(filepath)

        group_node = self._create_group_node(
            filepath,
            json_f["data"],
            group_node
        )

        self._node_version_color(
            context["project"]["name"], context["version"], group_node
        )

        self.log.info("Updated lut setup: `{}`".format(
            group_node["name"].value()))

        return update_container(
            group_node, {"representation": repre_entity["id"]})

    def _load_json_data(self, filepath):
        # getting data from json file with unicode conversion
        with open(filepath, "r") as _file:
            json_f = {self._bytify(key): self._bytify(value)
                      for key, value in json.load(_file).items()}

        # check if the version in json_f is the same as plugin version
        if json_f["version"] != self.schema_version:
            raise KeyError(
                "Version of json file is not the same as plugin version")

        return json_f

    def _bytify(self, input):
        """
        Converts unicode strings to str, recursively walking
        through dictionaries and lists.

        Arguments:
            input (dict/str): input

        Returns:
            dict: with fixed values and keys

        """

        if isinstance(input, dict):
            return {self._bytify(key): self._bytify(value)
                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self._bytify(element) for element in input]
        elif isinstance(input, six.text_type):
            return str(input)
|
||||
else:
|
||||
return input
|
||||
|
||||
def switch(self, container, context):
|
||||
self.update(container, context)
|
||||
|
||||
def remove(self, container):
|
||||
node = nuke.toNode(container['objectName'])
|
||||
with viewer_update_and_undo_stop():
|
||||
nuke.delete(node)
|
||||
|
||||
def _node_version_color(self, project_name, version_entity, node):
|
||||
""" Coloring a node by correct color by actual version"""
|
||||
|
||||
last_version_entity = ayon_api.get_last_version_by_product_id(
|
||||
project_name, version_entity["productId"], fields={"id"}
|
||||
)
|
||||
|
||||
# change color of node
|
||||
if version_entity["id"] == last_version_entity["id"]:
|
||||
color_value = self.current_node_color
|
||||
else:
|
||||
color_value = self.old_node_color
|
||||
node["tile_color"].setValue(int(color_value, 16))
|
||||
|
||||
|
||||
def _colorspace_name_by_type(colorspace_data):
|
||||
"""
|
||||
Returns colorspace name by type
|
||||
|
||||
Arguments:
|
||||
colorspace_data (dict): colorspace data
|
||||
|
||||
Returns:
|
||||
str: colorspace name
|
||||
"""
|
||||
if colorspace_data["type"] == "colorspaces":
|
||||
return colorspace_data["name"]
|
||||
elif colorspace_data["type"] == "roles":
|
||||
return colorspace_data["colorspace"]
|
||||
else:
|
||||
raise KeyError("Unknown colorspace type: {}".format(
|
||||
colorspace_data["type"]))
|
||||
|
||||
|
||||
def _add_ocio_colorspace_node(pre_node, input_space, output_space):
|
||||
"""
|
||||
Adds OCIOColorSpace node to the node graph
|
||||
|
||||
Arguments:
|
||||
pre_node (nuke.Node): node to connect to
|
||||
input_space (str): input colorspace
|
||||
output_space (str): output colorspace
|
||||
|
||||
Returns:
|
||||
nuke.Node: node with OCIOColorSpace node
|
||||
"""
|
||||
node = nuke.createNode("OCIOColorSpace")
|
||||
node.setInput(0, pre_node)
|
||||
node["in_colorspace"].setValue(input_space)
|
||||
node["out_colorspace"].setValue(output_space)
|
||||
|
||||
pre_node.autoplace()
|
||||
node.setInput(0, pre_node)
|
||||
node.autoplace()
|
||||
|
||||
return node
|
||||
|
|
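A note on the conversion-chain logic in `_create_group_node` above: an `OCIOColorSpace` node is only inserted where two adjacent colorspaces differ, threading `pre_colorspace` through the LUT items and back to the script working space. A minimal pure-Python sketch of that same decision logic (node creation replaced by strings, the item list is hypothetical):

# Sketch only: mirrors the chain-building decisions, not the nuke API.
def build_chain(working, items):
    chain, current = [], working
    for item in items:
        if current != item["in"]:
            chain.append("OCIOColorSpace({} -> {})".format(current, item["in"]))
        chain.append("OCIOFileTransform({})".format(item["name"]))
        current = item["out"]
    if current != working:
        chain.append("OCIOColorSpace({} -> {})".format(current, working))
    return chain

print(build_chain("scene_linear", [
    {"name": "grade.cc", "in": "ACEScct", "out": "ACEScct"},
]))
# ['OCIOColorSpace(scene_linear -> ACEScct)',
#  'OCIOFileTransform(grade.cc)',
#  'OCIOColorSpace(ACEScct -> scene_linear)']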
@ -0,0 +1,162 @@
import nuke
import ayon_api

from ayon_core.pipeline import (
    load,
    get_representation_path,
)
from ayon_nuke.api.lib import get_avalon_knob_data
from ayon_nuke.api import (
    containerise,
    update_container,
    viewer_update_and_undo_stop
)


class LinkAsGroup(load.LoaderPlugin):
    """Copy the published file to be pasted at the desired location"""

    product_types = {"workfile", "nukenodes"}
    representations = {"*"}
    extensions = {"nk"}

    settings_category = "nuke"

    label = "Load Precomp"
    order = 0
    icon = "file"
    color = "#cc0000"

    def load(self, context, name, namespace, data):
        # for k, v in context.items():
        #     log.info("key: `{}`, value: {}\n".format(k, v))
        version_entity = context["version"]

        version_attributes = version_entity["attrib"]
        first = version_attributes.get("frameStart")
        last = version_attributes.get("frameEnd")
        colorspace = version_attributes.get("colorSpace")

        # Fallback to folder name when namespace is None
        if namespace is None:
            namespace = context["folder"]["name"]

        file = self.filepath_from_context(context).replace("\\", "/")
        self.log.info("file: {}\n".format(file))

        data_imprint = {
            "startingFrame": first,
            "frameStart": first,
            "frameEnd": last,
            "version": version_entity["version"]
        }
        # add additional metadata from the version to imprint to Avalon knob
        for k in [
            "frameStart",
            "frameEnd",
            "handleStart",
            "handleEnd",
            "source",
            "fps"
        ]:
            data_imprint[k] = version_attributes[k]

        # group context is set to precomp, so back up one level.
        nuke.endGroup()

        # P = nuke.nodes.LiveGroup("file {}".format(file))
        P = nuke.createNode(
            "Precomp",
            "file {}".format(file),
            inpanel=False
        )

        # Set colorspace defined in version data
        self.log.info("colorspace: {}\n".format(colorspace))

        P["name"].setValue("{}_{}".format(name, namespace))
        P["useOutput"].setValue(True)

        with P:
            # iterate through all nodes in group node and find pype writes
            writes = [n.name() for n in nuke.allNodes()
                      if n.Class() == "Group"
                      if get_avalon_knob_data(n)]

            if writes:
                # create panel for selecting output
                panel_choices = " ".join(writes)
                panel_label = "Select write node for output"
                p = nuke.Panel("Select Write Node")
                p.addEnumerationPulldown(
                    panel_label, panel_choices)
                p.show()
                P["output"].setValue(p.value(panel_label))

        P["tile_color"].setValue(0xff0ff0ff)

        return containerise(
            node=P,
            name=name,
            namespace=namespace,
            context=context,
            loader=self.__class__.__name__,
            data=data_imprint)

    def switch(self, container, context):
        self.update(container, context)

    def update(self, container, context):
        """Update the Loader's path.

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs.
        """
        node = container["node"]

        project_name = context["project"]["name"]
        version_entity = context["version"]
        repre_entity = context["representation"]

        root = get_representation_path(repre_entity).replace("\\", "/")

        # Get start frame from version data
        version_attributes = version_entity["attrib"]
        updated_dict = {
            "representation": repre_entity["id"],
            "frameEnd": version_attributes.get("frameEnd"),
            "version": version_entity["version"],
            "colorspace": version_attributes.get("colorSpace"),
            "source": version_attributes.get("source"),
            "fps": version_attributes.get("fps"),
        }

        # Update the imprinted representation
        update_container(
            node,
            updated_dict
        )

        node["file"].setValue(root)

        last_version_entity = ayon_api.get_last_version_by_product_id(
            project_name, version_entity["productId"], fields={"id"}
        )
        # change color of node
        if version_entity["id"] == last_version_entity["id"]:
            color_value = "0xff0ff0ff"
        else:
            color_value = "0xd84f20ff"
        node["tile_color"].setValue(int(color_value, 16))

        self.log.info(
            "updated to version: {}".format(version_entity["version"])
        )

    def remove(self, container):
        node = container["node"]
        with viewer_update_and_undo_stop():
            nuke.delete(node)
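The update above flags outdated containers by node color: Nuke's `tile_color` knob takes a packed 32-bit 0xRRGGBBAA integer, and the plugin stores the two states as hex strings. A quick standalone check of the string-to-int conversion used here (color values as in the plugin):

latest = int("0xff0ff0ff", 16)    # latest version color
outdated = int("0xd84f20ff", 16)  # outdated version color
assert latest == 0xff0ff0ff and outdated == 0xd84f20ff
print(hex(latest), hex(outdated))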
@ -0,0 +1,62 @@
from pprint import pformat
import pyblish.api
from ayon_nuke.api import lib as pnlib
import nuke


class CollectBackdrops(pyblish.api.InstancePlugin):
    """Collect Backdrop node instance and its content"""

    order = pyblish.api.CollectorOrder + 0.22
    label = "Collect Backdrop"
    hosts = ["nuke"]
    families = ["nukenodes"]

    settings_category = "nuke"

    def process(self, instance):
        self.log.debug(pformat(instance.data))

        bckn = instance.data["transientData"]["node"]

        # define size of the backdrop
        left = bckn.xpos()
        top = bckn.ypos()
        right = left + bckn['bdwidth'].value()
        bottom = top + bckn['bdheight'].value()

        instance.data["transientData"]["childNodes"] = []
        # iterate all nodes
        for node in nuke.allNodes():

            # exclude viewer
            if node.Class() == "Viewer":
                continue

            # find all related nodes
            if (node.xpos() > left) \
                    and (node.xpos() + node.screenWidth() < right) \
                    and (node.ypos() > top) \
                    and (node.ypos() + node.screenHeight() < bottom):

                # add contained nodes to instance's node list
                instance.data["transientData"]["childNodes"].append(node)

        # get all connections from outside of backdrop
        nodes = instance.data["transientData"]["childNodes"]
        connections_in, connections_out = pnlib.get_dependent_nodes(nodes)
        instance.data["transientData"]["nodeConnectionsIn"] = connections_in
        instance.data["transientData"]["nodeConnectionsOut"] = connections_out

        # make label nicer
        instance.data["label"] = "{0} ({1} nodes)".format(
            bckn.name(), len(instance.data["transientData"]["childNodes"]))

        # get version
        version = instance.context.data.get('version')

        if version:
            instance.data['version'] = version

        self.log.debug("Backdrop instance collected: `{}`".format(instance))
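The collector above treats a node as belonging to the backdrop only when its whole on-screen rectangle fits strictly within the backdrop bounds. A standalone sketch of that containment test, with plain tuples standing in for nuke nodes:

# (x, y, w, h) rectangles stand in for node positions and screen sizes.
def contains(backdrop, node):
    bx, by, bw, bh = backdrop
    nx, ny, nw, nh = node
    return (nx > bx and nx + nw < bx + bw
            and ny > by and ny + nh < by + bh)

backdrop = (0, 0, 300, 200)
print(contains(backdrop, (50, 50, 80, 20)))   # True - fully inside
print(contains(backdrop, (250, 50, 80, 20)))  # False - overhangs the edge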
@ -0,0 +1,69 @@
import os
import nuke
import pyblish.api
from ayon_core.lib import get_version_from_path
import ayon_nuke.api as napi
from ayon_core.pipeline import KnownPublishError


class CollectContextData(pyblish.api.ContextPlugin):
    """Collect current context publish."""

    order = pyblish.api.CollectorOrder - 0.499
    label = "Collect context data"
    hosts = ['nuke']

    settings_category = "nuke"

    def process(self, context):  # sourcery skip: avoid-builtin-shadow
        root_node = nuke.root()

        current_file = os.path.normpath(root_node.name())

        if current_file.lower() == "root":
            raise KnownPublishError(
                "Workfile is not correct file name. \n"
                "Use workfile tool to manage the name correctly."
            )

        # Get frame range
        first_frame = int(root_node["first_frame"].getValue())
        last_frame = int(root_node["last_frame"].getValue())

        # get instance data from root
        root_instance_context = napi.get_node_data(
            root_node, napi.INSTANCE_DATA_KNOB
        )

        handle_start = root_instance_context["handleStart"]
        handle_end = root_instance_context["handleEnd"]

        # Get format
        format = root_node['format'].value()
        resolution_width = format.width()
        resolution_height = format.height()
        pixel_aspect = format.pixelAspect()

        script_data = {
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "resolutionWidth": resolution_width,
            "resolutionHeight": resolution_height,
            "pixelAspect": pixel_aspect,

            "handleStart": handle_start,
            "handleEnd": handle_end,
            "step": 1,
            "fps": root_node['fps'].value(),

            "currentFile": current_file,
            "version": int(get_version_from_path(current_file)),

            "host": pyblish.api.current_host(),
            "hostVersion": nuke.NUKE_VERSION_STRING
        }

        context.data["scriptData"] = script_data
        context.data.update(script_data)

        self.log.debug('Context from Nuke script collected')
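Note the convention above: the Nuke root frame range is assumed to include handles, so the published `frameStart`/`frameEnd` are derived by trimming the handles back off. A quick worked example of that arithmetic (frame numbers are hypothetical):

first_frame, last_frame = 991, 1110       # root range, handles included
handle_start, handle_end = 10, 10
frame_start = first_frame + handle_start  # 1001
frame_end = last_frame - handle_end       # 1100
print(frame_start, frame_end)             # 1001 1100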
@ -0,0 +1,19 @@
import nuke

import pyblish.api


class CollectFramerate(pyblish.api.ContextPlugin):
    """Collect framerate."""

    order = pyblish.api.CollectorOrder
    label = "Collect Framerate"
    hosts = [
        "nuke",
        "nukeassist"
    ]

    settings_category = "nuke"

    def process(self, context):
        context.data["fps"] = nuke.root()["fps"].getValue()
@ -0,0 +1,49 @@
import pyblish.api
import nuke


class CollectGizmo(pyblish.api.InstancePlugin):
    """Collect Gizmo (group) node instance and its content"""

    order = pyblish.api.CollectorOrder + 0.22
    label = "Collect Gizmo (group)"
    hosts = ["nuke"]
    families = ["gizmo"]

    settings_category = "nuke"

    def process(self, instance):

        gizmo_node = instance.data["transientData"]["node"]

        # add product type to families
        instance.data["families"].insert(0, instance.data["productType"])
        # make label nicer
        instance.data["label"] = gizmo_node.name()

        # Get frame range
        handle_start = instance.context.data["handleStart"]
        handle_end = instance.context.data["handleEnd"]
        first_frame = int(nuke.root()["first_frame"].getValue())
        last_frame = int(nuke.root()["last_frame"].getValue())
        families = [instance.data["productType"]] + instance.data["families"]

        # Add version data to instance
        version_data = {
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "colorspace": nuke.root().knob('workingSpaceLUT').value(),
            "families": families,
            "productName": instance.data["productName"],
            "fps": instance.context.data["fps"]
        }

        instance.data.update({
            "versionData": version_data,
            "frameStart": first_frame,
            "frameEnd": last_frame
        })
        self.log.debug("Gizmo instance collected: `{}`".format(instance))
@ -0,0 +1,58 @@
import pyblish.api

from ayon_core.pipeline.publish import (
    AYONPyblishPluginMixin
)


class CollectRenderOnFarm(pyblish.api.ContextPlugin):
    """Setup instances for render on farm submission."""

    # Needs to be after CollectFromCreateContext
    order = pyblish.api.CollectorOrder - 0.49
    label = "Collect Render On Farm"
    hosts = ["nuke"]

    settings_category = "nuke"

    def process(self, context):
        if not context.data.get("render_on_farm", False):
            return

        for instance in context:
            if instance.data["family"] == "workfile":
                instance.data["active"] = False
                continue

            # Filter out all other instances.
            node = instance.data["transientData"]["node"]
            if node.name() != instance.context.data["node_name"]:
                instance.data["active"] = False
                continue

            instance.data["families"].append("render_on_farm")

            # Enable for farm publishing.
            instance.data["farm"] = True

            # Skip workfile version incremental save.
            instance.context.data["increment_script_version"] = False


class SetupRenderOnFarm(pyblish.api.InstancePlugin, AYONPyblishPluginMixin):
    """Setup instance for render on farm submission."""

    order = pyblish.api.CollectorOrder + 0.4999
    label = "Setup Render On Farm"
    hosts = ["nuke"]
    families = ["render_on_farm"]

    def process(self, instance):
        # Clear the families as we only want the main family, i.e. no review
        # etc.
        instance.data["families"] = ["render_on_farm"]

        # Use the workfile instead of published.
        publish_attributes = instance.data["publish_attributes"]
        plugin_attributes = publish_attributes["NukeSubmitDeadline"]
        plugin_attributes["use_published_workfile"] = False
@ -0,0 +1,48 @@
import pyblish.api
import nuke


class CollectModel(pyblish.api.InstancePlugin):
    """Collect Model node instance and its content"""

    order = pyblish.api.CollectorOrder + 0.22
    label = "Collect Model"
    hosts = ["nuke"]
    families = ["model"]

    settings_category = "nuke"

    def process(self, instance):

        geo_node = instance.data["transientData"]["node"]

        # add product type to families
        instance.data["families"].insert(0, instance.data["productType"])
        # make label nicer
        instance.data["label"] = geo_node.name()

        # Get frame range
        handle_start = instance.context.data["handleStart"]
        handle_end = instance.context.data["handleEnd"]
        first_frame = int(nuke.root()["first_frame"].getValue())
        last_frame = int(nuke.root()["last_frame"].getValue())
        families = [instance.data["productType"]] + instance.data["families"]
        # Add version data to instance
        version_data = {
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "colorspace": nuke.root().knob('workingSpaceLUT').value(),
            "families": families,
            "productName": instance.data["productName"],
            "fps": instance.context.data["fps"]
        }

        instance.data.update({
            "versionData": version_data,
            "frameStart": first_frame,
            "frameEnd": last_frame
        })
        self.log.debug("Model instance collected: `{}`".format(instance))
@ -0,0 +1,57 @@
import nuke
import pyblish.api


class CollectInstanceData(pyblish.api.InstancePlugin):
    """Collect Nuke instance data"""

    order = pyblish.api.CollectorOrder - 0.49
    label = "Collect Nuke Instance Data"
    hosts = ["nuke", "nukeassist"]

    settings_category = "nuke"

    # presets
    sync_workfile_version_on_families = []

    def process(self, instance):
        product_type = instance.data["productType"]

        # Get format
        root = nuke.root()
        format_ = root['format'].value()
        resolution_width = format_.width()
        resolution_height = format_.height()
        pixel_aspect = format_.pixelAspect()

        # sync workfile version
        if product_type in self.sync_workfile_version_on_families:
            self.log.debug(
                "Syncing version with workfile for '{}'".format(
                    product_type
                )
            )
            # get version to instance for integration
            instance.data['version'] = instance.context.data['version']

        instance.data.update({
            "step": 1,
            "fps": root['fps'].value(),
            "resolutionWidth": resolution_width,
            "resolutionHeight": resolution_height,
            "pixelAspect": pixel_aspect
        })

        # add creator attributes to instance
        creator_attributes = instance.data["creator_attributes"]
        instance.data.update(creator_attributes)

        # add review family if review activated on instance
        if instance.data.get("review"):
            instance.data["families"].append("review")

        self.log.debug("Collected instance: {}".format(
            instance.data))
@ -0,0 +1,124 @@
import os
import re
import nuke
import pyblish.api


class CollectNukeReads(pyblish.api.InstancePlugin):
    """Collect all read nodes."""

    order = pyblish.api.CollectorOrder + 0.04
    label = "Collect Source Reads"
    hosts = ["nuke", "nukeassist"]
    families = ["source"]

    settings_category = "nuke"

    def process(self, instance):
        self.log.debug("checking instance: {}".format(instance))

        node = instance.data["transientData"]["node"]
        if node.Class() != "Read":
            return

        file_path = node["file"].value()
        file_name = os.path.basename(file_path)
        items = file_name.split(".")

        if len(items) < 2:
            raise ValueError

        ext = items[-1]

        # Get frame range
        handle_start = instance.context.data["handleStart"]
        handle_end = instance.context.data["handleEnd"]
        first_frame = node['first'].value()
        last_frame = node['last'].value()

        # colorspace
        colorspace = node["colorspace"].value()
        if "default" in colorspace:
            colorspace = colorspace.replace("default (", "").replace(")", "")

        # # Easier way to sequence - Not tested
        # isSequence = True
        # if first_frame == last_frame:
        #     isSequence = False

        isSequence = False
        if len(items) > 1:
            sequence = items[-2]
            hash_regex = re.compile(r'([#*])')
            seq_regex = re.compile(r'[%0-9*d]')
            hash_match = re.match(hash_regex, sequence)
            seq_match = re.match(seq_regex, sequence)
            if hash_match or seq_match:
                isSequence = True

        # get source path
        path = nuke.filename(node)
        source_dir = os.path.dirname(path)
        self.log.debug('source dir: {}'.format(source_dir))

        if isSequence:
            source_files = [f for f in os.listdir(source_dir)
                            if ext in f
                            if items[0] in f]
        else:
            source_files = file_name

        # Include start and end render frame in label
        name = node.name()
        label = "{0} ({1}-{2})".format(
            name,
            int(first_frame),
            int(last_frame)
        )

        self.log.debug("collected_frames: {}".format(label))

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': ext,
            'ext': ext,
            'files': source_files,
            "stagingDir": source_dir,
            "frameStart": "%0{}d".format(
                len(str(last_frame))) % first_frame
        }
        instance.data["representations"].append(representation)

        transfer = node["publish"] if "publish" in node.knobs() else False
        instance.data['transfer'] = transfer

        # Add version data to instance
        version_data = {
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "colorspace": colorspace,
            "families": [instance.data["productType"]],
            "productName": instance.data["productName"],
            "fps": instance.context.data["fps"]
        }

        instance.data.update({
            "versionData": version_data,
            "path": path,
            "stagingDir": source_dir,
            "ext": ext,
            "label": label,
            "frameStart": first_frame,
            "frameEnd": last_frame,
            "colorspace": colorspace,
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "step": 1,
            "fps": int(nuke.root()['fps'].value())
        })

        self.log.debug("instance.data: {}".format(instance.data))
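The read collector above decides between a single file and a sequence by inspecting the frame token in the second-to-last filename part. A standalone sketch of that detection, using the same two regexes (hash/star tokens or printf-style and numeric frame tokens):

import re

hash_regex = re.compile(r'([#*])')
seq_regex = re.compile(r'[%0-9*d]')

def is_sequence(file_name):
    items = file_name.split(".")
    if len(items) < 2:
        return False
    token = items[-2]
    # match() checks from the start, so a leading '#', '*', '%' or digit
    # marks the token as a frame placeholder / frame number
    return bool(hash_regex.match(token) or seq_regex.match(token))

print(is_sequence("plate.%04d.exr"))  # True
print(is_sequence("plate.####.exr"))  # True
print(is_sequence("plate.1001.exr"))  # True
print(is_sequence("plate.exr"))       # False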
@ -0,0 +1,48 @@
import pyblish.api
import nuke


class CollectSlate(pyblish.api.InstancePlugin):
    """Check if SLATE node is in scene and connected to rendering tree"""

    order = pyblish.api.CollectorOrder + 0.002
    label = "Collect Slate Node"
    hosts = ["nuke"]
    families = ["render"]

    settings_category = "nuke"

    def process(self, instance):
        node = instance.data["transientData"]["node"]

        slate = next(
            (
                n_ for n_ in nuke.allNodes()
                if "slate" in n_.name().lower()
                if not n_["disable"].getValue() and
                "publish_instance" not in n_.knobs()  # Exclude instance nodes.
            ),
            None
        )

        if slate:
            # check if slate node is connected to write node tree
            slate_check = 0
            slate_node = None
            while slate_check == 0:
                try:
                    node = node.dependencies()[0]
                    if slate.name() in node.name():
                        slate_node = node
                        slate_check = 1
                except IndexError:
                    break

            if slate_node:
                instance.data["slateNode"] = slate_node
                instance.data["slate"] = True
                instance.data["families"].append("slate")
                self.log.debug(
                    "Slate node is in node graph: `{}`".format(slate.name()))
                self.log.debug(
                    "__ instance.data: `{}`".format(instance.data))
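The slate check above walks up the first input of each node until it either reaches the slate or runs out of dependencies. A standalone sketch of that upstream walk over a toy node graph (the Node class is a stand-in for nuke nodes):

class Node:
    def __init__(self, name, deps=None):
        self._name, self._deps = name, deps or []
    def name(self):
        return self._name
    def dependencies(self):
        return self._deps

slate = Node("SLATE_main")
write = Node("Write1", [Node("Grade1", [slate])])

node, found = write, None
while found is None:
    try:
        node = node.dependencies()[0]  # always follow the first input
        if slate.name() in node.name():
            found = node
    except IndexError:
        break  # reached a node with no inputs
print(found.name() if found else "no slate upstream")  # SLATE_main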
@ -0,0 +1,44 @@
import os
import nuke
import pyblish.api


class CollectWorkfile(pyblish.api.InstancePlugin):
    """Collect current script for publish."""

    order = pyblish.api.CollectorOrder
    label = "Collect Workfile"
    hosts = ['nuke']
    families = ["workfile"]

    settings_category = "nuke"

    def process(self, instance):  # sourcery skip: avoid-builtin-shadow

        script_data = instance.context.data["scriptData"]
        current_file = os.path.normpath(nuke.root().name())

        # creating instances per write node
        staging_dir = os.path.dirname(current_file)
        base_name = os.path.basename(current_file)

        # creating representation
        representation = {
            'name': 'nk',
            'ext': 'nk',
            'files': base_name,
            "stagingDir": staging_dir,
        }

        # creating instance data
        instance.data.update({
            "name": base_name,
            "representations": [representation]
        })

        # adding basic script data
        instance.data.update(script_data)

        self.log.debug(
            "Collected current script version: {}".format(current_file)
        )
@ -0,0 +1,402 @@
import os
import nuke
import pyblish.api
from ayon_nuke import api as napi
from ayon_core.pipeline import publish


class CollectNukeWrites(pyblish.api.InstancePlugin,
                        publish.ColormanagedPyblishPluginMixin):
    """Collect all write nodes."""

    order = pyblish.api.CollectorOrder + 0.0021
    label = "Collect Writes"
    hosts = ["nuke", "nukeassist"]
    families = ["render", "prerender", "image"]

    settings_category = "nuke"

    # cache
    _write_nodes = {}
    _frame_ranges = {}

    def process(self, instance):

        group_node = instance.data["transientData"]["node"]
        render_target = instance.data["render_target"]

        write_node = self._write_node_helper(instance)

        if write_node is None:
            self.log.warning(
                "Created node '{}' is missing write node!".format(
                    group_node.name()
                )
            )
            return

        # get colorspace and add to version data
        colorspace = napi.get_colorspace_from_node(write_node)

        if render_target == "frames":
            self._set_existing_files_data(instance, colorspace)

        elif render_target == "frames_farm":
            collected_frames = self._set_existing_files_data(
                instance, colorspace)

            self._set_expected_files(instance, collected_frames)

            self._add_farm_instance_data(instance)

        elif render_target == "farm":
            self._add_farm_instance_data(instance)

        # set additional instance data
        self._set_additional_instance_data(instance, render_target, colorspace)

    def _set_existing_files_data(self, instance, colorspace):
        """Set existing files data to instance data.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            colorspace (str): colorspace

        Returns:
            list: collected frames
        """
        collected_frames = self._get_collected_frames(instance)

        representation = self._get_existing_frames_representation(
            instance, collected_frames
        )

        # inject colorspace data
        self.set_representation_colorspace(
            representation, instance.context,
            colorspace=colorspace
        )

        instance.data["representations"].append(representation)

        return collected_frames

    def _set_expected_files(self, instance, collected_frames):
        """Set expected files to instance data.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            collected_frames (list): collected frames
        """
        write_node = self._write_node_helper(instance)

        write_file_path = nuke.filename(write_node)
        output_dir = os.path.dirname(write_file_path)

        instance.data["expectedFiles"] = [
            os.path.join(output_dir, source_file)
            for source_file in collected_frames
        ]

    def _get_frame_range_data(self, instance):
        """Get frame range data from instance.

        Args:
            instance (pyblish.api.Instance): pyblish instance

        Returns:
            tuple: first_frame, last_frame
        """

        instance_name = instance.data["name"]

        if self._frame_ranges.get(instance_name):
            # return cached frame range
            return self._frame_ranges[instance_name]

        write_node = self._write_node_helper(instance)

        # Get frame range from workfile
        first_frame = int(nuke.root()["first_frame"].getValue())
        last_frame = int(nuke.root()["last_frame"].getValue())

        # Get frame range from write node if activated
        if write_node["use_limit"].getValue():
            first_frame = int(write_node["first"].getValue())
            last_frame = int(write_node["last"].getValue())

        # add to cache
        self._frame_ranges[instance_name] = (first_frame, last_frame)

        return first_frame, last_frame

    def _set_additional_instance_data(
        self, instance, render_target, colorspace
    ):
        """Set additional instance data.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            render_target (str): render target
            colorspace (str): colorspace
        """
        product_type = instance.data["productType"]

        # add targeted family to families
        instance.data["families"].append(
            "{}.{}".format(product_type, render_target)
        )
        self.log.debug("Appending render target to families: {}.{}".format(
            product_type, render_target)
        )

        write_node = self._write_node_helper(instance)

        # Determine defined file type
        ext = write_node["file_type"].value()

        # determine defined channel type
        color_channels = write_node["channels"].value()

        # get frame range data
        handle_start = instance.context.data["handleStart"]
        handle_end = instance.context.data["handleEnd"]
        first_frame, last_frame = self._get_frame_range_data(instance)

        # get output paths
        write_file_path = nuke.filename(write_node)
        output_dir = os.path.dirname(write_file_path)

        # TODO: remove this when we have proper colorspace support
        version_data = {
            "colorspace": colorspace
        }

        instance.data.update({
            "versionData": version_data,
            "path": write_file_path,
            "outputDir": output_dir,
            "ext": ext,
            "colorspace": colorspace,
            "color_channels": color_channels
        })

        if product_type == "render":
            instance.data.update({
                "handleStart": handle_start,
                "handleEnd": handle_end,
                "frameStart": first_frame + handle_start,
                "frameEnd": last_frame - handle_end,
                "frameStartHandle": first_frame,
                "frameEndHandle": last_frame,
            })
        else:
            instance.data.update({
                "handleStart": 0,
                "handleEnd": 0,
                "frameStart": first_frame,
                "frameEnd": last_frame,
                "frameStartHandle": first_frame,
                "frameEndHandle": last_frame,
            })

        # TODO temporarily set stagingDir as persistent for backward
        # compatibility. This is mainly focused on `renders` folders which
        # were previously not cleaned up (and could be used in read nodes)
        # this logic should be removed and replaced with custom staging dir
        instance.data["stagingDir_persistent"] = True

    def _write_node_helper(self, instance):
        """Helper function to get write node from instance.

        Also sets instance transient data with child nodes.

        Args:
            instance (pyblish.api.Instance): pyblish instance

        Returns:
            nuke.Node: write node
        """
        instance_name = instance.data["name"]

        if self._write_nodes.get(instance_name):
            # return cached write node
            return self._write_nodes[instance_name]

        # get all child nodes from group node
        child_nodes = napi.get_instance_group_node_childs(instance)

        # set child nodes to instance transient data
        instance.data["transientData"]["childNodes"] = child_nodes

        write_node = None
        for node_ in child_nodes:
            if node_.Class() == "Write":
                write_node = node_

        if write_node:
            # for slate frame extraction
            instance.data["transientData"]["writeNode"] = write_node
            # add to cache
            self._write_nodes[instance_name] = write_node

            return self._write_nodes[instance_name]

    def _get_existing_frames_representation(
        self,
        instance,
        collected_frames
    ):
        """Get existing frames representation.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            collected_frames (list): collected frames

        Returns:
            dict: representation
        """

        first_frame, last_frame = self._get_frame_range_data(instance)

        write_node = self._write_node_helper(instance)

        write_file_path = nuke.filename(write_node)
        output_dir = os.path.dirname(write_file_path)

        # Determine defined file type
        ext = write_node["file_type"].value()

        representation = {
            "name": ext,
            "ext": ext,
            "stagingDir": output_dir,
            "tags": []
        }

        # set slate frame
        collected_frames = self._add_slate_frame_to_collected_frames(
            instance,
            collected_frames,
            first_frame,
            last_frame
        )

        if len(collected_frames) == 1:
            representation['files'] = collected_frames.pop()
        else:
            representation['files'] = collected_frames

        return representation

    def _get_frame_start_str(self, first_frame, last_frame):
        """Get frame start string.

        Args:
            first_frame (int): first frame
            last_frame (int): last frame

        Returns:
            str: frame start string
        """
        # convert first frame to string with padding
        return (
            "{{:0{}d}}".format(len(str(last_frame)))
        ).format(first_frame)

    def _add_slate_frame_to_collected_frames(
        self,
        instance,
        collected_frames,
        first_frame,
        last_frame
    ):
        """Add slate frame to collected frames.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            collected_frames (list): collected frames
            first_frame (int): first frame
            last_frame (int): last frame

        Returns:
            list: collected frames
        """
        frame_start_str = self._get_frame_start_str(first_frame, last_frame)
        frame_length = int(last_frame - first_frame + 1)

        # this will only run if slate frame is not already
        # rendered from previous publishes
        if (
            "slate" in instance.data["families"]
            and frame_length == len(collected_frames)
        ):
            frame_slate_str = self._get_frame_start_str(
                first_frame - 1,
                last_frame
            )

            slate_frame = collected_frames[0].replace(
                frame_start_str, frame_slate_str)
            collected_frames.insert(0, slate_frame)

        return collected_frames

    def _add_farm_instance_data(self, instance):
        """Add farm publishing related instance data.

        Args:
            instance (pyblish.api.Instance): pyblish instance
        """

        # make sure rendered sequence on farm will
        # be used for extract review
        if not instance.data.get("review"):
            instance.data["useSequenceForReview"] = False

        # Farm rendering
        instance.data.update({
            "transfer": False,
            "farm": True  # to skip integrate
        })
        self.log.info("Farm rendering ON ...")

    def _get_collected_frames(self, instance):
        """Get collected frames.

        Args:
            instance (pyblish.api.Instance): pyblish instance

        Returns:
            list: collected frames
        """

        first_frame, last_frame = self._get_frame_range_data(instance)

        write_node = self._write_node_helper(instance)

        write_file_path = nuke.filename(write_node)
        output_dir = os.path.dirname(write_file_path)

        # get file path knob
        node_file_knob = write_node["file"]
        # list file paths based on input frames
        expected_paths = list(sorted({
            node_file_knob.evaluate(frame)
            for frame in range(first_frame, last_frame + 1)
        }))

        # convert only to base names
        expected_filenames = {
            os.path.basename(filepath)
            for filepath in expected_paths
        }

        # make sure files are existing at folder
        collected_frames = [
            filename
            for filename in os.listdir(output_dir)
            if filename in expected_filenames
        ]

        return collected_frames
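Two details of the write collector are worth seeing concretely: the frame-start token is zero-padded to the digit count of the last frame number, and the slate is injected by rewriting that token to `first_frame - 1` in the first collected filename. A standalone walk-through with hypothetical frame numbers:

def frame_start_str(first_frame, last_frame):
    # pad first_frame to the digit count of last_frame
    return ("{{:0{}d}}".format(len(str(last_frame)))).format(first_frame)

first, last = 1001, 1100
token = frame_start_str(first, last)            # '1001'
slate_token = frame_start_str(first - 1, last)  # '1000'

frames = ["shot010.{}.exr".format(first + i) for i in range(3)]
slate_frame = frames[0].replace(token, slate_token)
print(slate_frame)  # shot010.1000.exr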
@ -0,0 +1,106 @@
import os

import nuke

import pyblish.api

from ayon_core.pipeline import publish
from ayon_nuke.api.lib import (
    maintained_selection,
    reset_selection,
    select_nodes
)


class ExtractBackdropNode(publish.Extractor):
    """Extracting content of backdrop nodes

    Will create nuke script only with containing nodes.
    Also it will solve Input and Output nodes.
    """

    order = pyblish.api.ExtractorOrder
    label = "Extract Backdrop"
    hosts = ["nuke"]
    families = ["nukenodes"]

    settings_category = "nuke"

    def process(self, instance):
        tmp_nodes = []
        child_nodes = instance.data["transientData"]["childNodes"]
        # all connections outside of backdrop
        connections_in = instance.data["transientData"]["nodeConnectionsIn"]
        connections_out = instance.data["transientData"]["nodeConnectionsOut"]
        self.log.debug("_ connections_in: `{}`".format(connections_in))
        self.log.debug("_ connections_out: `{}`".format(connections_out))

        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.nk".format(instance.name)
        path = os.path.join(stagingdir, filename)

        # maintain selection
        with maintained_selection():
            # create input child_nodes and name them as passing node (*_INP)
            for n, inputs in connections_in.items():
                for i, input in inputs:
                    inpn = nuke.createNode("Input")
                    inpn["name"].setValue("{}_{}_INP".format(n.name(), i))
                    n.setInput(i, inpn)
                    inpn.setXYpos(input.xpos(), input.ypos())
                    child_nodes.append(inpn)
                    tmp_nodes.append(inpn)

            reset_selection()

            # connect output node
            for n, output in connections_out.items():
                opn = nuke.createNode("Output")
                output.setInput(
                    next((i for i, d in enumerate(output.dependencies())
                          if d.name() in n.name()), 0), opn)
                opn.setInput(0, n)
                opn.autoplace()
                child_nodes.append(opn)
                tmp_nodes.append(opn)
                reset_selection()

            # select child_nodes to copy
            reset_selection()
            select_nodes(child_nodes)
            # create tmp nk file
            # save file to the path
            nuke.nodeCopy(path)

            # Clean up
            for tn in tmp_nodes:
                nuke.delete(tn)

            # restore original connections
            # reconnect input node
            for n, inputs in connections_in.items():
                for i, input in inputs:
                    n.setInput(i, input)

            # reconnect output node
            for n, output in connections_out.items():
                output.setInput(
                    next((i for i, d in enumerate(output.dependencies())
                          if d.name() in n.name()), 0), n)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        # create representation
        representation = {
            'name': 'nk',
            'ext': 'nk',
            'files': filename,
            "stagingDir": stagingdir
        }
        instance.data["representations"].append(representation)

        self.log.debug("Extracted instance '{}' to: {}".format(
            instance.name, path))
@ -0,0 +1,185 @@
import os
import math

import nuke

import pyblish.api

from ayon_core.pipeline import publish
from ayon_nuke.api.lib import maintained_selection


class ExtractCamera(publish.Extractor):
    """3D camera extractor"""

    label = 'Extract Camera'
    order = pyblish.api.ExtractorOrder
    families = ["camera"]
    hosts = ["nuke"]

    settings_category = "nuke"

    # presets
    write_geo_knobs = [
        ("file_type", "abc"),
        ("storageFormat", "Ogawa"),
        ("writeGeometries", False),
        ("writePointClouds", False),
        ("writeAxes", False)
    ]

    def process(self, instance):
        camera_node = instance.data["transientData"]["node"]
        handle_start = instance.context.data["handleStart"]
        handle_end = instance.context.data["handleEnd"]
        first_frame = int(nuke.root()["first_frame"].getValue())
        last_frame = int(nuke.root()["last_frame"].getValue())
        step = 1
        output_range = str(nuke.FrameRange(first_frame, last_frame, step))

        rm_nodes = []
        self.log.debug("Creating additional nodes for 3D Camera Extractor")
        product_name = instance.data["productName"]
        staging_dir = self.staging_dir(instance)

        # get extension from preset
        extension = next((k[1] for k in self.write_geo_knobs
                          if k[0] == "file_type"), None)
        if not extension:
            raise RuntimeError(
                "Bad config for extension in presets. "
                "Talk to your supervisor or pipeline admin")

        # create file name and path
        filename = product_name + ".{}".format(extension)
        file_path = os.path.join(staging_dir, filename).replace("\\", "/")

        with maintained_selection():
            # bake camera with its axes onto world coordinate XYZ
            rm_n = bakeCameraWithAxeses(
                camera_node, output_range)
            rm_nodes.append(rm_n)

            # create scene node
            rm_n = nuke.createNode("Scene")
            rm_nodes.append(rm_n)

            # create write geo node
            wg_n = nuke.createNode("WriteGeo")
            wg_n["file"].setValue(file_path)
            # add path to write to
            for k, v in self.write_geo_knobs:
                wg_n[k].setValue(v)
            rm_nodes.append(wg_n)

            # write out camera
            nuke.execute(
                wg_n,
                int(first_frame),
                int(last_frame)
            )
            # erase additional nodes
            for n in rm_nodes:
                nuke.delete(n)

        # create representation data
        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': extension,
            'ext': extension,
            'files': filename,
            "stagingDir": staging_dir,
            "frameStart": first_frame,
            "frameEnd": last_frame
        }
        instance.data["representations"].append(representation)

        instance.data.update({
            "path": file_path,
            "outputDir": staging_dir,
            "ext": extension,
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "frameStartHandle": first_frame,
            "frameEndHandle": last_frame,
        })

        self.log.debug("Extracted instance '{0}' to: {1}".format(
            instance.name, file_path))


def bakeCameraWithAxeses(camera_node, output_range):
    """Bake the camera's whole parent hierarchy of axes into the camera,
    transposing it onto world XYZ coordinates.
    """
    bakeFocal = False
    bakeHaperture = False
    bakeVaperture = False

    camera_matrix = camera_node['world_matrix']

    new_cam_n = nuke.createNode("Camera2")
    new_cam_n.setInput(0, None)
    new_cam_n['rotate'].setAnimated()
    new_cam_n['translate'].setAnimated()

    old_focal = camera_node['focal']
    if old_focal.isAnimated() and not (old_focal.animation(0).constant()):
        new_cam_n['focal'].setAnimated()
        bakeFocal = True
    else:
        new_cam_n['focal'].setValue(old_focal.value())

    old_haperture = camera_node['haperture']
    if old_haperture.isAnimated() and not (
            old_haperture.animation(0).constant()):
        new_cam_n['haperture'].setAnimated()
        bakeHaperture = True
    else:
        new_cam_n['haperture'].setValue(old_haperture.value())

    old_vaperture = camera_node['vaperture']
    if old_vaperture.isAnimated() and not (
            old_vaperture.animation(0).constant()):
        new_cam_n['vaperture'].setAnimated()
        bakeVaperture = True
    else:
        new_cam_n['vaperture'].setValue(old_vaperture.value())

    new_cam_n['win_translate'].setValue(camera_node['win_translate'].value())
    new_cam_n['win_scale'].setValue(camera_node['win_scale'].value())

    for x in nuke.FrameRange(output_range):
        math_matrix = nuke.math.Matrix4()
        for y in range(camera_matrix.height()):
            for z in range(camera_matrix.width()):
                matrix_pointer = z + (y * camera_matrix.width())
                math_matrix[matrix_pointer] = camera_matrix.getValueAt(
                    x, (y + (z * camera_matrix.width())))

        rot_matrix = nuke.math.Matrix4(math_matrix)
        rot_matrix.rotationOnly()
        rot = rot_matrix.rotationsZXY()

        new_cam_n['rotate'].setValueAt(math.degrees(rot[0]), x, 0)
        new_cam_n['rotate'].setValueAt(math.degrees(rot[1]), x, 1)
        new_cam_n['rotate'].setValueAt(math.degrees(rot[2]), x, 2)
        new_cam_n['translate'].setValueAt(
            camera_matrix.getValueAt(x, 3), x, 0)
        new_cam_n['translate'].setValueAt(
            camera_matrix.getValueAt(x, 7), x, 1)
        new_cam_n['translate'].setValueAt(
            camera_matrix.getValueAt(x, 11), x, 2)

        if bakeFocal:
            new_cam_n['focal'].setValueAt(old_focal.getValueAt(x), x)
        if bakeHaperture:
            new_cam_n['haperture'].setValueAt(old_haperture.getValueAt(x), x)
        if bakeVaperture:
            new_cam_n['vaperture'].setValueAt(old_vaperture.getValueAt(x), x)

    return new_cam_n
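The per-frame loop in `bakeCameraWithAxeses` reads 16 animated values out of the `world_matrix` knob and writes them into a `nuke.math.Matrix4`; writing index `z + y*width` from source index `y + z*width` is effectively a transpose between the knob's value order and the Matrix4 storage order. A pure-Python sketch of just that index mapping on a 4x4 grid:

# Sketch only: 'source' stands in for the 16 world_matrix values at one frame.
width = 4
source = list(range(16))
dest = [0] * 16
for y in range(width):
    for z in range(width):
        dest[z + y * width] = source[y + z * width]

# dest is the transpose of source when both are viewed as 4x4 grids
print([dest[i * width:(i + 1) * width] for i in range(width)])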
@ -0,0 +1,91 @@
import os
import nuke

import pyblish.api

from ayon_core.pipeline import publish
from ayon_nuke.api import utils as pnutils
from ayon_nuke.api.lib import (
    maintained_selection,
    reset_selection,
    select_nodes
)


class ExtractGizmo(publish.Extractor):
    """Extracting Gizmo (Group) node

    Will create nuke script only with the Gizmo node.
    """

    order = pyblish.api.ExtractorOrder
    label = "Extract Gizmo (group)"
    hosts = ["nuke"]
    families = ["gizmo"]

    settings_category = "nuke"

    def process(self, instance):
        tmp_nodes = []
        orig_grpn = instance.data["transientData"]["node"]

        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.nk".format(instance.name)
        path = os.path.join(stagingdir, filename)

        # maintain selection
        with maintained_selection():
            orig_grpn_name = orig_grpn.name()
            tmp_grpn_name = orig_grpn_name + "_tmp"
            # select original group node
            select_nodes([orig_grpn])

            # copy to clipboard
            nuke.nodeCopy("%clipboard%")

            # reset selection to none
            reset_selection()

            # paste clipboard
            nuke.nodePaste("%clipboard%")

            # assign pasted node
            copy_grpn = nuke.selectedNode()
            copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos())

            # convert gizmos to groups
            pnutils.bake_gizmos_recursively(copy_grpn)

            # add to temporary nodes
            tmp_nodes.append(copy_grpn)

            # swap names
            orig_grpn.setName(tmp_grpn_name)
            copy_grpn.setName(orig_grpn_name)

            # create tmp nk file
            # save file to the path
            nuke.nodeCopy(path)

            # Clean up
            for tn in tmp_nodes:
                nuke.delete(tn)

            # rename back to original
            orig_grpn.setName(orig_grpn_name)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        # create representation
        representation = {
            'name': 'gizmo',
            'ext': 'nk',
            'files': filename,
            "stagingDir": stagingdir
        }
        instance.data["representations"].append(representation)

        self.log.debug("Extracted instance '{}' to: {}".format(
            instance.name, path))
@ -0,0 +1,38 @@
import os
from datetime import datetime
import shutil

import pyblish.api

from ayon_core.pipeline import registered_host


class ExtractRenderOnFarm(pyblish.api.InstancePlugin):
    """Copy the workfile to a timestamped copy."""

    order = pyblish.api.ExtractorOrder + 0.499
    label = "Extract Render On Farm"
    hosts = ["nuke"]
    families = ["render_on_farm"]

    settings_category = "nuke"

    def process(self, instance):
        if not instance.context.data.get("render_on_farm", False):
            return

        host = registered_host()
        current_datetime = datetime.now()
        formatted_timestamp = current_datetime.strftime("%Y%m%d%H%M%S")
        base, ext = os.path.splitext(host.current_file())

        directory = os.path.join(os.path.dirname(base), "farm_submissions")
        if not os.path.exists(directory):
            os.makedirs(directory)

        filename = "{}_{}{}".format(
            os.path.basename(base), formatted_timestamp, ext
        )
        path = os.path.join(directory, filename).replace("\\", "/")
        instance.context.data["currentFile"] = path
        shutil.copy(host.current_file(), path)
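The farm extractor above renames the workfile copy with a timestamp so each submission is unique under a `farm_submissions` subfolder. A standalone sketch of the name construction (the workfile path is hypothetical):

import os
from datetime import datetime

current_file = "/proj/shots/sh010/work/sh010_comp_v012.nk"  # hypothetical
stamp = datetime.now().strftime("%Y%m%d%H%M%S")
base, ext = os.path.splitext(current_file)
directory = os.path.join(os.path.dirname(base), "farm_submissions")
filename = "{}_{}{}".format(os.path.basename(base), stamp, ext)
print(os.path.join(directory, filename).replace("\\", "/"))
# e.g. /proj/shots/sh010/work/farm_submissions/sh010_comp_v012_20240101120000.nk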
@ -0,0 +1,110 @@
import os
from pprint import pformat
import nuke
import pyblish.api

from ayon_core.pipeline import publish
from ayon_nuke.api.lib import (
    maintained_selection,
    select_nodes
)


class ExtractModel(publish.Extractor):
    """3D model extractor."""
    label = 'Extract Model'
    order = pyblish.api.ExtractorOrder
    families = ["model"]
    hosts = ["nuke"]

    settings_category = "nuke"

    # presets
    write_geo_knobs = [
        ("file_type", "abc"),
        ("storageFormat", "Ogawa"),
        ("writeGeometries", True),
        ("writePointClouds", False),
        ("writeAxes", False)
    ]

    def process(self, instance):
        handle_start = instance.context.data["handleStart"]
        handle_end = instance.context.data["handleEnd"]
        first_frame = int(nuke.root()["first_frame"].getValue())
        last_frame = int(nuke.root()["last_frame"].getValue())

        self.log.debug("instance.data: `{}`".format(
            pformat(instance.data)))

        rm_nodes = []
        model_node = instance.data["transientData"]["node"]

        self.log.debug("Creating additional nodes for Extract Model")
        product_name = instance.data["productName"]
        staging_dir = self.staging_dir(instance)

        extension = next((k[1] for k in self.write_geo_knobs
                          if k[0] == "file_type"), None)
        if not extension:
            raise RuntimeError(
                "Bad config for extension in presets. "
                "Talk to your supervisor or pipeline admin")

        # create file name and path
        filename = product_name + ".{}".format(extension)
        file_path = os.path.join(staging_dir, filename).replace("\\", "/")

        with maintained_selection():
            # select model node
            select_nodes([model_node])

            # create write geo node and set the output path
            wg_n = nuke.createNode("WriteGeo")
            wg_n["file"].setValue(file_path)
            # apply preset knobs
            for k, v in self.write_geo_knobs:
                wg_n[k].setValue(v)
            rm_nodes.append(wg_n)

            # write out model
            nuke.execute(
                wg_n,
                int(first_frame),
                int(last_frame)
            )
            # erase additional nodes
            for n in rm_nodes:
                nuke.delete(n)

        self.log.debug("Filepath: {}".format(file_path))

        # create representation data
        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': extension,
            'ext': extension,
            'files': filename,
            "stagingDir": staging_dir,
            "frameStart": first_frame,
            "frameEnd": last_frame
        }
        instance.data["representations"].append(representation)

        instance.data.update({
            "path": file_path,
            "outputDir": staging_dir,
            "ext": extension,
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "frameStartHandle": first_frame,
            "frameEndHandle": last_frame,
        })

        self.log.debug("Extracted instance '{0}' to: {1}".format(
            instance.name, file_path))
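The `file_type` lookup above is a plain generator scan over the preset pairs; a standalone sketch with the same default values:

    # mirrors the next(...) extension lookup in ExtractModel
    write_geo_knobs = [("file_type", "abc"), ("storageFormat", "Ogawa")]
    extension = next(
        (v for k, v in write_geo_knobs if k == "file_type"), None)
    assert extension == "abc"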
@ -0,0 +1,45 @@
import nuke
import pyblish.api
from ayon_nuke.api.lib import maintained_selection


class CreateOutputNode(pyblish.api.ContextPlugin):
    """Add an Output node for each output write node.

    When the user later loads the .nk as a LiveGroup or Precomp,
    Nuke will not complain about a missing Output node.
    """
    label = 'Output Node Create'
    order = pyblish.api.ExtractorOrder + 0.4
    families = ["workfile"]
    hosts = ["nuke"]

    settings_category = "nuke"

    def process(self, context):
        # capture selection state
        with maintained_selection():

            active_node = [
                inst.data.get("transientData", {}).get("node")
                for inst in context
                if inst.data.get("transientData", {}).get("node")
                if inst.data.get(
                    "transientData", {}).get("node").Class() != "Root"
            ]

            if active_node:
                active_node = active_node.pop()
                self.log.debug("Active node: {}".format(active_node))
                active_node['selected'].setValue(True)

            # select only instance render node
            output_node = nuke.createNode("Output")

            # deselect all and select the original selection
            output_node['selected'].setValue(False)

            # save script
            nuke.scriptSave()

            # add node to instance node list
            context.data["outputNode"] = output_node
@ -0,0 +1,26 @@
import os

import pyblish.api


class ExtractOutputDirectory(pyblish.api.InstancePlugin):
    """Extracts the output path for any collection or single output path."""

    order = pyblish.api.ExtractorOrder - 0.05
    label = "Output Directory"
    optional = True

    settings_category = "nuke"

    def process(self, instance):

        path = None

        if "path" in instance.data.keys():
            path = instance.data["path"]

        if not path:
            return

        if not os.path.exists(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))
@ -0,0 +1,218 @@
import os
import shutil

import pyblish.api
import clique
import nuke
from ayon_nuke import api as napi
from ayon_core.pipeline import publish
from ayon_core.lib import collect_frames


class NukeRenderLocal(publish.Extractor,
                      publish.ColormanagedPyblishPluginMixin):
    """Render the current Nuke composition locally.

    Extract the result of the write nodes by starting a local render.

    Allows using the last published frames and overwriting only specific
    ones (set in instance.data.get("frames_to_fix")).
    """

    order = pyblish.api.ExtractorOrder
    label = "Render Local"
    hosts = ["nuke"]
    families = ["render.local", "prerender.local", "image.local"]

    settings_category = "nuke"

    def process(self, instance):
        child_nodes = (
            instance.data.get("transientData", {}).get("childNodes")
            or instance
        )

        node = None
        for x in child_nodes:
            if x.Class() == "Write":
                node = x

        self.log.debug("instance collected: {}".format(instance.data))

        node_product_name = instance.data.get("name", None)

        first_frame = instance.data.get("frameStartHandle", None)
        last_frame = instance.data.get("frameEndHandle", None)

        filenames = []
        node_file = node["file"]
        # Collect expected filepaths for each frame
        # - for cases where the output is a still image, a set of paths
        #   is created first, then sorted and converted to a list
        expected_paths = list(sorted({
            node_file.evaluate(frame)
            for frame in range(first_frame, last_frame + 1)
        }))
        # Extract only filenames for representation
        filenames.extend([
            os.path.basename(filepath)
            for filepath in expected_paths
        ])

        # Ensure output directory exists.
        out_dir = os.path.dirname(expected_paths[0])
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)

        frames_to_render = [(first_frame, last_frame)]

        frames_to_fix = instance.data.get("frames_to_fix")
        if instance.data.get("last_version_published_files") and frames_to_fix:
            frames_to_render = self._get_frames_to_render(frames_to_fix)
            anatomy = instance.context.data["anatomy"]
            self._copy_last_published(anatomy, instance, out_dir,
                                      filenames)

        for render_first_frame, render_last_frame in frames_to_render:

            self.log.info("Starting render")
            self.log.info("Start frame: {}".format(render_first_frame))
            self.log.info("End frame: {}".format(render_last_frame))

            # Render frames
            nuke.execute(
                str(node_product_name),
                int(render_first_frame),
                int(render_last_frame)
            )

        ext = node["file_type"].value()
        colorspace = napi.get_colorspace_from_node(node)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        if len(filenames) == 1:
            repre = {
                'name': ext,
                'ext': ext,
                'files': filenames[0],
                "stagingDir": out_dir
            }
        else:
            repre = {
                'name': ext,
                'ext': ext,
                'frameStart': (
                    "{{:0>{}}}"
                    .format(len(str(last_frame)))
                    .format(first_frame)
                ),
                'files': filenames,
                "stagingDir": out_dir
            }

        # inject colorspace data
        self.set_representation_colorspace(
            repre, instance.context,
            colorspace=colorspace
        )

        instance.data["representations"].append(repre)

        self.log.debug("Extracted instance '{0}' to: {1}".format(
            instance.name,
            out_dir
        ))

        families = instance.data["families"]
        anatomy_data = instance.data["anatomyData"]
        # redefinition of families
        if "render.local" in families:
            instance.data["family"] = "render"
            instance.data["productType"] = "render"
            families.remove("render.local")
            families.insert(0, "render2d")
            anatomy_data["family"] = "render"
            anatomy_data["product"]["type"] = "render"
        elif "prerender.local" in families:
            instance.data["family"] = "prerender"
            instance.data["productType"] = "prerender"
            families.remove("prerender.local")
            families.insert(0, "prerender")
            anatomy_data["family"] = "prerender"
            anatomy_data["product"]["type"] = "prerender"
        elif "image.local" in families:
            instance.data["family"] = "image"
            instance.data["productType"] = "image"
            families.remove("image.local")
            anatomy_data["family"] = "image"
            anatomy_data["product"]["type"] = "image"
        instance.data["families"] = families

        collections, remainder = clique.assemble(filenames)
        self.log.debug('collections: {}'.format(str(collections)))

        if collections:
            collection = collections[0]
            instance.data['collection'] = collection

        self.log.info('Finished render')

        self.log.debug("_ instance.data: {}".format(instance.data))

    def _copy_last_published(self, anatomy, instance, out_dir,
                             expected_filenames):
        """Copies last published files to the temporary out_dir.

        These are the base files, which will be extended/fixed for
        specific frames.
        Renames published files to the expected file name based on
        frame, e.g.
        test_project_test_asset_product_v005.1001.exr > new_render.1001.exr
        """
        last_published = instance.data["last_version_published_files"]
        last_published_and_frames = collect_frames(last_published)

        expected_and_frames = collect_frames(expected_filenames)
        frames_and_expected = {v: k for k, v in expected_and_frames.items()}
        for file_path, frame in last_published_and_frames.items():
            file_path = anatomy.fill_root(file_path)
            if not os.path.exists(file_path):
                continue
            target_file_name = frames_and_expected.get(frame)
            if not target_file_name:
                continue

            out_path = os.path.join(out_dir, target_file_name)
            self.log.debug("Copying '{}' -> '{}'".format(file_path, out_path))
            shutil.copy(file_path, out_path)

            # TODO shouldn't this be uncommented
            # instance.context.data["cleanupFullPaths"].append(out_path)

    def _get_frames_to_render(self, frames_to_fix):
        """Return a list of frame range tuples to render.

        Args:
            frames_to_fix (str): specific frames or ranges of frames to be
                re-rendered, e.g. "1005,1009-1010"
        Returns:
            (list): [(1005, 1005), (1009, 1010)]
        """
        frames_to_render = []

        for frame_range in frames_to_fix.split(","):
            if frame_range.isdigit():
                render_first_frame = int(frame_range)
                render_last_frame = int(frame_range)
            elif '-' in frame_range:
                frames = frame_range.split('-')
                render_first_frame = int(frames[0])
                render_last_frame = int(frames[1])
            else:
                raise ValueError("Wrong format of frames to fix {}"
                                 .format(frames_to_fix))
            frames_to_render.append((render_first_frame,
                                     render_last_frame))
        return frames_to_render
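A worked example of the frames_to_fix parsing above, with hypothetical frame numbers: the string "1005,1009-1010" yields inclusive (first, last) tuples per comma-separated entry:

    # standalone re-implementation of _get_frames_to_render's parsing
    frames_to_fix = "1005,1009-1010"
    frames = []
    for part in frames_to_fix.split(","):
        if part.isdigit():
            frames.append((int(part), int(part)))
        else:
            first, last = part.split("-")
            frames.append((int(first), int(last)))
    assert frames == [(1005, 1005), (1009, 1010)]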
@ -0,0 +1,50 @@
import os
from pprint import pformat
import pyblish.api

from ayon_core.pipeline import publish


class ExtractReviewData(publish.Extractor):
    """Extracts review tag into available representations."""

    order = pyblish.api.ExtractorOrder + 0.01
    # order = pyblish.api.CollectorOrder + 0.499
    label = "Extract Review Data"

    families = ["review"]
    hosts = ["nuke"]

    settings_category = "nuke"

    def process(self, instance):
        fpath = instance.data["path"]
        ext = os.path.splitext(fpath)[-1][1:]

        representations = instance.data.get("representations", [])

        # review can be removed since `ProcessSubmittedJobOnFarm` will create
        # reviewable representation if needed
        if (
            instance.data.get("farm")
            and "review" in instance.data["families"]
        ):
            instance.data["families"].remove("review")

        # iterate representations and add `review` tag
        for repre in representations:
            if ext != repre["ext"]:
                continue

            if not repre.get("tags"):
                repre["tags"] = []

            if "review" not in repre["tags"]:
                repre["tags"].append("review")

            self.log.debug("Matching representation: {}".format(
                pformat(repre)
            ))

        instance.data["representations"] = representations
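The tagging loop above only ever appends the `review` tag once per matching representation; a minimal sketch with hypothetical data:

    repre = {"name": "exr", "ext": "exr", "files": "render.1001.exr"}
    if not repre.get("tags"):
        repre["tags"] = []
    if "review" not in repre["tags"]:
        repre["tags"].append("review")
    assert repre["tags"] == ["review"]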
@ -0,0 +1,64 @@
import os
import pyblish.api

from ayon_core.pipeline import publish
from ayon_nuke.api import plugin
from ayon_nuke.api.lib import maintained_selection


class ExtractReviewDataLut(publish.Extractor):
    """Extracts movie and thumbnail with baked in luts.

    Must be run after extract_render_local.py
    """

    order = pyblish.api.ExtractorOrder + 0.005
    label = "Extract Review Data Lut"

    families = ["review"]
    hosts = ["nuke"]

    settings_category = "nuke"

    def process(self, instance):
        self.log.debug("Creating staging dir...")
        if "representations" in instance.data:
            staging_dir = instance.data[
                "representations"][0]["stagingDir"].replace("\\", "/")
            instance.data["stagingDir"] = staging_dir
            instance.data["representations"][0]["tags"] = ["review"]
        else:
            instance.data["representations"] = []
            # get output path
            render_path = instance.data['path']
            staging_dir = os.path.normpath(os.path.dirname(render_path))
            instance.data["stagingDir"] = staging_dir

        self.log.debug(
            "StagingDir `{0}`...".format(instance.data["stagingDir"]))

        # generate data
        with maintained_selection():
            exporter = plugin.ExporterReviewLut(
                self, instance
            )
            data = exporter.generate_lut()

        # assign to representations
        instance.data["lutPath"] = os.path.join(
            exporter.stagingDir, exporter.file).replace("\\", "/")
        instance.data["representations"] += data["representations"]

        # review can be removed since `ProcessSubmittedJobOnFarm` will create
        # reviewable representation if needed
        if (
            instance.data.get("farm")
            and "review" in instance.data["families"]
        ):
            instance.data["families"].remove("review")

        self.log.debug(
            "_ lutPath: {}".format(instance.data["lutPath"]))
        self.log.debug(
            "_ representations: {}".format(instance.data["representations"]))
@ -0,0 +1,184 @@
import os
import re
from pprint import pformat
import pyblish.api

from ayon_core.pipeline import publish
from ayon_nuke.api import plugin
from ayon_nuke.api.lib import maintained_selection


class ExtractReviewIntermediates(publish.Extractor):
    """Extracting intermediate videos or sequences with
    thumbnail for transcoding.

    Must be run after extract_render_local.py
    """

    order = pyblish.api.ExtractorOrder + 0.01
    label = "Extract Review Intermediates"

    families = ["review"]
    hosts = ["nuke"]

    settings_category = "nuke"

    # presets
    viewer_lut_raw = None
    outputs = {}

    @classmethod
    def apply_settings(cls, project_settings):
        """Apply the settings from the deprecated
        ExtractReviewDataMov plugin for backwards compatibility
        """
        nuke_publish = project_settings["nuke"]["publish"]
        deprecated_setting = nuke_publish["ExtractReviewDataMov"]
        current_setting = nuke_publish.get("ExtractReviewIntermediates")
        if not deprecated_setting["enabled"] and (
            current_setting is None or not current_setting["enabled"]
        ):
            cls.enabled = False

        if deprecated_setting["enabled"]:
            # Use deprecated settings if they are still enabled
            cls.viewer_lut_raw = deprecated_setting["viewer_lut_raw"]
            cls.outputs = deprecated_setting["outputs"]
        elif current_setting is None:
            pass
        elif current_setting["enabled"]:
            cls.viewer_lut_raw = current_setting["viewer_lut_raw"]
            cls.outputs = current_setting["outputs"]

    def process(self, instance):
        # TODO 'families' should not be included for filtering of outputs
        families = set(instance.data["families"])

        # Add product type to families
        families.add(instance.data["productType"])

        task_type = instance.context.data["taskType"]
        product_name = instance.data["productName"]
        self.log.debug("Creating staging dir...")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        staging_dir = os.path.normpath(
            os.path.dirname(instance.data["path"]))

        instance.data["stagingDir"] = staging_dir

        self.log.debug(
            "StagingDir `{0}`...".format(instance.data["stagingDir"]))

        self.log.debug("Outputs: {}".format(self.outputs))

        # generate data
        with maintained_selection():
            generated_repres = []
            for o_data in self.outputs:
                o_name = o_data["name"]
                self.log.debug(
                    "o_name: {}, o_data: {}".format(o_name, pformat(o_data)))
                f_product_types = o_data["filter"]["product_types"]
                f_task_types = o_data["filter"]["task_types"]
                product_names = o_data["filter"]["product_names"]

                self.log.debug(
                    "f_product_types `{}` > families: {}".format(
                        f_product_types, families))

                self.log.debug(
                    "f_task_types `{}` > task_type: {}".format(
                        f_task_types, task_type))

                self.log.debug(
                    "product_names `{}` > product: {}".format(
                        product_names, product_name))

                # test if family found in context
                # using intersection to make sure all defined
                # families are present in combination
                if (
                    f_product_types
                    and not families.intersection(f_product_types)
                ):
                    continue

                # test task types from filter
                if f_task_types and task_type not in f_task_types:
                    continue

                # test products from filter
                if product_names and not any(
                    re.search(p, product_name) for p in product_names
                ):
                    continue

                self.log.debug(
                    "Baking output `{}` with settings: {}".format(
                        o_name, o_data)
                )

                # check if settings have more than one preset
                # so we don't need to add outputName to representation
                # in case there is only one preset
                multiple_presets = len(self.outputs) > 1

                # adding bake presets to instance data for other plugins
                if not instance.data.get("bakePresets"):
                    instance.data["bakePresets"] = {}
                # add preset to bakePresets
                instance.data["bakePresets"][o_name] = o_data

                # create exporter instance
                exporter = plugin.ExporterReviewMov(
                    self, instance, o_name, o_data["extension"],
                    multiple_presets)

                delete = not o_data.get("publish", False)

                if instance.data.get("farm"):
                    if "review" in instance.data["families"]:
                        instance.data["families"].remove("review")

                    data = exporter.generate_mov(
                        farm=True, delete=delete, **o_data
                    )

                    self.log.debug(
                        "_ data: {}".format(data))

                    if not instance.data.get("bakingNukeScripts"):
                        instance.data["bakingNukeScripts"] = []

                    instance.data["bakingNukeScripts"].append({
                        "bakeRenderPath": data.get("bakeRenderPath"),
                        "bakeScriptPath": data.get("bakeScriptPath"),
                        "bakeWriteNodeName": data.get("bakeWriteNodeName")
                    })
                else:
                    data = exporter.generate_mov(delete=delete, **o_data)

                # add representation generated by exporter
                generated_repres.extend(data["representations"])
                self.log.debug(
                    "__ generated_repres: {}".format(generated_repres))

        if generated_repres:
            # assign to representations
            instance.data["representations"] += generated_repres
            instance.data["useSequenceForReview"] = False
        else:
            instance.data["families"].remove("review")
            self.log.debug(
                "Removing `review` from families. "
                "No available baking profile."
            )
            self.log.debug(instance.data["families"])

        self.log.debug(
            "_ representations: {}".format(
                instance.data["representations"]))
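A sketch of the output-filter semantics above, with all values hypothetical: an output is baked only when the instance families intersect its product_types filter, the task type is listed (or that filter is empty), and at least one product_names regex matches:

    import re

    families = {"render", "review"}
    task_type = "Compositing"
    f_product_types = ["render"]
    f_task_types = []
    product_names = ["^render.*"]
    product_name = "renderCompMain"

    passes = (
        (not f_product_types or bool(families.intersection(f_product_types)))
        and (not f_task_types or task_type in f_task_types)
        and (not product_names
             or any(re.search(p, product_name) for p in product_names))
    )
    assert passes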
@ -0,0 +1,16 @@
import nuke
import pyblish.api


class ExtractScriptSave(pyblish.api.InstancePlugin):
    """Save current Nuke workfile script"""
    label = 'Script Save'
    order = pyblish.api.ExtractorOrder - 0.1
    hosts = ["nuke"]

    settings_category = "nuke"

    def process(self, instance):

        self.log.debug('Saving current script')
        nuke.scriptSave()
@ -0,0 +1,366 @@
import os
from pprint import pformat
import nuke
import copy

import pyblish.api
import six

from ayon_core.pipeline import publish
from ayon_nuke.api import (
    maintained_selection,
    duplicate_node,
    get_view_process_node
)


class ExtractSlateFrame(publish.Extractor):
    """Extracts slate frame with baked in luts.

    Must be run after extract_render_local.py
    """

    order = pyblish.api.ExtractorOrder + 0.011
    label = "Extract Slate Frame"

    families = ["slate"]
    hosts = ["nuke"]

    settings_category = "nuke"

    # Settings values
    key_value_mapping = {
        "f_submission_note": {
            "enabled": True, "template": "{comment}"
        },
        "f_submitting_for": {
            "enabled": True, "template": "{intent[value]}"
        },
        "f_vfx_scope_of_work": {
            "enabled": False, "template": ""
        }
    }

    def process(self, instance):

        if "representations" not in instance.data:
            instance.data["representations"] = []

        self._create_staging_dir(instance)

        with maintained_selection():
            self.log.debug("instance: {}".format(instance))
            self.log.debug("instance.data[families]: {}".format(
                instance.data["families"]))

            if instance.data.get("bakePresets"):
                for o_name, o_data in instance.data["bakePresets"].items():
                    self.log.debug("_ o_name: {}, o_data: {}".format(
                        o_name, pformat(o_data)))
                    self.render_slate(
                        instance,
                        o_name,
                        o_data["bake_viewer_process"],
                        o_data["bake_viewer_input_process"]
                    )
            else:
                # backward compatibility
                self.render_slate(instance)

            # also render image to sequence
            self._render_slate_to_sequence(instance)

    def _create_staging_dir(self, instance):

        self.log.debug("Creating staging dir...")

        staging_dir = os.path.normpath(
            os.path.dirname(instance.data["path"]))

        instance.data["stagingDir"] = staging_dir

        self.log.debug(
            "StagingDir `{0}`...".format(instance.data["stagingDir"]))

    def _check_frames_exists(self, instance):
        # rendering path from group write node
        fpath = instance.data["path"]

        # instance frame range with handles
        first = instance.data["frameStartHandle"]
        last = instance.data["frameEndHandle"]

        padding = fpath.count('#')

        test_path_template = fpath
        if padding:
            repl_string = "#" * padding
            test_path_template = fpath.replace(
                repl_string, "%0{}d".format(padding))

        for frame in range(first, last + 1):
            test_file = test_path_template % frame
            if not os.path.exists(test_file):
                self.log.debug("__ test_file: `{}`".format(test_file))
                return None

        return True

    def render_slate(
        self,
        instance,
        output_name=None,
        bake_viewer_process=True,
        bake_viewer_input_process=True
    ):
        """Slate frame renderer

        Args:
            instance (PyblishInstance): Pyblish instance with product data
            output_name (str, optional):
                Slate variation name. Defaults to None.
            bake_viewer_process (bool, optional):
                Switch for viewer profile baking. Defaults to True.
            bake_viewer_input_process (bool, optional):
                Switch for input process node baking. Defaults to True.
        """
        slate_node = instance.data["slateNode"]

        # rendering path from group write node
        fpath = instance.data["path"]

        # instance frame range with handles
        first_frame = instance.data["frameStartHandle"]
        last_frame = instance.data["frameEndHandle"]

        # fill slate node with comments
        self.add_comment_slate_node(instance, slate_node)

        # solve output name if any is set
        _output_name = output_name or ""
        if _output_name:
            _output_name = "_" + _output_name

        slate_first_frame = first_frame - 1

        collection = instance.data.get("collection", None)

        if collection:
            # get path
            fname = os.path.basename(collection.format(
                "{head}{padding}{tail}"))
            fhead = collection.format("{head}")
        else:
            fname = os.path.basename(fpath)
            fhead = os.path.splitext(fname)[0] + "."

        if "#" in fhead:
            fhead = fhead.replace("#", "")[:-1]

        self.log.debug("__ first_frame: {}".format(first_frame))
        self.log.debug("__ slate_first_frame: {}".format(slate_first_frame))

        above_slate_node = slate_node.dependencies().pop()
        # fallback if files do not exist
        if self._check_frames_exists(instance):
            # Read node
            r_node = nuke.createNode("Read")
            r_node["file"].setValue(fpath)
            r_node["first"].setValue(first_frame)
            r_node["origfirst"].setValue(first_frame)
            r_node["last"].setValue(last_frame)
            r_node["origlast"].setValue(last_frame)
            r_node["colorspace"].setValue(instance.data["colorspace"])
            previous_node = r_node
            temporary_nodes = [previous_node]

            # adding copy metadata node for correct frame metadata
            cm_node = nuke.createNode("CopyMetaData")
            cm_node.setInput(0, previous_node)
            cm_node.setInput(1, above_slate_node)
            previous_node = cm_node
            temporary_nodes.append(cm_node)

        else:
            previous_node = above_slate_node
            temporary_nodes = []

        # only create colorspace baking if toggled on
        if bake_viewer_process:
            if bake_viewer_input_process:
                # get input process and connect it to baking
                ipn = get_view_process_node()
                if ipn is not None:
                    ipn.setInput(0, previous_node)
                    previous_node = ipn
                    temporary_nodes.append(ipn)

            # add duplicate slate node and connect to previous
            duply_slate_node = duplicate_node(slate_node)
            duply_slate_node.setInput(0, previous_node)
            previous_node = duply_slate_node
            temporary_nodes.append(duply_slate_node)

            # add viewer display transformation node
            dag_node = nuke.createNode("OCIODisplay")
            dag_node.setInput(0, previous_node)
            previous_node = dag_node
            temporary_nodes.append(dag_node)

        else:
            # add duplicate slate node and connect to previous
            duply_slate_node = duplicate_node(slate_node)
            duply_slate_node.setInput(0, previous_node)
            previous_node = duply_slate_node
            temporary_nodes.append(duply_slate_node)

        # create write node
        write_node = nuke.createNode("Write")
        file = fhead[:-1] + _output_name + "_slate.png"
        path = os.path.join(
            instance.data["stagingDir"], file).replace("\\", "/")

        # add slate path to `slateFrames` instance data attr
        if not instance.data.get("slateFrames"):
            instance.data["slateFrames"] = {}

        instance.data["slateFrames"][output_name or "*"] = path

        # set write node attributes
        write_node["file"].setValue(path)
        write_node["file_type"].setValue("png")
        write_node["raw"].setValue(1)
        write_node.setInput(0, previous_node)
        temporary_nodes.append(write_node)

        # Render frames
        nuke.execute(
            write_node.name(), int(slate_first_frame), int(slate_first_frame))

        # Clean up
        for node in temporary_nodes:
            nuke.delete(node)

    def _render_slate_to_sequence(self, instance):
        # set slate frame
        first_frame = instance.data["frameStartHandle"]
        last_frame = instance.data["frameEndHandle"]
        slate_first_frame = first_frame - 1

        # render slate as sequence frame
        nuke.execute(
            instance.data["name"],
            int(slate_first_frame),
            int(slate_first_frame)
        )

        # Add file to representation files
        # - get write node
        write_node = instance.data["transientData"]["writeNode"]
        # - evaluate filepaths for first frame and slate frame
        first_filename = os.path.basename(
            write_node["file"].evaluate(first_frame))
        slate_filename = os.path.basename(
            write_node["file"].evaluate(slate_first_frame))

        # Find matching representation based on first filename
        matching_repre = None
        is_sequence = None
        for repre in instance.data["representations"]:
            files = repre["files"]
            if (
                not isinstance(files, six.string_types)
                and first_filename in files
            ):
                matching_repre = repre
                is_sequence = True
                break

            elif files == first_filename:
                matching_repre = repre
                is_sequence = False
                break

        if not matching_repre:
            self.log.info(
                "Matching representation was not found."
                " Representation files were not filled with slate."
            )
            return

        # Add frame to matching representation files
        if not is_sequence:
            matching_repre["files"] = [first_filename, slate_filename]
        elif slate_filename not in matching_repre["files"]:
            matching_repre["files"].insert(0, slate_filename)
            matching_repre["frameStart"] = (
                "{{:0>{}}}"
                .format(len(str(last_frame)))
                .format(slate_first_frame)
            )
            self.log.debug(
                "__ matching_repre: {}".format(pformat(matching_repre)))

        data = matching_repre.get("data", {})
        data["slateFrames"] = 1
        matching_repre["data"] = data

        self.log.info("Added slate frame to representation files")

    def add_comment_slate_node(self, instance, node):

        comment = instance.data["comment"]
        intent = instance.context.data.get("intent")
        if not isinstance(intent, dict):
            intent = {
                "label": intent,
                "value": intent
            }

        fill_data = copy.deepcopy(instance.data["anatomyData"])
        fill_data.update({
            "custom": copy.deepcopy(
                instance.data.get("customData") or {}
            ),
            "comment": comment,
            "intent": intent
        })

        for key, _values in self.key_value_mapping.items():
            if not _values["enabled"]:
                self.log.debug("Key \"{}\" is disabled".format(key))
                continue

            template = _values["template"]
            try:
                value = template.format(**fill_data)

            except ValueError:
                self.log.warning(
                    "Couldn't fill template \"{}\" with data: {}".format(
                        template, fill_data
                    ),
                    exc_info=True
                )
                continue

            except KeyError:
                self.log.warning(
                    (
                        "Template contains unknown key."
                        " Template \"{}\" Data: {}"
                    ).format(template, fill_data),
                    exc_info=True
                )
                continue

            try:
                node[key].setValue(value)
                self.log.debug("Change key \"{}\" to value \"{}\"".format(
                    key, value
                ))
            except NameError:
                self.log.warning(
                    "Failed to set value \"{}\" on node attribute \"{}\""
                    .format(value, key))
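The slate always lands one frame before the first handle frame, and the representation's frameStart string is zero-padded to the width of the last frame number; a standalone check with hypothetical frame values:

    first_frame, last_frame = 1001, 1100
    slate_first_frame = first_frame - 1  # 1000
    frame_start = (
        "{{:0>{}}}"
        .format(len(str(last_frame)))  # pad to 4 digits
        .format(slate_first_frame)
    )
    assert frame_start == "1000"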
@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
  <error id="main">
    <title>Folder path</title>
    <description>
## Publishing to a different folder context

There are publish instances present which are publishing into a different folder than your current context.

Usually this is not what you want, but there can be cases where you might want to publish into another folder/shot or task.

If that's the case you can disable the validation on the instance to ignore it.

The wrong node's name is: `{node_name}`

### Correct context keys and values:

`{correct_values}`

### Wrong keys and values:

`{wrong_values}`

## How to repair?

1. Use "Repair" button.
2. Hit Reload button on the publisher.
    </description>
  </error>
</root>
@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
  <error id="multiple_outputs">
    <title>Found multiple outputs</title>
    <description>
## Invalid output amount

The backdrop has more than one outgoing connection.

### How to repair?

1. Use button `Center node in node graph` and navigate to the backdrop.
2. Reorganize nodes so that only one outgoing connection is present.
3. Hit reload button on the publisher.
    </description>
    <detail>
### How could this happen?

More than one node found above the backdrop is linked downstream, or a node above the backdrop has multiple output connections linked downstream.
    </detail>
  </error>
  <error id="no_nodes">
    <title>Empty backdrop</title>
    <description>
## Invalid empty backdrop

The backdrop is empty and no nodes are found above it.

### How to repair?

1. Use button `Center node in node graph` and navigate to the backdrop.
2. Add any node above it or delete it.
3. Hit reload button on the publisher.
    </description>
  </error>
</root>
@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
  <error id="multiple_outputs">
    <title>Found multiple outputs</title>
    <description>
## Invalid amount of Output nodes

Group node `{node_name}` has more than one Output node.

### How to repair?

1. Use button `Open Group`.
2. Remove redundant Output node.
3. Hit reload button on the publisher.
    </description>
    <detail>
### How could this happen?

Perhaps you accidentally created more than one Output node.
    </detail>
  </error>
  <error id="no_inputs">
    <title>Missing Input nodes</title>
    <description>
## Missing Input nodes

Make sure there is at least one connected Input node inside the group node with name `{node_name}`.

### How to repair?

1. Use button `Open Group`.
2. Add at least one Input node and connect it to other nodes.
3. Hit reload button on the publisher.
    </description>
  </error>
</root>
@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
  <error id="main">
    <title>Knobs value</title>
    <description>
## Invalid node knob values

The following node knobs need to be repaired:

{invalid_items}

### How to repair?

1. Use Repair button.
2. Hit Reload button on the publisher.
    </description>
  </error>
</root>
Some files were not shown because too many files have changed in this diff