Merge branch 'develop' into feature/staging_icon

iLLiCiTiT 2021-09-16 18:59:54 +02:00
commit 4f19f335ca
52 changed files with 696 additions and 235 deletions

View file

@ -10,16 +10,16 @@ log = Logger().get_logger(__name__)
def tag_data():
return {
"Retiming": {
"editable": "1",
"note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)", # noqa
"icon": "retiming.png",
"metadata": {
"family": "retiming",
"marginIn": 1,
"marginOut": 1
}
},
# "Retiming": {
# "editable": "1",
# "note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)", # noqa
# "icon": "retiming.png",
# "metadata": {
# "family": "retiming",
# "marginIn": 1,
# "marginOut": 1
# }
# },
"[Lenses]": {
"Set lense here": {
"editable": "1",
@ -31,15 +31,15 @@ def tag_data():
}
}
},
"NukeScript": {
"editable": "1",
"note": "Collecting track items to Nuke scripts.",
"icon": "icons:TagNuke.png",
"metadata": {
"family": "nukescript",
"subset": "main"
}
},
# "NukeScript": {
# "editable": "1",
# "note": "Collecting track items to Nuke scripts.",
# "icon": "icons:TagNuke.png",
# "metadata": {
# "family": "nukescript",
# "subset": "main"
# }
# },
"Comment": {
"editable": "1",
"note": "Comment on a shot.",
@ -78,8 +78,7 @@ def update_tag(tag, data):
# set icon if any available in input data
if data.get("icon"):
tag.setIcon(str(data["icon"]))
# set note description of tag
tag.setNote(data["note"])
# get metadata of tag
mtd = tag.metadata()
# get metadata key from data
@ -97,6 +96,9 @@ def update_tag(tag, data):
"tag.{}".format(str(k)),
str(v)
)
# set note description of tag
tag.setNote(str(data["note"]))
return tag
@ -106,6 +108,26 @@ def add_tags_to_workfile():
"""
from .lib import get_current_project
def add_tag_to_bin(root_bin, name, data):
# Tags are created in the root level Bin
# first check if a tag with this name was not already created
done_tag = next((t for t in root_bin.items()
if str(name) in t.name()), None)
if not done_tag:
# create Tag
tag = create_tag(name, data)
tag.setName(str(name))
log.debug("__ creating tag: {}".format(tag))
# adding Tag to Root Bin
root_bin.addItem(tag)
else:
# update only non hierarchy tags
update_tag(done_tag, data)
done_tag.setName(str(name))
log.debug("__ updating tag: {}".format(done_tag))
# get project and root bin object
project = get_current_project()
root_bin = project.tagsBin()
@ -125,10 +147,8 @@ def add_tags_to_workfile():
for task_type in tasks.keys():
nks_pres_tags["[Tasks]"][task_type.lower()] = {
"editable": "1",
"note": "",
"icon": {
"path": "icons:TagGood.png"
},
"note": task_type,
"icon": "icons:TagGood.png",
"metadata": {
"family": "task",
"type": task_type
@ -157,10 +177,10 @@ def add_tags_to_workfile():
# check if key is not decorated with [] so it is defined as bin
bin_find = None
pattern = re.compile(r"\[(.*)\]")
bin_finds = pattern.findall(_k)
_bin_finds = pattern.findall(_k)
# if there is available any then pop it to string
if bin_finds:
bin_find = bin_finds.pop()
if _bin_finds:
bin_find = _bin_finds.pop()
# if bin was found then create or update
if bin_find:
@ -168,7 +188,6 @@ def add_tags_to_workfile():
# first check if the bin was not already created at root level
bins = [b for b in root_bin.items()
if b.name() in str(bin_find)]
log.debug(">>> bins: {}".format(bins))
if bins:
bin = bins.pop()
@ -178,49 +197,14 @@ def add_tags_to_workfile():
bin = hiero.core.Bin(str(bin_find))
# update or create tags in the bin
for k, v in _val.items():
tags = [t for t in bin.items()
if str(k) in t.name()
if len(str(k)) == len(t.name())]
if not tags:
# create Tag obj
tag = create_tag(k, v)
# adding Tag to Bin
bin.addItem(tag)
else:
update_tag(tags.pop(), v)
for __k, __v in _val.items():
add_tag_to_bin(bin, __k, __v)
# finally add the Bin object to the root level Bin
if root_add:
# adding Tag to Root Bin
root_bin.addItem(bin)
else:
# for Tags to be created in root level Bin
# at first check if any of input data tag is not already created
tags = None
tags = [t for t in root_bin.items()
if str(_k) in t.name()]
if not tags:
# create Tag
tag = create_tag(_k, _val)
# adding Tag to Root Bin
root_bin.addItem(tag)
else:
# update Tags if they already exists
for _t in tags:
# skip bin objects
if isinstance(_t, hiero.core.Bin):
continue
# check if Hierarchy in name and skip it
# because hierarchy could be edited
if "hierarchy" in _t.name().lower():
continue
# update only non hierarchy tags
update_tag(_t, _val)
add_tag_to_bin(root_bin, _k, _val)
log.info("Default Tags were set...")

View file

@ -727,7 +727,7 @@ class WorkfileSettings(object):
log.error(msg)
nuke.message(msg)
log.warning(">> root_dict: {}".format(root_dict))
log.debug(">> root_dict: {}".format(root_dict))
# first set OCIO
if self._root_node["colorManagement"].value() \
@ -1277,6 +1277,7 @@ class ExporterReview:
def clean_nodes(self):
for node in self._temp_nodes:
nuke.delete(node)
self._temp_nodes = []
self.log.info("Deleted nodes...")
@ -1301,6 +1302,7 @@ class ExporterReviewLut(ExporterReview):
lut_style=None):
# initialize parent class
ExporterReview.__init__(self, klass, instance)
self._temp_nodes = []
# deal with now lut defined in viewer lut
if hasattr(klass, "viewer_lut_raw"):

View file

@ -2,6 +2,7 @@ import nuke
import pyblish.api
from avalon.nuke import maintained_selection
class CreateOutputNode(pyblish.api.ContextPlugin):
"""Adding output node for each ouput write node
So when latly user will want to Load .nk as LifeGroup or Precomp
@ -15,8 +16,8 @@ class CreateOutputNode(pyblish.api.ContextPlugin):
def process(self, context):
# capture selection state
with maintained_selection():
active_node = [node for inst in context[:]
for node in inst[:]
active_node = [node for inst in context
for node in inst
if "ak:family" in node.knobs()]
if active_node:

View file

@ -3,6 +3,12 @@ import pyblish.api
from avalon.nuke import lib as anlib
from openpype.hosts.nuke.api import lib as pnlib
import openpype
try:
from __builtin__ import reload
except ImportError:
from importlib import reload
reload(pnlib)

View file

@ -4,6 +4,13 @@ from avalon.nuke import lib as anlib
from openpype.hosts.nuke.api import lib as pnlib
import openpype
try:
from __builtin__ import reload
except ImportError:
from importlib import reload
reload(pnlib)
class ExtractReviewDataMov(openpype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts

View file

@ -1,3 +1,4 @@
import sys
import os
import nuke
from avalon.nuke import lib as anlib
@ -5,6 +6,10 @@ import pyblish.api
import openpype
if sys.version_info[0] >= 3:
unicode = str
class ExtractThumbnail(openpype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts

View file

@ -3,7 +3,6 @@ import pyblish.api
import os
import openpype.api as pype
from avalon.nuke import lib as anlib
reload(anlib)
class CollectWorkfile(pyblish.api.ContextPlugin):

View file

@ -69,7 +69,8 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
frames = sorted(frames)
firstframe = frames[0]
lastframe = frames[len(frames) - 1]
if lastframe < 0:
if int(lastframe) < 0:
lastframe = firstframe
return filepath, firstframe, lastframe

View file

@ -160,6 +160,11 @@ from .editorial import (
make_sequence_collection
)
from .pype_info import (
get_openpype_version,
get_build_version
)
terminal = Terminal
__all__ = [
@ -280,5 +285,8 @@ __all__ = [
"frames_to_timecode",
"make_sequence_collection",
"create_project_folders",
"get_project_basic_paths"
"get_project_basic_paths",
"get_openpype_version",
"get_build_version",
]

View file

@ -178,7 +178,9 @@ def _list_path_items(folder_structure):
if not isinstance(path, (list, tuple)):
path = [path]
output.append([key, *path])
item = [key]
item.extend(path)
output.append(item)
return output

View file

@ -9,11 +9,53 @@ import openpype.version
from openpype.settings.lib import get_local_settings
from .execute import get_pype_execute_args
from .local_settings import get_local_site_id
from .python_module_tools import import_filepath
def get_openpype_version():
"""Version of pype that is currently used."""
return openpype.version.__version__
def get_pype_version():
"""Version of pype that is currently used."""
return openpype.version.__version__
"""Backwards compatibility. Remove when 100% not used."""
print((
"Using deprecated function 'openpype.lib.pype_info.get_pype_version'"
" replace with 'openpype.lib.pype_info.get_openpype_version'."
))
return get_openpype_version()
def get_build_version():
"""OpenPype version of build."""
# Return OpenPype version if running from code
if not is_running_from_build():
return get_openpype_version()
# Import `version.py` from build directory
version_filepath = os.path.join(
os.environ["OPENPYPE_ROOT"],
"openpype",
"version.py"
)
if not os.path.exists(version_filepath):
return None
module = import_filepath(version_filepath, "openpype_build_version")
return getattr(module, "__version__", None)
def is_running_from_build():
"""Determine if current process is running from build or code.
Returns:
bool: True if running from build.
"""
executable_path = os.environ["OPENPYPE_EXECUTABLE"]
executable_filename = os.path.basename(executable_path)
if "python" in executable_filename.lower():
return False
return True
def is_running_staging():
@ -30,13 +72,13 @@ def is_running_staging():
def get_pype_info():
"""Information about currently used Pype process."""
executable_args = get_pype_execute_args()
if len(executable_args) == 1:
if is_running_from_build():
version_type = "build"
else:
version_type = "code"
return {
"version": get_pype_version(),
"version": get_openpype_version(),
"version_type": version_type,
"executable": executable_args[-1],
"pype_root": os.environ["OPENPYPE_REPOS_ROOT"],
@ -84,7 +126,7 @@ def extract_pype_info_to_file(dirpath):
filepath (str): Full path to file where data were extracted.
"""
filename = "{}_{}_{}.json".format(
get_pype_version(),
get_openpype_version(),
get_local_site_id(),
datetime.datetime.now().strftime("%y%m%d%H%M%S")
)

View file

@ -1,4 +1,6 @@
import time
import sys
import json
import traceback
from openpype_modules.ftrack.lib import ServerAction
@ -52,17 +54,80 @@ class SyncToAvalonServer(ServerAction):
return False
def launch(self, session, in_entities, event):
self.log.debug("{}: Creating job".format(self.label))
user_entity = session.query(
"User where id is {}".format(event["source"]["user"]["id"])
).one()
job_entity = session.create("Job", {
"user": user_entity,
"status": "running",
"data": json.dumps({
"description": "Sync to avalon is running..."
})
})
session.commit()
project_entity = self.get_project_from_entity(in_entities[0])
project_name = project_entity["full_name"]
try:
result = self.synchronization(event, project_name)
except Exception:
self.log.error(
"Synchronization failed due to code error", exc_info=True
)
description = "Sync to avalon Crashed (Download traceback)"
self.add_traceback_to_job(
job_entity, session, sys.exc_info(), description
)
msg = "An error has happened during synchronization"
title = "Synchronization report ({}):".format(project_name)
items = []
items.append({
"type": "label",
"value": "# {}".format(msg)
})
items.append({
"type": "label",
"value": (
"<p>Download report from job for more information.</p>"
)
})
report = {}
try:
report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items") or []
if _items:
items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event, submit_btn_label="Ok")
return {"success": True, "message": msg}
job_entity["status"] = "done"
job_entity["data"] = json.dumps({
"description": "Sync to avalon finished."
})
session.commit()
return result
def synchronization(self, event, project_name):
time_start = time.time()
self.show_message(event, "Synchronization - Preparing data", True)
# Get ftrack project
if in_entities[0].entity_type.lower() == "project":
ft_project_name = in_entities[0]["full_name"]
else:
ft_project_name = in_entities[0]["project"]["full_name"]
try:
output = self.entities_factory.launch_setup(ft_project_name)
output = self.entities_factory.launch_setup(project_name)
if output is not None:
return output
@ -72,7 +137,7 @@ class SyncToAvalonServer(ServerAction):
time_2 = time.time()
# This must happen before all filtering!!!
self.entities_factory.prepare_avalon_entities(ft_project_name)
self.entities_factory.prepare_avalon_entities(project_name)
time_3 = time.time()
self.entities_factory.filter_by_ignore_sync()
@ -118,7 +183,7 @@ class SyncToAvalonServer(ServerAction):
report = self.entities_factory.report()
if report and report.get("items"):
default_title = "Synchronization report ({}):".format(
ft_project_name
project_name
)
self.show_interface(
items=report["items"],
@ -130,46 +195,6 @@ class SyncToAvalonServer(ServerAction):
"message": "Synchronization Finished"
}
except Exception:
self.log.error(
"Synchronization failed due to code error", exc_info=True
)
msg = "An error has happened during synchronization"
title = "Synchronization report ({}):".format(ft_project_name)
items = []
items.append({
"type": "label",
"value": "# {}".format(msg)
})
items.append({
"type": "label",
"value": "## Traceback of the error"
})
items.append({
"type": "label",
"value": "<p>{}</p>".format(
str(traceback.format_exc()).replace(
"\n", "<br>").replace(
" ", "&nbsp;"
)
)
})
report = {"items": []}
try:
report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items", [])
if _items:
items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event)
return {"success": True, "message": msg}
finally:
try:
self.entities_factory.dbcon.uninstall()

View file

@ -1,4 +1,6 @@
import time
import sys
import json
import traceback
from openpype_modules.ftrack.lib import BaseAction, statics_icon
@ -30,17 +32,10 @@ class SyncToAvalonLocal(BaseAction):
- or do it manually (Not recommended)
"""
#: Action identifier.
identifier = "sync.to.avalon.local"
#: Action label.
label = "OpenPype Admin"
#: Action variant
variant = "- Sync To Avalon (Local)"
#: Action description.
description = "Send data from Ftrack to Avalon"
#: priority
priority = 200
#: roles that are allowed to register this action
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
settings_key = "sync_to_avalon_local"
@ -63,17 +58,80 @@ class SyncToAvalonLocal(BaseAction):
return is_valid
def launch(self, session, in_entities, event):
self.log.debug("{}: Creating job".format(self.label))
user_entity = session.query(
"User where id is {}".format(event["source"]["user"]["id"])
).one()
job_entity = session.create("Job", {
"user": user_entity,
"status": "running",
"data": json.dumps({
"description": "Sync to avalon is running..."
})
})
session.commit()
project_entity = self.get_project_from_entity(in_entities[0])
project_name = project_entity["full_name"]
try:
result = self.synchronization(event, project_name)
except Exception:
self.log.error(
"Synchronization failed due to code error", exc_info=True
)
description = "Sync to avalon Crashed (Download traceback)"
self.add_traceback_to_job(
job_entity, session, sys.exc_info(), description
)
msg = "An error has happened during synchronization"
title = "Synchronization report ({}):".format(project_name)
items = []
items.append({
"type": "label",
"value": "# {}".format(msg)
})
items.append({
"type": "label",
"value": (
"<p>Download report from job for more information.</p>"
)
})
report = {}
try:
report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items") or []
if _items:
items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event, submit_btn_label="Ok")
return {"success": True, "message": msg}
job_entity["status"] = "done"
job_entity["data"] = json.dumps({
"description": "Sync to avalon finished."
})
session.commit()
return result
def synchronization(self, event, project_name):
time_start = time.time()
self.show_message(event, "Synchronization - Preparing data", True)
# Get ftrack project
if in_entities[0].entity_type.lower() == "project":
ft_project_name = in_entities[0]["full_name"]
else:
ft_project_name = in_entities[0]["project"]["full_name"]
try:
output = self.entities_factory.launch_setup(ft_project_name)
output = self.entities_factory.launch_setup(project_name)
if output is not None:
return output
@ -83,7 +141,7 @@ class SyncToAvalonLocal(BaseAction):
time_2 = time.time()
# This must happen before all filtering!!!
self.entities_factory.prepare_avalon_entities(ft_project_name)
self.entities_factory.prepare_avalon_entities(project_name)
time_3 = time.time()
self.entities_factory.filter_by_ignore_sync()
@ -129,7 +187,7 @@ class SyncToAvalonLocal(BaseAction):
report = self.entities_factory.report()
if report and report.get("items"):
default_title = "Synchronization report ({}):".format(
ft_project_name
project_name
)
self.show_interface(
items=report["items"],
@ -141,46 +199,6 @@ class SyncToAvalonLocal(BaseAction):
"message": "Synchronization Finished"
}
except Exception:
self.log.error(
"Synchronization failed due to code error", exc_info=True
)
msg = "An error occurred during synchronization"
title = "Synchronization report ({}):".format(ft_project_name)
items = []
items.append({
"type": "label",
"value": "# {}".format(msg)
})
items.append({
"type": "label",
"value": "## Traceback of the error"
})
items.append({
"type": "label",
"value": "<p>{}</p>".format(
str(traceback.format_exc()).replace(
"\n", "<br>").replace(
" ", "&nbsp;"
)
)
})
report = {"items": []}
try:
report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items", [])
if _items:
items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event)
return {"success": True, "message": msg}
finally:
try:
self.entities_factory.dbcon.uninstall()

View file

@ -6,7 +6,6 @@ import subprocess
import socket
import json
import platform
import argparse
import getpass
import atexit
import time
@ -16,7 +15,9 @@ import ftrack_api
import pymongo
from openpype.lib import (
get_pype_execute_args,
OpenPypeMongoConnection
OpenPypeMongoConnection,
get_openpype_version,
get_build_version
)
from openpype_modules.ftrack import FTRACK_MODULE_DIR
from openpype_modules.ftrack.lib import credentials
@ -236,14 +237,16 @@ def main_loop(ftrack_url):
statuser_thread=statuser_thread
)
system_name, pc_name = platform.uname()[:2]
host_name = socket.gethostname()
main_info = {
"created_at": datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S"),
"Username": getpass.getuser(),
"Host Name": host_name,
"Host IP": socket.gethostbyname(host_name)
}
main_info = [
["created_at", datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S")],
["Username", getpass.getuser()],
["Host Name", host_name],
["Host IP", socket.gethostbyname(host_name)],
["OpenPype executable", get_pype_execute_args()[-1]],
["OpenPype version", get_openpype_version() or "N/A"],
["OpenPype build version", get_build_version() or "N/A"]
]
main_info_str = json.dumps(main_info)
# Main loop
while True:

View file

@ -384,8 +384,8 @@ class BaseHandler(object):
)
def show_interface(
self, items, title='',
event=None, user=None, username=None, user_id=None
self, items, title="", event=None, user=None,
username=None, user_id=None, submit_btn_label=None
):
"""
Shows interface to user
@ -428,14 +428,18 @@ class BaseHandler(object):
'applicationId=ftrack.client.web and user.id="{0}"'
).format(user_id)
event_data = {
"type": "widget",
"items": items,
"title": title
}
if submit_btn_label:
event_data["submit_button_label"] = submit_btn_label
self.session.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='widget',
items=items,
title=title
),
data=event_data,
target=target
),
on_error='ignore'
@ -443,7 +447,7 @@ class BaseHandler(object):
def show_interface_from_dict(
self, messages, title="", event=None,
user=None, username=None, user_id=None
user=None, username=None, user_id=None, submit_btn_label=None
):
if not messages:
self.log.debug("No messages to show! (messages dict is empty)")
@ -469,7 +473,9 @@ class BaseHandler(object):
message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
items.append(message)
self.show_interface(items, title, event, user, username, user_id)
self.show_interface(
items, title, event, user, username, user_id, submit_btn_label
)
def trigger_action(
self, action_name, event=None, session=None,

View file

@ -13,6 +13,11 @@ from openpype_modules.ftrack.ftrack_server.lib import (
from openpype.modules import ModulesManager
from openpype.api import Logger
from openpype.lib import (
get_openpype_version,
get_build_version
)
import ftrack_api
@ -40,9 +45,11 @@ def send_status(event):
new_event_data = {
"subprocess_id": subprocess_id,
"source": "processor",
"status_info": {
"created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
}
"status_info": [
["created_at", subprocess_started.strftime("%Y.%m.%d %H:%M:%S")],
["OpenPype version", get_openpype_version() or "N/A"],
["OpenPype build version", get_build_version() or "N/A"]
]
}
new_event = ftrack_api.event.base.Event(

View file

@ -2,6 +2,7 @@ import os
import sys
import json
import threading
import collections
import signal
import socket
import datetime
@ -165,7 +166,7 @@ class StatusFactory:
return
source = event["data"]["source"]
data = event["data"]["status_info"]
data = collections.OrderedDict(event["data"]["status_info"])
self.update_status_info(source, data)
@ -348,7 +349,7 @@ def heartbeat():
def main(args):
port = int(args[-1])
server_info = json.loads(args[-2])
server_info = collections.OrderedDict(json.loads(args[-2]))
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

View file

@ -14,7 +14,11 @@ from openpype_modules.ftrack.ftrack_server.lib import (
TOPIC_STATUS_SERVER_RESULT
)
from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info
from openpype.lib import OpenPypeMongoConnection
from openpype.lib import (
OpenPypeMongoConnection,
get_openpype_version,
get_build_version
)
from openpype.api import Logger
log = Logger.get_logger("Event storer")
@ -153,9 +157,11 @@ def send_status(event):
new_event_data = {
"subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"],
"source": "storer",
"status_info": {
"created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
}
"status_info": [
["created_at", subprocess_started.strftime("%Y.%m.%d %H:%M:%S")],
["OpenPype version", get_openpype_version() or "N/A"],
["OpenPype build version", get_build_version() or "N/A"]
]
}
new_event = ftrack_api.event.base.Event(

View file

@ -387,8 +387,6 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
self.setStyleSheet(load_stylesheet())
self.resize(self.default_width, self.default_height)
self._init_from_registry()
if self._tab_widget.count() < 1:
@ -396,16 +394,23 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
def _init_from_registry(self):
setting_registry = PythonInterpreterRegistry()
width = None
height = None
try:
width = setting_registry.get_item("width")
height = setting_registry.get_item("height")
if width is not None and height is not None:
self.resize(width, height)
except ValueError:
pass
if width is None or width < 200:
width = self.default_width
if height is None or height < 200:
height = self.default_height
self.resize(width, height)
try:
sizes = setting_registry.get_item("splitter_sizes")
if len(sizes) == len(self._widgets_splitter.sizes()):

View file

@ -195,7 +195,7 @@
"environment": {}
},
"__dynamic_keys_labels__": {
"13-0": "13.0 (Testing only)",
"13-0": "13.0",
"12-2": "12.2",
"12-0": "12.0",
"11-3": "11.3",
@ -331,7 +331,7 @@
"environment": {}
},
"__dynamic_keys_labels__": {
"13-0": "13.0 (Testing only)",
"13-0": "13.0",
"12-2": "12.2",
"12-0": "12.0",
"11-3": "11.3",

@ -1 +1 @@
Subproject commit b3e49597786c931c13bca207769727d5fc56d5f6
Subproject commit 1e94241ffe2dd7ce65ca66b08e452ffc03180235

View file

@ -55,7 +55,7 @@ openpype_console tray --debug
---
### `launch` arguments {#eventserver-arguments}
You have to set either proper environment variables to provide URL and credentials or use
option to specify them. If you use `--store_credentials` provided credentials will be stored for later use.
options to specify them.
| Argument | Description |
| --- | --- |
@ -63,16 +63,13 @@ option to specify them. If you use `--store_credentials` provided credentials wi
| `--ftrack-url` | URL to ftrack server (can be set with `FTRACK_SERVER`) |
| `--ftrack-user` | user name to log in to ftrack (can be set with `FTRACK_API_USER`) |
| `--ftrack-api-key` | ftrack api key (can be set with `FTRACK_API_KEY`) |
| `--ftrack-events-path` | path to event server plugins (can be set with `FTRACK_EVENTS_PATH`) |
| `--no-stored-credentials` | will use credential specified with options above |
| `--store-credentials` | will store credentials to file for later use |
| `--legacy` | run event server without mongo storing |
| `--clockify-api-key` | Clockify API key (can be set with `CLOCKIFY_API_KEY`) |
| `--clockify-workspace` | Clockify workspace (can be set with `CLOCKIFY_WORKSPACE`) |
To run ftrack event server:
```shell
openpype_console eventserver --ftrack-url=<url> --ftrack-user=<user> --ftrack-api-key=<key> --ftrack-events-path=<path> --no-stored-credentials --store-credentials
openpype_console eventserver --ftrack-url=<url> --ftrack-user=<user> --ftrack-api-key=<key>
```
---

View file

@ -0,0 +1,337 @@
---
id: artist_hosts_nuke_tut
title: Nuke
sidebar_label: Nuke
---
:::note
OpenPype supports Nuke version **`11.0`** and above.
:::
## OpenPype global tools
- [Set Context](artist_tools.md#set-context)
- [Work Files](artist_tools.md#workfiles)
- [Create](artist_tools.md#creator)
- [Load](artist_tools.md#loader)
- [Manage (Inventory)](artist_tools.md#inventory)
- [Publish](artist_tools.md#publisher)
- [Library Loader](artist_tools.md#library-loader)
## Nuke specific tools
<div class="row markdown">
<div class="col col--6 markdown">
### Set Frame Ranges
Use this feature whenever you are not sure the frame range is correct (a rough scripting equivalent is sketched below).
##### Result
- setting Frame Range in script settings
- setting Frame Range in viewers (timeline)
</div>
<div class="col col--6 markdown">
![Set Frame Ranges](assets/nuke_setFrameRanges.png) <!-- picture needs to be changed -->
</div>
</div>
<figure>
![Set Frame Ranges Timeline](assets/nuke_setFrameRanges_timeline.png)
<figcaption>
1. limiting to Frame Range without handles
2. **Input** handle on start
3. **Output** handle on end
</figcaption>
</figure>
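For the technically curious, here is a minimal sketch of roughly what this menu item does through Nuke's Python API. The frame numbers are placeholders; OpenPype reads the real values (including handles) from the project database.

```python
# Rough sketch only: the frame values are placeholders, OpenPype reads them
# from the project database and accounts for handles.
import nuke

frame_start, frame_end = 1001, 1100
handle_start, handle_end = 10, 10

root = nuke.root()
root["first_frame"].setValue(frame_start - handle_start)
root["last_frame"].setValue(frame_end + handle_end)

# mirror the range in every Viewer node's timeline
for viewer in nuke.allNodes("Viewer"):
    viewer["frame_range"].setValue("{}-{}".format(frame_start, frame_end))
    viewer["frame_range_lock"].setValue(True)
```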
### Set Resolution
<div class="row markdown">
<div class="col col--6 markdown">
This menu item sets the correct resolution format for you, as defined by your production (see the sketch below).
##### Result
- creates a new item in formats with the project name
- sets the new format as the one in use
</div>
<div class="col col--6 markdown">
![Set Resolution](assets/nuke_setResolution.png) <!-- picture needs to be changed -->
</div>
</div>
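Purely as an illustration (the width, height and format name below are placeholders, OpenPype takes the real values from the project settings), the menu item roughly amounts to:

```python
# Rough sketch only: resolution and format name are placeholders.
import nuke

width, height, name = 1920, 1080, "my_project_format"

# register the format and make it the script's output format
nuke.addFormat("{} {} {}".format(width, height, name))
nuke.root()["format"].setValue(name)
```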
### Set Colorspace
<div class="row markdown">
<div class="col col--6 markdown">
This menu item sets the correct colorspace definitions for you. Everything has to be configured by your production (project coordinator); see the sketch below.
##### Result
- sets the colorspace in your script settings
- sets the preview LUT in your viewers
- sets the correct colorspace on all discovered Read nodes (following the expression set in settings)
</div>
<div class="col col--6 markdown">
![Set Colorspace](assets/nuke_setColorspace.png) <!-- picture needs to be changed -->
</div>
</div>
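Under the hood this touches a handful of knobs. A simplified sketch follows; the values shown are generic examples, the real ones come from the project's color management settings and the Read node expression rules.

```python
# Rough sketch only: colorspace values are generic examples, not the
# project's actual color management settings.
import nuke

root = nuke.root()
root["colorManagement"].setValue("Nuke")   # or "OCIO"
root["workingSpaceLUT"].setValue("linear")

# preview LUT on all viewers
for viewer in nuke.allNodes("Viewer"):
    viewer["viewerProcess"].setValue("sRGB")

# colorspace on discovered Read nodes (OpenPype matches file name
# expressions from the settings instead of one value for everything)
for read in nuke.allNodes("Read"):
    read["colorspace"].setValue("linear")
```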
### Apply All Settings
<div class="row markdown">
<div class="col col--6 markdown">
It is usually enough to use this option once in a while, just to make sure the workfile has the correct properties set.
##### Result
- set Frame Ranges
- set Colorspace
- set Resolution
</div>
<div class="col col--6 markdown">
![Apply All Settings](assets/nuke_applyAllSettings.png) <!-- picture needs to be changed -->
</div>
</div>
### Build Workfile
<div class="row markdown">
<div class="col col--6 markdown">
This tool appends all available subsets into the current node graph. It looks into the database and gets the latest [versions](artist_concepts.md#version) of the available [subsets](artist_concepts.md#subset).
##### Result
- adds the latest versions of subsets (rendered image sequences) as Read nodes
- ~~adds publishable write node as `renderMain` subset~~
</div>
<div class="col col--6 markdown">
![Build First Work File](assets/nuke_buildFirstWorkfile.png)
</div>
</div>
## Nuke QuickStart
This QuickStart is a short introduction to what OpenPype can do for you. It gives an overview for compositing artists and simplifies processes that are described in more detail in other parts of the documentation.
### Launch Nuke - Shot and Task Context
OpenPype has to know what shot and task you are working on. You need to run Nuke in the context of the task, using the Ftrack Action or the OpenPype Launcher to select the task and launch Nuke.
![Run Nuke From Ftrack](assets/nuke_tut/nuke_RunNukeFtrackAction_p3.png)
![Run Nuke From Launcher](assets/nuke_tut/nuke_RunNukeLauncher_p2.png)
:::tip Admin Tip - Nuke version
You can [configure](admin_settings_project_anatomy.md#Attributes) which DCC version(s) will be available for the current project in **Studio Settings → Project → Anatomy → Attributes → Applications**
:::
### Nuke Initial setup
The OpenPype menu in Nuke shows the current context.
![Context](assets/nuke_tut/nuke_Context.png)
Launching Nuke with a context stops your previous timer and starts the clock on the shot and task you picked.
OpenPype performs the initial setup of your Nuke script. It is the same as running [Apply All Settings](artist_hosts_nuke.md#apply-all-settings) from the OpenPype menu.
- Reads the frame range and resolution from the Avalon database and sets them in the Nuke Project Settings. Creates a Viewer node, sets its range and indicates handles by In and Out points.
- Reads the color settings from the project configuration and sets them in the Nuke Project Settings and Viewer.
- Sets the project directory in the Nuke Project Settings to the Nuke script directory.
:::tip Tip - Project Settings
After Nuke starts, it automatically runs **Apply All Settings** for you. If you think the settings are wrong, just contact your supervisor, who can set them correctly in the project database.
:::
### Save the Nuke Script as a Work File
Use the OpenPype - Work Files menu to create a new Nuke script. OpenPype offers you the preconfigured naming.
![Context](assets/nuke_tut/nuke_WorkFileSaveAs.png)
The Next Available Version checks the work folder for already used versions and offers the lowest unused version number automatically.
Subversion can be used to distinguish or name versions, for example to add a shortened artist name.
More about [workfiles](artist_tools#workfiles).
:::tip Admin Tips
- **Workfile Naming**
- The [workfile naming](admin_settings_project_anatomy#templates) is configured in anatomy, see **Studio Settings → Project → Anatomy → Templates → Work**
- **Open Workfile**
- You can [configure](project_settings/settings_project_nuke#create-first-workfile) Nuke to automatically open the last version, or create a file on startup. See **Studio Settings → Project → Global → Tools → Workfiles**
- **Nuke Color Settings**
- [Color setting](project_settings/settings_project_nuke) for Nuke can be found in **Studio Settings → Project → Anatomy → Color Management and Output Formats → Nuke**
:::
### Load plate
Use Load from OpenPype menu to load any plates or renders available.
![Asset Load](assets/nuke_tut/nuke_AssetLoader.png)
Pick the plate asset, right-click and choose Load Image Sequence to create a Read node in Nuke.
Note that the Read node created by OpenPype is green. The green color indicates that the highest version of the asset is loaded. Asset versions can easily be changed with [Manage](#managing-versions). Older versions are highlighted in orange on the Read node.
![Asset Load](assets/nuke_tut/nuke_AssetLoadOutOfDate.png)
More about [Asset loader](artist_tools#loader).
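At its core, loading through the Loader is a managed Read node. A simplified, hypothetical equivalent is sketched below; the file path and frame range are made up, and the Loader additionally stores version metadata on the node so Manage can switch versions later.

```python
# Rough sketch only: the file path and frame range are made up; the Loader
# also writes version metadata to the node so Manage can switch versions.
import nuke

read = nuke.nodes.Read(
    file="/projects/demo/publish/plateMain/v001/plateMain.%04d.exr",
    first=1001,
    last=1100
)
read["colorspace"].setValue("linear")
```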
### Create Write Node
To create an OpenPype-managed Write node, select the Read node you just created and pick Create from the OpenPype menu.
In the Instance Creator, pick Create Write Render and press Create.
![OpenPype Create](assets/nuke_tut/nuke_Creator.png)
This will create a Group with a Write node inside.
![OpenPype Create](assets/nuke_tut/nuke_WriteNodeCreated.png)
:::tip Admin Tip - Configuring write node
You can configure write node parameters in **Studio Settings → Project → Anatomy → Color Management and Output Formats → Nuke → Nodes**
:::
#### What Nuke Publish Does
From the artist's perspective, Nuke publish gathers everything in the Nuke script that has the Publish checkbox turned on, exports it and raises the Nuke script (workfile) version.
The Pyblish dialog shows the progress of the process.
The left column of the dialog shows what will be published. Typically it is one or more renders or prerenders, plus the work file.
![OpenPype Publish](assets/nuke_tut/nuke_PyblishDialogNuke.png)
The right column shows the publish steps.
##### Publish steps
1. Gathers everything in the Nuke script with the Publish checkbox turned on
2. Collects all the info (from the script, the database…)
3. Validates the components to be published (checks the render range, resolution...)
4. Extracts data from the script
   - generates a thumbnail
   - creates review(s) such as h264
   - adds burnins to the review(s)
   - copies and renames components such as render(s), review(s), the Nuke script... to the publish folder
5. Integrates the components (writes to the database, sends a preview of the render to Ftrack, ...)
6. Increments the Nuke script version and cleans up the render directory
Gathering all the info and validating usually takes just a few seconds. Creating reviews for long, high-resolution shots can, however, take a significant amount of time when publishing locally.
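Each of these steps is implemented as a Pyblish plugin; collectors, validators, extractors and integrators run in that order. Below is a minimal, hypothetical validator sketch that only illustrates the plugin structure; it is not an actual OpenPype plugin and the instance data keys are assumptions.

```python
# Hypothetical validator: illustrates the Pyblish plugin structure used by
# the publish steps above; the instance data keys are assumptions.
import pyblish.api


class ValidateFrameRangeExample(pyblish.api.InstancePlugin):
    """Fail when the rendered range does not match the expected range."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Frame Range (example)"
    families = ["render"]

    def process(self, instance):
        expected = instance.data.get("frameRange")      # assumed key
        rendered = instance.data.get("renderedRange")   # assumed key
        if expected and rendered and expected != rendered:
            raise ValueError(
                "Rendered range {} does not match expected {}".format(
                    rendered, expected
                )
            )
```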
##### Pyblish Note and Intent
![Note and Intent](assets/nuke_tut/nuke_PyblishDialogNukeNoteIntent.png)
The artist can add a Note and Intent before pressing the publish button. Note and Intent are meant for easy communication between the artist and the supervisor. After publishing, they can be seen in the Ftrack notes.
##### Pyblish Checkbox
![Note and Intent](assets/nuke_tut/nuke_PyblishCheckBox.png)
The Pyblish dialog tries to pack a lot of info into a small area. One of the trickier parts is that it uses non-standard checkboxes. Some squares can be turned on and off by the artist, some are mandatory.
If you run the publish and decide not to publish the Nuke script, you can turn it off right in the Pyblish dialog by clicking on its checkbox. If you decide to render and publish the shot in a lower resolution to speed up the turnaround, you have to turn off the Write Resolution validator. If you want to use an older version of an asset (an older version of the plate...), you have to turn off the Validate Containers validator, and so on.
More info about [Using Pyblish](artist_tools#publisher)
:::tip Admin Tip - Configuring validators
You can configure Nuke validators like Output Resolution in **Studio Settings → Project → Nuke → Publish plugins**
:::
### Review
![Write Node Review](assets/nuke_tut/nuke_WriteNodeReview.png)
When you turn the review checkbox on in your OpenPype write node, here is what happens:
- OpenPype uses the current Nuke script to
  - load the render
  - optionally apply a LUT
  - render ProRes 4444 at the same resolution as your render
- FFmpeg converts the ProRes to whatever review(s) you defined
- FFmpeg adds an (optional) burnin to the review(s) from the previous step
Creating reviews is part of the publishing process. If you choose to do a local publish or to use existing frames, the review will also be processed on the artist's machine.
If you choose to publish on the farm, you will render and create the reviews on the farm.
So far there is no option to use existing frames (from your local / check render) and only do the review on the farm.
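Conceptually, the conversion and burnin steps are ordinary FFmpeg calls driven by the ExtractReview / ExtractBurnin settings. A simplified sketch of the kind of command that gets built is shown below; the codec flags and file names are generic examples, not the project's actual presets.

```python
# Rough sketch only: generic FFmpeg settings and made-up file names, not the
# actual ExtractReview presets.
import subprocess

subprocess.run(
    [
        "ffmpeg",
        "-i", "plateMain_baked.mov",   # the intermediate ProRes render
        "-c:v", "libx264",
        "-crf", "18",
        "-pix_fmt", "yuv420p",
        "plateMain_h264.mp4",
    ],
    check=True,
)
```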
More info about [configuring reviews](pype2/admin_presets_plugins#extractreview).
:::tip Admin Tip - Configuring Reviews
You can configure reviews in **Studio Settings → Project → Global → Publish plugins → ExtractReview / ExtractBurnin**
Reviews can be configured separately for each host, task, or family. For example, Maya can produce a different review than Nuke, an animation task can have a different burnin than modelling, and a plate can have a different review than a model.
:::
### Render and Publish
![OpenPype Create](assets/nuke_tut/nuke_WriteNode.png)
Let's say you want to render and publish the shot right now, with only a Read and a Write node. You need to decide whether you want to render, check the render and then publish it, or whether you want to execute the render and publish in one go.
If you wish to check your render before publishing, you can use your local machine or your farm to render the write node as you would do without OpenPype, load and check your render (the OpenPype Write node has a convenience button for that), and if happy, publish with the Use existing frames option selected in the write node to generate the review on your local machine.
If you want to render and publish on the farm in one go, run publish with the On farm option selected in the write node to render and create the review on the farm.
![Versionless](assets/nuke_tut/nuke_RenderLocalFarm.png)
### Version-less Render
![Versionless](assets/nuke_tut/nuke_versionless.png)
OpenPype is configured so your render file names have no version number until the render is fully finished and published. The main advantage is that you can keep the render from the previous version and re-render only part of the shot. With care, this is handy.
The main disadvantage of this approach is that you can render only one version of your shot at a time. Otherwise you risk partially overwriting your render before publishing copies and renames the rendered files to the properly versioned publish folder.
When making quick farm publishes, like making two versions with different color correction, care must be taken to let the first job (first version) completely finish before the second version starts rendering.
### Managing Versions
![Versionless](assets/nuke_tut/nuke_ManageVersion.png)
OpenPype checks all the assets loaded into Nuke when the script opens. Out-of-date assets are colored orange, up-to-date assets are colored green.
Use Manage to switch versions for loaded assets.
## Troubleshooting
### Fixing Validate Containers
![Versionless](assets/nuke_tut/nuke_ValidateContainers.png)
If your Pyblish dialog fails on Validate Containers, you might have an old asset loaded. Use OpenPype - Manage... to switch the asset(s) to the latest version.
### Fixing Validate Version
If your Pyblish dialog fails on Validate Version, you might be trying to publish an already published version. Raise your version in the OpenPype Work Files Save As dialog.
Or maybe you accidentally copied a write node from a different shot into your current one. Check the write publishes on the left side of the Pyblish dialog. Typically you publish only one write. Locate and delete the stray write from the other shot.

27 binary image files added (documentation screenshots, not shown).

View file

@ -51,10 +51,7 @@ There are specific launch arguments for event server. With `openpype_console eve
- **`--ftrack-user "your.username"`** : Ftrack Username
- **`--ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee"`** : User's API key
- **`--store-crededentials`** : Entered credentials will be stored for next launch with this argument _(It is not needed to enter **ftrackuser** and **ftrackapikey** args on next launch)_
- **`--no-stored-credentials`** : Stored credentials are loaded first so if you want to change credentials use this argument
- `--ftrack-url "https://yourdomain.ftrackapp.com/"` : Ftrack server URL _(it is not needed to enter if you have set `FTRACK_SERVER` in OpenPype' environments)_
- `--ftrack-events-path "//Paths/To/Events/"` : Paths to events folder. May contain multiple paths separated by `;`. _(it is not needed to enter if you have set `FTRACK_EVENTS_PATH` in OpenPype' environments)_
So if you want to use OpenPype's environments, you can launch the event server for the first time with these arguments: `openpype_console.exe eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. From then on, if everything was entered correctly, you can launch the event server with just `openpype_console.exe eventserver`.
@ -64,8 +61,6 @@ So if you want to use OpenPype's environments then you can launch event server f
- `FTRACK_API_USER` - Username _("your.username")_
- `FTRACK_API_KEY` - User's API key _("00000aaa-11bb-22cc-33dd-444444eeeee")_
- `FTRACK_SERVER` - Ftrack server url _("<https://yourdomain.ftrackapp.com/">)_
- `FTRACK_EVENTS_PATH` - Paths to events _("//Paths/To/Events/")_
We do not recommend this approach.
</TabItem>
</Tabs>
@ -103,10 +98,12 @@ Event server should **not** run more than once! It may cause major issues.
`sudo vi /opt/openpype/run_event_server.sh`
- add content to the file:
```sh
#!/usr/bin/env
export OPENPYPE_DEBUG=3
pushd /mnt/pipeline/prod/openpype-setup
. openpype_console eventserver --ftrack-user <openpype-admin-user> --ftrack-api-key <api-key>
#!/usr/bin/env bash
export OPENPYPE_DEBUG=1
export OPENPYPE_MONGO=<openpype-mongo-url>
pushd /mnt/path/to/openpype
./openpype_console eventserver --ftrack-user <openpype-admin-user> --ftrack-api-key <api-key>
```
- change file permission:
`sudo chmod 0755 /opt/openpype/run_event_server.sh`
@ -146,9 +143,11 @@ WantedBy=multi-user.target
- add content to the service file:
```sh
@echo off
set OPENPYPE_DEBUG=3
pushd \\path\to\file\
openpype_console.exe eventserver --ftrack-user <openpype-admin-user> --ftrack-api-key <api-key>
set OPENPYPE_DEBUG=1
set OPENPYPE_MONGO=<openpype-mongo-url>
pushd \\path\to\openpype
openpype_console.exe eventserver --ftrack-user <openpype-admin-user> --ftrack-api-key <api-key>
```
- download and install `nssm.cc`
- create Windows service according to nssm.cc manual

View file

@ -18,7 +18,7 @@ module.exports = {
label: "Integrations",
items: [
"artist_hosts_hiero",
"artist_hosts_nuke",
"artist_hosts_nuke_tut",
"artist_hosts_maya",
"artist_hosts_blender",
"artist_hosts_harmony",