[Automated] Merged develop into main

.github/workflows/prerelease.yml (vendored, 6 changed lines)
@@ -43,11 +43,7 @@ jobs:
         uses: heinrichreimer/github-changelog-generator-action@v2.2
         with:
           token: ${{ secrets.ADMIN_TOKEN }}
-          breakingLabel: '**💥 Breaking**'
-          enhancementLabel: '**🚀 Enhancements**'
-          bugsLabel: '**🐛 Bug fixes**'
-          deprecatedLabel: '**⚠️ Deprecations**'
-          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"### 🆕 New features","labels":["feature"]},}'
+          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"feature":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"feature":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"feature":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"feature":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, }'
           issues: false
           issuesWoLabels: false
           sinceTag: "3.0.0"

@ -10,16 +10,16 @@ log = Logger().get_logger(__name__)
|
|||
|
||||
def tag_data():
|
||||
return {
|
||||
"Retiming": {
|
||||
"editable": "1",
|
||||
"note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)", # noqa
|
||||
"icon": "retiming.png",
|
||||
"metadata": {
|
||||
"family": "retiming",
|
||||
"marginIn": 1,
|
||||
"marginOut": 1
|
||||
}
|
||||
},
|
||||
# "Retiming": {
|
||||
# "editable": "1",
|
||||
# "note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)", # noqa
|
||||
# "icon": "retiming.png",
|
||||
# "metadata": {
|
||||
# "family": "retiming",
|
||||
# "marginIn": 1,
|
||||
# "marginOut": 1
|
||||
# }
|
||||
# },
|
||||
"[Lenses]": {
|
||||
"Set lense here": {
|
||||
"editable": "1",
|
||||
|
|
@ -31,15 +31,15 @@ def tag_data():
|
|||
}
|
||||
}
|
||||
},
|
||||
"NukeScript": {
|
||||
"editable": "1",
|
||||
"note": "Collecting track items to Nuke scripts.",
|
||||
"icon": "icons:TagNuke.png",
|
||||
"metadata": {
|
||||
"family": "nukescript",
|
||||
"subset": "main"
|
||||
}
|
||||
},
|
||||
# "NukeScript": {
|
||||
# "editable": "1",
|
||||
# "note": "Collecting track items to Nuke scripts.",
|
||||
# "icon": "icons:TagNuke.png",
|
||||
# "metadata": {
|
||||
# "family": "nukescript",
|
||||
# "subset": "main"
|
||||
# }
|
||||
# },
|
||||
"Comment": {
|
||||
"editable": "1",
|
||||
"note": "Comment on a shot.",
|
||||
|
|
@ -78,8 +78,7 @@ def update_tag(tag, data):
|
|||
# set icon if any available in input data
|
||||
if data.get("icon"):
|
||||
tag.setIcon(str(data["icon"]))
|
||||
# set note description of tag
|
||||
tag.setNote(data["note"])
|
||||
|
||||
# get metadata of tag
|
||||
mtd = tag.metadata()
|
||||
# get metadata key from data
|
||||
|
|
@ -97,6 +96,9 @@ def update_tag(tag, data):
|
|||
"tag.{}".format(str(k)),
|
||||
str(v)
|
||||
)
|
||||
|
||||
# set note description of tag
|
||||
tag.setNote(str(data["note"]))
|
||||
return tag
|
||||
|
||||
|
||||
|
|
@ -106,6 +108,26 @@ def add_tags_to_workfile():
|
|||
"""
|
||||
from .lib import get_current_project
|
||||
|
||||
def add_tag_to_bin(root_bin, name, data):
|
||||
# for Tags to be created in root level Bin
|
||||
# at first check if any of input data tag is not already created
|
||||
done_tag = next((t for t in root_bin.items()
|
||||
if str(name) in t.name()), None)
|
||||
|
||||
if not done_tag:
|
||||
# create Tag
|
||||
tag = create_tag(name, data)
|
||||
tag.setName(str(name))
|
||||
|
||||
log.debug("__ creating tag: {}".format(tag))
|
||||
# adding Tag to Root Bin
|
||||
root_bin.addItem(tag)
|
||||
else:
|
||||
# update only non hierarchy tags
|
||||
update_tag(done_tag, data)
|
||||
done_tag.setName(str(name))
|
||||
log.debug("__ updating tag: {}".format(done_tag))
|
||||
|
||||
# get project and root bin object
|
||||
project = get_current_project()
|
||||
root_bin = project.tagsBin()
|
||||
|
|
@ -125,10 +147,8 @@ def add_tags_to_workfile():
|
|||
for task_type in tasks.keys():
|
||||
nks_pres_tags["[Tasks]"][task_type.lower()] = {
|
||||
"editable": "1",
|
||||
"note": "",
|
||||
"icon": {
|
||||
"path": "icons:TagGood.png"
|
||||
},
|
||||
"note": task_type,
|
||||
"icon": "icons:TagGood.png",
|
||||
"metadata": {
|
||||
"family": "task",
|
||||
"type": task_type
|
||||
|
|
@ -157,10 +177,10 @@ def add_tags_to_workfile():
|
|||
# check if key is not decorated with [] so it is defined as bin
|
||||
bin_find = None
|
||||
pattern = re.compile(r"\[(.*)\]")
|
||||
bin_finds = pattern.findall(_k)
|
||||
_bin_finds = pattern.findall(_k)
|
||||
# if there is available any then pop it to string
|
||||
if bin_finds:
|
||||
bin_find = bin_finds.pop()
|
||||
if _bin_finds:
|
||||
bin_find = _bin_finds.pop()
|
||||
|
||||
# if bin was found then create or update
|
||||
if bin_find:
|
||||
|
|
@ -168,7 +188,6 @@ def add_tags_to_workfile():
|
|||
# first check if in root lever is not already created bins
|
||||
bins = [b for b in root_bin.items()
|
||||
if b.name() in str(bin_find)]
|
||||
log.debug(">>> bins: {}".format(bins))
|
||||
|
||||
if bins:
|
||||
bin = bins.pop()
|
||||
|
|
@ -178,49 +197,14 @@ def add_tags_to_workfile():
|
|||
bin = hiero.core.Bin(str(bin_find))
|
||||
|
||||
# update or create tags in the bin
|
||||
for k, v in _val.items():
|
||||
tags = [t for t in bin.items()
|
||||
if str(k) in t.name()
|
||||
if len(str(k)) == len(t.name())]
|
||||
if not tags:
|
||||
# create Tag obj
|
||||
tag = create_tag(k, v)
|
||||
|
||||
# adding Tag to Bin
|
||||
bin.addItem(tag)
|
||||
else:
|
||||
update_tag(tags.pop(), v)
|
||||
for __k, __v in _val.items():
|
||||
add_tag_to_bin(bin, __k, __v)
|
||||
|
||||
# finally add the Bin object to the root level Bin
|
||||
if root_add:
|
||||
# adding Tag to Root Bin
|
||||
root_bin.addItem(bin)
|
||||
else:
|
||||
# for Tags to be created in root level Bin
|
||||
# at first check if any of input data tag is not already created
|
||||
tags = None
|
||||
tags = [t for t in root_bin.items()
|
||||
if str(_k) in t.name()]
|
||||
|
||||
if not tags:
|
||||
# create Tag
|
||||
tag = create_tag(_k, _val)
|
||||
|
||||
# adding Tag to Root Bin
|
||||
root_bin.addItem(tag)
|
||||
else:
|
||||
# update Tags if they already exists
|
||||
for _t in tags:
|
||||
# skip bin objects
|
||||
if isinstance(_t, hiero.core.Bin):
|
||||
continue
|
||||
|
||||
# check if Hierarchy in name and skip it
|
||||
# because hierarchy could be edited
|
||||
if "hierarchy" in _t.name().lower():
|
||||
continue
|
||||
|
||||
# update only non hierarchy tags
|
||||
update_tag(_t, _val)
|
||||
add_tag_to_bin(root_bin, _k, _val)
|
||||
|
||||
log.info("Default Tags were set...")
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ class ShaderDefinitionsEditor(QtWidgets.QWidget):
|
|||
|
||||
self.setObjectName("shaderDefinitionEditor")
|
||||
self.setWindowTitle("OpenPype shader name definition editor")
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
self.setWindowFlags(QtCore.Qt.Window)
|
||||
self.setParent(parent)
|
||||
|
|
|
|||
|
|
@ -287,7 +287,7 @@ def script_name():
|
|||
|
||||
def add_button_write_to_read(node):
|
||||
name = "createReadNode"
|
||||
label = "Cread Read From Rendered"
|
||||
label = "Create Read From Rendered"
|
||||
value = "import write_to_read;write_to_read.write_to_read(nuke.thisNode())"
|
||||
knob = nuke.PyScript_Knob(name, label, value)
|
||||
knob.clearFlag(nuke.STARTLINE)
|
||||
|
|
@ -727,7 +727,7 @@ class WorkfileSettings(object):
|
|||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
log.warning(">> root_dict: {}".format(root_dict))
|
||||
log.debug(">> root_dict: {}".format(root_dict))
|
||||
|
||||
# first set OCIO
|
||||
if self._root_node["colorManagement"].value() \
|
||||
|
|
@ -1277,6 +1277,7 @@ class ExporterReview:
|
|||
def clean_nodes(self):
|
||||
for node in self._temp_nodes:
|
||||
nuke.delete(node)
|
||||
self._temp_nodes = []
|
||||
self.log.info("Deleted nodes...")
|
||||
|
||||
|
||||
|
|
@ -1301,6 +1302,7 @@ class ExporterReviewLut(ExporterReview):
|
|||
lut_style=None):
|
||||
# initialize parent class
|
||||
ExporterReview.__init__(self, klass, instance)
|
||||
self._temp_nodes = []
|
||||
|
||||
# deal with now lut defined in viewer lut
|
||||
if hasattr(klass, "viewer_lut_raw"):
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import nuke
|
|||
import pyblish.api
|
||||
from avalon.nuke import maintained_selection
|
||||
|
||||
|
||||
class CreateOutputNode(pyblish.api.ContextPlugin):
|
||||
"""Adding output node for each ouput write node
|
||||
So when latly user will want to Load .nk as LifeGroup or Precomp
|
||||
|
|
@ -15,8 +16,8 @@ class CreateOutputNode(pyblish.api.ContextPlugin):
|
|||
def process(self, context):
|
||||
# capture selection state
|
||||
with maintained_selection():
|
||||
active_node = [node for inst in context[:]
|
||||
for node in inst[:]
|
||||
active_node = [node for inst in context
|
||||
for node in inst
|
||||
if "ak:family" in node.knobs()]
|
||||
|
||||
if active_node:
|
||||
|
|
|
|||
|
|
@ -3,6 +3,12 @@ import pyblish.api
|
|||
from avalon.nuke import lib as anlib
|
||||
from openpype.hosts.nuke.api import lib as pnlib
|
||||
import openpype
|
||||
|
||||
try:
|
||||
from __builtin__ import reload
|
||||
except ImportError:
|
||||
from importlib import reload
|
||||
|
||||
reload(pnlib)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -4,6 +4,13 @@ from avalon.nuke import lib as anlib
|
|||
from openpype.hosts.nuke.api import lib as pnlib
|
||||
import openpype
|
||||
|
||||
try:
|
||||
from __builtin__ import reload
|
||||
except ImportError:
|
||||
from importlib import reload
|
||||
|
||||
reload(pnlib)
|
||||
|
||||
|
||||
class ExtractReviewDataMov(openpype.api.Extractor):
|
||||
"""Extracts movie and thumbnail with baked in luts
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import sys
|
||||
import os
|
||||
import nuke
|
||||
from avalon.nuke import lib as anlib
|
||||
|
|
@ -5,6 +6,10 @@ import pyblish.api
|
|||
import openpype
|
||||
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
unicode = str
|
||||
|
||||
|
||||
class ExtractThumbnail(openpype.api.Extractor):
|
||||
"""Extracts movie and thumbnail with baked in luts
|
||||
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
|
|||
hosts = ["nuke", "nukeassist"]
|
||||
|
||||
# presets
|
||||
sync_workfile_version = False
|
||||
sync_workfile_version_on_families = []
|
||||
|
||||
def process(self, context):
|
||||
asset_data = io.find_one({
|
||||
|
|
@ -120,11 +120,12 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
|
|||
# sync workfile version
|
||||
_families_test = [family] + families
|
||||
self.log.debug("__ _families_test: `{}`".format(_families_test))
|
||||
if not next((f for f in _families_test
|
||||
if "prerender" in f),
|
||||
None) and self.sync_workfile_version:
|
||||
# get version to instance for integration
|
||||
instance.data['version'] = instance.context.data['version']
|
||||
for family_test in _families_test:
|
||||
if family_test in self.sync_workfile_version_on_families:
|
||||
self.log.debug("Syncing version with workfile for '{}'"
|
||||
.format(family_test))
|
||||
# get version to instance for integration
|
||||
instance.data['version'] = instance.context.data['version']
|
||||
|
||||
instance.data.update({
|
||||
"subset": subset,
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import pyblish.api
|
|||
import os
|
||||
import openpype.api as pype
|
||||
from avalon.nuke import lib as anlib
|
||||
reload(anlib)
|
||||
|
||||
|
||||
class CollectWorkfile(pyblish.api.ContextPlugin):
|
||||
|
|
|
|||
|
|
@ -69,7 +69,8 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
|
|||
frames = sorted(frames)
|
||||
firstframe = frames[0]
|
||||
lastframe = frames[len(frames) - 1]
|
||||
if lastframe < 0:
|
||||
|
||||
if int(lastframe) < 0:
|
||||
lastframe = firstframe
|
||||
|
||||
return filepath, firstframe, lastframe
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ Provides:
|
|||
import os
|
||||
import json
|
||||
import clique
|
||||
import tempfile
|
||||
|
||||
import pyblish.api
|
||||
from avalon import io
|
||||
|
|
@ -94,7 +95,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
|
|||
instance.data["families"] = families
|
||||
instance.data["version"] = \
|
||||
self._get_last_version(asset, subset) + 1
|
||||
instance.data["stagingDir"] = task_dir
|
||||
instance.data["stagingDir"] = tempfile.mkdtemp()
|
||||
instance.data["source"] = "webpublisher"
|
||||
|
||||
# to store logging info into DB openpype.webpublishes
|
||||
|
|
@ -113,6 +114,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
|
|||
instance.data["frameEnd"] = \
|
||||
instance.data["representations"][0]["frameEnd"]
|
||||
else:
|
||||
instance.data["frameStart"] = 0
|
||||
instance.data["frameEnd"] = 1
|
||||
instance.data["representations"] = self._get_single_repre(
|
||||
task_dir, task_data["files"], tags
|
||||
)
|
||||
|
|
@ -174,7 +177,11 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
|
|||
(family, [families], subset_template_name, tags) tuple
|
||||
AssertionError if not matching family found
|
||||
"""
|
||||
task_obj = settings.get(task_type)
|
||||
task_type = task_type.lower()
|
||||
lower_cased_task_types = {}
|
||||
for t_type, task in settings.items():
|
||||
lower_cased_task_types[t_type.lower()] = task
|
||||
task_obj = lower_cased_task_types.get(task_type)
|
||||
assert task_obj, "No family configuration for '{}'".format(task_type)
|
||||
|
||||
found_family = None
|
||||
|
|
|
|||
|
|
@ -59,6 +59,11 @@ from .python_module_tools import (
|
|||
import_module_from_dirpath
|
||||
)
|
||||
|
||||
from .profiles_filtering import (
|
||||
compile_list_of_regexes,
|
||||
filter_profiles
|
||||
)
|
||||
|
||||
from .avalon_context import (
|
||||
CURRENT_DOC_SCHEMAS,
|
||||
PROJECT_NAME_ALLOWED_SYMBOLS,
|
||||
|
|
@ -118,13 +123,9 @@ from .applications import (
|
|||
prepare_host_environments,
|
||||
prepare_context_environments,
|
||||
get_app_environments_for_context,
|
||||
apply_project_environments_value,
|
||||
|
||||
compile_list_of_regexes
|
||||
apply_project_environments_value
|
||||
)
|
||||
|
||||
from .profiles_filtering import filter_profiles
|
||||
|
||||
from .plugin_tools import (
|
||||
TaskNotSetError,
|
||||
get_subset_name,
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ from . import (
|
|||
PypeLogger,
|
||||
Anatomy
|
||||
)
|
||||
from .profiles_filtering import filter_profiles
|
||||
from .local_settings import get_openpype_username
|
||||
from .avalon_context import (
|
||||
get_workdir_data,
|
||||
|
|
@ -1244,6 +1245,9 @@ def prepare_context_environments(data):
|
|||
asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
|
||||
task_info = asset_tasks.get(task_name) or {}
|
||||
task_type = task_info.get("type")
|
||||
# Temp solution how to pass task type to `_prepare_last_workfile`
|
||||
data["task_type"] = task_type
|
||||
|
||||
workfile_template_key = get_workfile_template_key(
|
||||
task_type,
|
||||
app.host_name,
|
||||
|
|
@ -1320,13 +1324,14 @@ def _prepare_last_workfile(data, workdir, workfile_template_key):
|
|||
workdir_data = copy.deepcopy(_workdir_data)
|
||||
project_name = data["project_name"]
|
||||
task_name = data["task_name"]
|
||||
task_type = data["task_type"]
|
||||
start_last_workfile = should_start_last_workfile(
|
||||
project_name, app.host_name, task_name
|
||||
project_name, app.host_name, task_name, task_type
|
||||
)
|
||||
data["start_last_workfile"] = start_last_workfile
|
||||
|
||||
workfile_startup = should_workfile_tool_start(
|
||||
project_name, app.host_name, task_name
|
||||
project_name, app.host_name, task_name, task_type
|
||||
)
|
||||
data["workfile_startup"] = workfile_startup
|
||||
|
||||
|
|
@ -1375,54 +1380,8 @@ def _prepare_last_workfile(data, workdir, workfile_template_key):
|
|||
data["last_workfile_path"] = last_workfile_path
|
||||
|
||||
|
||||
def get_option_from_settings(
|
||||
startup_presets, host_name, task_name, default_output
|
||||
):
|
||||
host_name_lowered = host_name.lower()
|
||||
task_name_lowered = task_name.lower()
|
||||
|
||||
max_points = 2
|
||||
matching_points = -1
|
||||
matching_item = None
|
||||
for item in startup_presets:
|
||||
hosts = item.get("hosts") or tuple()
|
||||
tasks = item.get("tasks") or tuple()
|
||||
|
||||
hosts_lowered = tuple(_host_name.lower() for _host_name in hosts)
|
||||
# Skip item if has set hosts and current host is not in
|
||||
if hosts_lowered and host_name_lowered not in hosts_lowered:
|
||||
continue
|
||||
|
||||
tasks_lowered = tuple(_task_name.lower() for _task_name in tasks)
|
||||
# Skip item if has set tasks and current task is not in
|
||||
if tasks_lowered:
|
||||
task_match = False
|
||||
for task_regex in compile_list_of_regexes(tasks_lowered):
|
||||
if re.match(task_regex, task_name_lowered):
|
||||
task_match = True
|
||||
break
|
||||
|
||||
if not task_match:
|
||||
continue
|
||||
|
||||
points = int(bool(hosts_lowered)) + int(bool(tasks_lowered))
|
||||
if points > matching_points:
|
||||
matching_item = item
|
||||
matching_points = points
|
||||
|
||||
if matching_points == max_points:
|
||||
break
|
||||
|
||||
if matching_item is not None:
|
||||
output = matching_item.get("enabled")
|
||||
if output is None:
|
||||
output = default_output
|
||||
return output
|
||||
return default_output
|
||||
|
||||
|
||||
def should_start_last_workfile(
|
||||
project_name, host_name, task_name, default_output=False
|
||||
project_name, host_name, task_name, task_type, default_output=False
|
||||
):
|
||||
"""Define if host should start last version workfile if possible.
|
||||
|
||||
|
|
@ -1444,7 +1403,7 @@ def should_start_last_workfile(
|
|||
"""
|
||||
|
||||
project_settings = get_project_settings(project_name)
|
||||
startup_presets = (
|
||||
profiles = (
|
||||
project_settings
|
||||
["global"]
|
||||
["tools"]
|
||||
|
|
@ -1452,15 +1411,27 @@ def should_start_last_workfile(
|
|||
["last_workfile_on_startup"]
|
||||
)
|
||||
|
||||
if not startup_presets:
|
||||
if not profiles:
|
||||
return default_output
|
||||
|
||||
return get_option_from_settings(
|
||||
startup_presets, host_name, task_name, default_output)
|
||||
filter_data = {
|
||||
"tasks": task_name,
|
||||
"task_types": task_type,
|
||||
"hosts": host_name
|
||||
}
|
||||
matching_item = filter_profiles(profiles, filter_data)
|
||||
|
||||
output = None
|
||||
if matching_item:
|
||||
output = matching_item.get("enabled")
|
||||
|
||||
if output is None:
|
||||
return default_output
|
||||
return output
|
||||
|
||||
|
||||
def should_workfile_tool_start(
|
||||
project_name, host_name, task_name, default_output=False
|
||||
project_name, host_name, task_name, task_type, default_output=False
|
||||
):
|
||||
"""Define if host should start workfile tool at host launch.
|
||||
|
||||
|
|
@ -1482,7 +1453,7 @@ def should_workfile_tool_start(
|
|||
"""
|
||||
|
||||
project_settings = get_project_settings(project_name)
|
||||
startup_presets = (
|
||||
profiles = (
|
||||
project_settings
|
||||
["global"]
|
||||
["tools"]
|
||||
|
|
@ -1490,27 +1461,20 @@ def should_workfile_tool_start(
|
|||
["open_workfile_tool_on_startup"]
|
||||
)
|
||||
|
||||
if not startup_presets:
|
||||
if not profiles:
|
||||
return default_output
|
||||
|
||||
return get_option_from_settings(
|
||||
startup_presets, host_name, task_name, default_output)
|
||||
filter_data = {
|
||||
"tasks": task_name,
|
||||
"task_types": task_type,
|
||||
"hosts": host_name
|
||||
}
|
||||
matching_item = filter_profiles(profiles, filter_data)
|
||||
|
||||
output = None
|
||||
if matching_item:
|
||||
output = matching_item.get("enabled")
|
||||
|
||||
def compile_list_of_regexes(in_list):
|
||||
"""Convert strings in entered list to compiled regex objects."""
|
||||
regexes = list()
|
||||
if not in_list:
|
||||
return regexes
|
||||
|
||||
for item in in_list:
|
||||
if not item:
|
||||
continue
|
||||
try:
|
||||
regexes.append(re.compile(item))
|
||||
except TypeError:
|
||||
print((
|
||||
"Invalid type \"{}\" value \"{}\"."
|
||||
" Expected string based object. Skipping."
|
||||
).format(str(type(item)), str(item)))
|
||||
return regexes
|
||||
if output is None:
|
||||
return default_output
|
||||
return output
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ import functools
|
|||
|
||||
from openpype.settings import get_project_settings
|
||||
from .anatomy import Anatomy
|
||||
from .profiles_filtering import filter_profiles
|
||||
|
||||
# avalon module is not imported at the top
|
||||
# - may not be in path at the time of pype.lib initialization
|
||||
|
|
@ -453,8 +454,6 @@ def get_workfile_template_key(
|
|||
if not profiles:
|
||||
return default
|
||||
|
||||
from .profiles_filtering import filter_profiles
|
||||
|
||||
profile_filter = {
|
||||
"task_types": task_type,
|
||||
"hosts": host_name
|
||||
|
|
@ -791,7 +790,9 @@ class BuildWorkfile:
|
|||
current_task_name = avalon.io.Session["AVALON_TASK"]
|
||||
|
||||
# Load workfile presets for task
|
||||
self.build_presets = self.get_build_presets(current_task_name)
|
||||
self.build_presets = self.get_build_presets(
|
||||
current_task_name, current_asset_entity
|
||||
)
|
||||
|
||||
# Skip if there are any presets for task
|
||||
if not self.build_presets:
|
||||
|
|
@ -875,7 +876,7 @@ class BuildWorkfile:
|
|||
return loaded_containers
|
||||
|
||||
@with_avalon
|
||||
def get_build_presets(self, task_name):
|
||||
def get_build_presets(self, task_name, asset_doc):
|
||||
""" Returns presets to build workfile for task name.
|
||||
|
||||
Presets are loaded for current project set in
|
||||
|
|
@ -889,30 +890,33 @@ class BuildWorkfile:
|
|||
(dict): preset per entered task name
|
||||
"""
|
||||
host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1]
|
||||
presets = get_project_settings(avalon.io.Session["AVALON_PROJECT"])
|
||||
project_settings = get_project_settings(
|
||||
avalon.io.Session["AVALON_PROJECT"]
|
||||
)
|
||||
|
||||
host_settings = project_settings.get(host_name) or {}
|
||||
# Get presets for host
|
||||
wb_settings = presets.get(host_name, {}).get("workfile_builder")
|
||||
|
||||
wb_settings = host_settings.get("workfile_builder")
|
||||
if not wb_settings:
|
||||
# backward compatibility
|
||||
wb_settings = presets.get(host_name, {}).get("workfile_build")
|
||||
wb_settings = host_settings.get("workfile_build") or {}
|
||||
|
||||
builder_presets = wb_settings.get("profiles")
|
||||
builder_profiles = wb_settings.get("profiles")
|
||||
if not builder_profiles:
|
||||
return None
|
||||
|
||||
if not builder_presets:
|
||||
return
|
||||
|
||||
task_name_low = task_name.lower()
|
||||
per_task_preset = None
|
||||
for preset in builder_presets:
|
||||
preset_tasks = preset.get("tasks") or []
|
||||
preset_tasks_low = [task.lower() for task in preset_tasks]
|
||||
if task_name_low in preset_tasks_low:
|
||||
per_task_preset = preset
|
||||
break
|
||||
|
||||
return per_task_preset
|
||||
task_type = (
|
||||
asset_doc
|
||||
.get("data", {})
|
||||
.get("tasks", {})
|
||||
.get(task_name, {})
|
||||
.get("type")
|
||||
)
|
||||
filter_data = {
|
||||
"task_types": task_type,
|
||||
"tasks": task_name
|
||||
}
|
||||
return filter_profiles(builder_profiles, filter_data)
|
||||
|
||||
def _filter_build_profiles(self, build_profiles, loaders_by_name):
|
||||
""" Filter build profiles by loaders and prepare process data.
|
||||
|
|
|
|||
|
|
@ -35,7 +35,8 @@ def get_subset_name(
|
|||
project_name=None,
|
||||
host_name=None,
|
||||
default_template=None,
|
||||
dynamic_data=None
|
||||
dynamic_data=None,
|
||||
dbcon=None
|
||||
):
|
||||
if not family:
|
||||
return ""
|
||||
|
|
@ -46,13 +47,42 @@ def get_subset_name(
|
|||
# Use only last part of class family value split by dot (`.`)
|
||||
family = family.rsplit(".", 1)[-1]
|
||||
|
||||
if project_name is None:
|
||||
import avalon.api
|
||||
|
||||
project_name = avalon.api.Session["AVALON_PROJECT"]
|
||||
|
||||
# Function should expect asset document instead of asset id
|
||||
# - that way `dbcon` is not needed
|
||||
if dbcon is None:
|
||||
from avalon.api import AvalonMongoDB
|
||||
|
||||
dbcon = AvalonMongoDB()
|
||||
dbcon.Session["AVALON_PROJECT"] = project_name
|
||||
|
||||
dbcon.install()
|
||||
|
||||
asset_doc = dbcon.find_one(
|
||||
{
|
||||
"type": "asset",
|
||||
"_id": asset_id
|
||||
},
|
||||
{
|
||||
"data.tasks": True
|
||||
}
|
||||
)
|
||||
asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
|
||||
task_info = asset_tasks.get(task_name) or {}
|
||||
task_type = task_info.get("type")
|
||||
|
||||
# Get settings
|
||||
tools_settings = get_project_settings(project_name)["global"]["tools"]
|
||||
profiles = tools_settings["creator"]["subset_name_profiles"]
|
||||
filtering_criteria = {
|
||||
"families": family,
|
||||
"hosts": host_name,
|
||||
"tasks": task_name
|
||||
"tasks": task_name,
|
||||
"task_types": task_type
|
||||
}
|
||||
|
||||
matching_profile = filter_profiles(profiles, filtering_criteria)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,28 @@
|
|||
import re
|
||||
import logging
|
||||
from .applications import compile_list_of_regexes
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def compile_list_of_regexes(in_list):
|
||||
"""Convert strings in entered list to compiled regex objects."""
|
||||
regexes = list()
|
||||
if not in_list:
|
||||
return regexes
|
||||
|
||||
for item in in_list:
|
||||
if not item:
|
||||
continue
|
||||
try:
|
||||
regexes.append(re.compile(item))
|
||||
except TypeError:
|
||||
print((
|
||||
"Invalid type \"{}\" value \"{}\"."
|
||||
" Expected string based object. Skipping."
|
||||
).format(str(type(item)), str(item)))
|
||||
return regexes
|
||||
|
||||
|
||||
def _profile_exclusion(matching_profiles, logger):
|
||||
"""Find out most matching profile byt host, task and family match.
|
||||
|
||||
|
|
|
|||
|
|
@ -58,6 +58,17 @@ def is_running_from_build():
|
|||
return True
|
||||
|
||||
|
||||
def is_running_staging():
|
||||
"""Currently used OpenPype is staging version.
|
||||
|
||||
Returns:
|
||||
bool: True if openpype version containt 'staging'.
|
||||
"""
|
||||
if "staging" in get_openpype_version():
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def get_pype_info():
|
||||
"""Information about currently used Pype process."""
|
||||
executable_args = get_pype_execute_args()
|
||||
|
|
|
|||
|
|
@ -417,7 +417,6 @@ class OpenPypeModule:
|
|||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def connect_with_modules(self, enabled_modules):
|
||||
"""Connect with other enabled modules."""
|
||||
pass
|
||||
|
|
@ -438,10 +437,6 @@ class OpenPypeAddOn(OpenPypeModule):
|
|||
"""Initialization is not be required for most of addons."""
|
||||
pass
|
||||
|
||||
def connect_with_modules(self, enabled_modules):
|
||||
"""Do not require to implement connection with modules for addon."""
|
||||
pass
|
||||
|
||||
|
||||
class ModulesManager:
|
||||
"""Manager of Pype modules helps to load and prepare them to work.
|
||||
|
|
|
|||
|
|
@ -60,7 +60,7 @@ class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
|
|||
from Qt import QtGui
|
||||
|
||||
self.libraryloader = app.Window(
|
||||
icon=QtGui.QIcon(resources.pype_icon_filepath()),
|
||||
icon=QtGui.QIcon(resources.get_openpype_icon_filepath()),
|
||||
show_projects=True,
|
||||
show_libraries=True
|
||||
)
|
||||
|
|
@ -71,9 +71,6 @@ class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
|
|||
exc_info=True
|
||||
)
|
||||
|
||||
def connect_with_modules(self, _enabled_modules):
|
||||
return
|
||||
|
||||
def webserver_initialization(self, server_manager):
|
||||
"""Implementation of IWebServerRoutes interface."""
|
||||
|
||||
|
|
|
|||
|
|
@ -94,9 +94,6 @@ class ClockifyModule(
|
|||
"server": [CLOCKIFY_FTRACK_SERVER_PATH]
|
||||
}
|
||||
|
||||
def connect_with_modules(self, *_a, **_kw):
|
||||
return
|
||||
|
||||
def clockify_timer_stopped(self):
|
||||
self.bool_timer_run = False
|
||||
# Call `ITimersManager` method
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ class MessageWidget(QtWidgets.QWidget):
|
|||
super(MessageWidget, self).__init__()
|
||||
|
||||
# Icon
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
|
||||
self.setWindowFlags(
|
||||
|
|
@ -90,7 +90,7 @@ class ClockifySettings(QtWidgets.QWidget):
|
|||
self.validated = False
|
||||
|
||||
# Icon
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
|
||||
self.setWindowTitle("Clockify settings")
|
||||
|
|
|
|||
|
|
@ -26,9 +26,6 @@ class DeadlineModule(OpenPypeModule, IPluginPaths):
|
|||
"not specified. Disabling module."))
|
||||
return
|
||||
|
||||
def connect_with_modules(self, *_a, **_kw):
|
||||
return
|
||||
|
||||
def get_plugin_paths(self):
|
||||
"""Deadline plugin paths."""
|
||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
|
|
|
|||
|
|
@ -23,6 +23,8 @@ class DeleteOldVersions(BaseAction):
|
|||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
|
||||
|
||||
settings_key = "delete_old_versions"
|
||||
|
||||
dbcon = AvalonMongoDB()
|
||||
|
||||
inteface_title = "Choose your preferences"
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ class CredentialsDialog(QtWidgets.QDialog):
|
|||
self._is_logged = False
|
||||
self._in_advance_mode = False
|
||||
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
|
||||
self.setWindowFlags(
|
||||
|
|
|
|||
|
|
@ -40,10 +40,6 @@ class LogViewModule(OpenPypeModule, ITrayModule):
|
|||
def tray_exit(self):
|
||||
return
|
||||
|
||||
def connect_with_modules(self, _enabled_modules):
|
||||
"""Nothing special."""
|
||||
return
|
||||
|
||||
def _show_logs_gui(self):
|
||||
if self.window:
|
||||
self.window.show()
|
||||
|
|
|
|||
|
|
@ -54,9 +54,6 @@ class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
|
|||
"""Nothing special for Muster."""
|
||||
return
|
||||
|
||||
def connect_with_modules(self, *_a, **_kw):
|
||||
return
|
||||
|
||||
# Definition of Tray menu
|
||||
def tray_menu(self, parent):
|
||||
"""Add **change credentials** option to tray menu."""
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ class MusterLogin(QtWidgets.QWidget):
|
|||
self.module = module
|
||||
|
||||
# Icon
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
|
||||
self.setWindowFlags(
|
||||
|
|
|
|||
|
|
@ -17,9 +17,6 @@ class ProjectManagerAction(OpenPypeModule, ITrayAction):
|
|||
# Tray attributes
|
||||
self.project_manager_window = None
|
||||
|
||||
def connect_with_modules(self, *_a, **_kw):
|
||||
return
|
||||
|
||||
def tray_init(self):
|
||||
"""Initialization in tray implementation of ITrayAction."""
|
||||
self.create_project_manager_window()
|
||||
|
|
|
|||
|
|
@ -18,9 +18,6 @@ class PythonInterpreterAction(OpenPypeModule, ITrayAction):
|
|||
if self._interpreter_window is not None:
|
||||
self._interpreter_window.save_registry()
|
||||
|
||||
def connect_with_modules(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def create_interpreter_window(self):
|
||||
"""Initializa Settings Qt window."""
|
||||
if self._interpreter_window:
|
||||
|
|
|
|||
|
|
@ -331,7 +331,7 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
|
|||
super(PythonInterpreterWidget, self).__init__(parent)
|
||||
|
||||
self.setWindowTitle("OpenPype Console")
|
||||
self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath()))
|
||||
self.setWindowIcon(QtGui.QIcon(resources.get_openpype_icon_filepath()))
|
||||
|
||||
self.ansi_escape = re.compile(
|
||||
r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]"
|
||||
|
|
@ -387,8 +387,6 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
|
|||
|
||||
self.setStyleSheet(load_stylesheet())
|
||||
|
||||
self.resize(self.default_width, self.default_height)
|
||||
|
||||
self._init_from_registry()
|
||||
|
||||
if self._tab_widget.count() < 1:
|
||||
|
|
@ -396,16 +394,23 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
|
|||
|
||||
def _init_from_registry(self):
|
||||
setting_registry = PythonInterpreterRegistry()
|
||||
|
||||
width = None
|
||||
height = None
|
||||
try:
|
||||
width = setting_registry.get_item("width")
|
||||
height = setting_registry.get_item("height")
|
||||
if width is not None and height is not None:
|
||||
self.resize(width, height)
|
||||
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if width is None or width < 200:
|
||||
width = self.default_width
|
||||
|
||||
if height is None or height < 200:
|
||||
height = self.default_height
|
||||
|
||||
self.resize(width, height)
|
||||
|
||||
try:
|
||||
sizes = setting_registry.get_item("splitter_sizes")
|
||||
if len(sizes) == len(self._widgets_splitter.sizes()):
|
||||
|
|
|
|||
|
|
@ -19,9 +19,6 @@ class SettingsAction(OpenPypeModule, ITrayAction):
|
|||
# Tray attributes
|
||||
self.settings_window = None
|
||||
|
||||
def connect_with_modules(self, *_a, **_kw):
|
||||
return
|
||||
|
||||
def tray_init(self):
|
||||
"""Initialization in tray implementation of ITrayAction."""
|
||||
self.create_settings_window()
|
||||
|
|
@ -84,9 +81,6 @@ class LocalSettingsAction(OpenPypeModule, ITrayAction):
|
|||
self.settings_window = None
|
||||
self._first_trigger = True
|
||||
|
||||
def connect_with_modules(self, *_a, **_kw):
|
||||
return
|
||||
|
||||
def tray_init(self):
|
||||
"""Initialization in tray implementation of ITrayAction."""
|
||||
self.create_settings_window()
|
||||
|
|
|
|||
|
|
@ -17,10 +17,6 @@ class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths):
|
|||
slack_settings = modules_settings[self.name]
|
||||
self.enabled = slack_settings["enabled"]
|
||||
|
||||
def connect_with_modules(self, _enabled_modules):
|
||||
"""Nothing special."""
|
||||
return
|
||||
|
||||
def get_launch_hook_paths(self):
|
||||
"""Implementation of `ILaunchHookPaths`."""
|
||||
return os.path.join(SLACK_MODULE_DIR, "launch_hooks")
|
||||
|
|
|
|||
|
|
@ -680,9 +680,6 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
|
||||
return sites
|
||||
|
||||
def connect_with_modules(self, *_a, **kw):
|
||||
return
|
||||
|
||||
def tray_init(self):
|
||||
"""
|
||||
Actual initialization of Sync Server.
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ class SyncServerWindow(QtWidgets.QDialog):
|
|||
self.setFocusPolicy(QtCore.Qt.StrongFocus)
|
||||
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath()))
|
||||
self.setWindowIcon(QtGui.QIcon(resources.get_openpype_icon_filepath()))
|
||||
self.resize(1450, 700)
|
||||
|
||||
self.timer = QtCore.QTimer()
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ class WidgetUserIdle(QtWidgets.QWidget):
|
|||
|
||||
self.module = module
|
||||
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowCloseButtonHint
|
||||
|
|
|
|||
|
|
@ -71,7 +71,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
|
|||
self._set_representations(contexts)
|
||||
|
||||
self.setWindowTitle("OpenPype - Deliver versions")
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
|
||||
self.setWindowFlags(
|
||||
|
|
|
|||
|
|
@ -106,12 +106,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"family", "hierarchy", "task", "username"
|
||||
]
|
||||
default_template_name = "publish"
|
||||
template_name_profiles = None
|
||||
|
||||
# suffix to denote temporary files, use without '.'
|
||||
TMP_FILE_EXT = 'tmp'
|
||||
|
||||
# file_url : file_size of all published and uploaded files
|
||||
integrated_file_sizes = {}
|
||||
|
||||
TMP_FILE_EXT = 'tmp' # suffix to denote temporary files, use without '.'
|
||||
# Attributes set by settings
|
||||
template_name_profiles = None
|
||||
subset_grouping_profiles = None
|
||||
|
||||
def process(self, instance):
|
||||
self.integrated_file_sizes = {}
|
||||
|
|
@ -165,10 +169,24 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
hierarchy = "/".join(parents)
|
||||
anatomy_data["hierarchy"] = hierarchy
|
||||
|
||||
# Make sure task name in anatomy data is same as on instance.data
|
||||
task_name = instance.data.get("task")
|
||||
if task_name:
|
||||
anatomy_data["task"] = task_name
|
||||
else:
|
||||
# Just set 'task_name' variable to context task
|
||||
task_name = anatomy_data["task"]
|
||||
|
||||
# Find task type for current task name
|
||||
# - this should be already prepared on instance
|
||||
asset_tasks = (
|
||||
asset_entity.get("data", {}).get("tasks")
|
||||
) or {}
|
||||
task_info = asset_tasks.get(task_name) or {}
|
||||
task_type = task_info.get("type")
|
||||
instance.data["task_type"] = task_type
|
||||
|
||||
# Fill family in anatomy data
|
||||
anatomy_data["family"] = instance.data.get("family")
|
||||
|
||||
stagingdir = instance.data.get("stagingDir")
|
||||
|
|
@ -298,14 +316,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
else:
|
||||
orig_transfers = list(instance.data['transfers'])
|
||||
|
||||
task_name = io.Session.get("AVALON_TASK")
|
||||
family = self.main_family_from_instance(instance)
|
||||
|
||||
key_values = {"families": family,
|
||||
"tasks": task_name,
|
||||
"hosts": instance.data["anatomyData"]["app"]}
|
||||
profile = filter_profiles(self.template_name_profiles, key_values,
|
||||
logger=self.log)
|
||||
key_values = {
|
||||
"families": family,
|
||||
"tasks": task_name,
|
||||
"hosts": instance.context.data["hostName"],
|
||||
"task_types": task_type
|
||||
}
|
||||
profile = filter_profiles(
|
||||
self.template_name_profiles,
|
||||
key_values,
|
||||
logger=self.log
|
||||
)
|
||||
|
||||
template_name = "publish"
|
||||
if profile:
|
||||
|
|
@ -730,6 +753,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
|
||||
subset = io.find_one({"_id": _id})
|
||||
|
||||
# QUESTION Why is changing of group and updating it's
|
||||
# families in 'get_subset'?
|
||||
self._set_subset_group(instance, subset["_id"])
|
||||
|
||||
# Update families on subset.
|
||||
|
|
@ -753,54 +778,74 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
subset_id (str): DB's subset _id
|
||||
|
||||
"""
|
||||
# add group if available
|
||||
integrate_new_sett = (instance.context.data["project_settings"]
|
||||
["global"]
|
||||
["publish"]
|
||||
["IntegrateAssetNew"])
|
||||
|
||||
profiles = integrate_new_sett["subset_grouping_profiles"]
|
||||
|
||||
filtering_criteria = {
|
||||
"families": instance.data["family"],
|
||||
"hosts": instance.data["anatomyData"]["app"],
|
||||
"tasks": instance.data["anatomyData"]["task"] or
|
||||
io.Session["AVALON_TASK"]
|
||||
}
|
||||
matching_profile = filter_profiles(profiles, filtering_criteria)
|
||||
|
||||
filled_template = None
|
||||
if matching_profile:
|
||||
template = matching_profile["template"]
|
||||
fill_pairs = (
|
||||
("family", filtering_criteria["families"]),
|
||||
("task", filtering_criteria["tasks"]),
|
||||
("host", filtering_criteria["hosts"]),
|
||||
("subset", instance.data["subset"]),
|
||||
("renderlayer", instance.data.get("renderlayer"))
|
||||
)
|
||||
fill_pairs = prepare_template_data(fill_pairs)
|
||||
|
||||
try:
|
||||
filled_template = \
|
||||
format_template_with_optional_keys(fill_pairs, template)
|
||||
except KeyError:
|
||||
keys = []
|
||||
if fill_pairs:
|
||||
keys = fill_pairs.keys()
|
||||
|
||||
msg = "Subset grouping failed. " \
|
||||
"Only {} are expected in Settings".format(','.join(keys))
|
||||
self.log.warning(msg)
|
||||
|
||||
if instance.data.get("subsetGroup") or filled_template:
|
||||
subset_group = instance.data.get('subsetGroup') or filled_template
|
||||
# Fist look into instance data
|
||||
subset_group = instance.data.get("subsetGroup")
|
||||
if not subset_group:
|
||||
subset_group = self._get_subset_group(instance)
|
||||
|
||||
if subset_group:
|
||||
io.update_many({
|
||||
'type': 'subset',
|
||||
'_id': io.ObjectId(subset_id)
|
||||
}, {'$set': {'data.subsetGroup': subset_group}})
|
||||
|
||||
def _get_subset_group(self, instance):
|
||||
"""Look into subset group profiles set by settings.
|
||||
|
||||
Attribute 'subset_grouping_profiles' is defined by OpenPype settings.
|
||||
"""
|
||||
# Skip if 'subset_grouping_profiles' is empty
|
||||
if not self.subset_grouping_profiles:
|
||||
return None
|
||||
|
||||
# QUESTION
|
||||
# - is there a chance that task name is not filled in anatomy
|
||||
# data?
|
||||
# - should we use context task in that case?
|
||||
task_name = (
|
||||
instance.data["anatomyData"]["task"]
|
||||
or io.Session["AVALON_TASK"]
|
||||
)
|
||||
task_type = instance.data["task_type"]
|
||||
filtering_criteria = {
|
||||
"families": instance.data["family"],
|
||||
"hosts": instance.context.data["hostName"],
|
||||
"tasks": task_name,
|
||||
"task_types": task_type
|
||||
}
|
||||
matching_profile = filter_profiles(
|
||||
self.subset_grouping_profiles,
|
||||
filtering_criteria
|
||||
)
|
||||
# Skip if there is not matchin profile
|
||||
if not matching_profile:
|
||||
return None
|
||||
|
||||
filled_template = None
|
||||
template = matching_profile["template"]
|
||||
fill_pairs = (
|
||||
("family", filtering_criteria["families"]),
|
||||
("task", filtering_criteria["tasks"]),
|
||||
("host", filtering_criteria["hosts"]),
|
||||
("subset", instance.data["subset"]),
|
||||
("renderlayer", instance.data.get("renderlayer"))
|
||||
)
|
||||
fill_pairs = prepare_template_data(fill_pairs)
|
||||
|
||||
try:
|
||||
filled_template = \
|
||||
format_template_with_optional_keys(fill_pairs, template)
|
||||
except KeyError:
|
||||
keys = []
|
||||
if fill_pairs:
|
||||
keys = fill_pairs.keys()
|
||||
|
||||
msg = "Subset grouping failed. " \
|
||||
"Only {} are expected in Settings".format(','.join(keys))
|
||||
self.log.warning(msg)
|
||||
|
||||
return filled_template
|
||||
|
||||
def create_version(self, subset, version_number, data=None):
|
||||
""" Copy given source to destination
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
|
||||
from openpype.lib.pype_info import is_running_staging
|
||||
|
||||
RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
@ -30,22 +30,22 @@ def get_liberation_font_path(bold=False, italic=False):
|
|||
return font_path
|
||||
|
||||
|
||||
def pype_icon_filepath(debug=None):
|
||||
if debug is None:
|
||||
debug = bool(os.getenv("OPENPYPE_DEV"))
|
||||
def get_openpype_icon_filepath(staging=None):
|
||||
if staging is None:
|
||||
staging = is_running_staging()
|
||||
|
||||
if debug:
|
||||
if staging:
|
||||
icon_file_name = "openpype_icon_staging.png"
|
||||
else:
|
||||
icon_file_name = "openpype_icon.png"
|
||||
return get_resource("icons", icon_file_name)
|
||||
|
||||
|
||||
def pype_splash_filepath(debug=None):
|
||||
if debug is None:
|
||||
debug = bool(os.getenv("OPENPYPE_DEV"))
|
||||
def get_openpype_splash_filepath(staging=None):
|
||||
if staging is None:
|
||||
staging = is_running_staging()
|
||||
|
||||
if debug:
|
||||
if staging:
|
||||
splash_file_name = "openpype_splash_staging.png"
|
||||
else:
|
||||
splash_file_name = "openpype_splash.png"
|
||||
|
|
|
|||
|
|
@ -209,6 +209,7 @@
|
|||
"standalonepublisher"
|
||||
],
|
||||
"families": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": true,
|
||||
"advanced_filtering": []
|
||||
|
|
@ -221,6 +222,7 @@
|
|||
"matchmove",
|
||||
"shot"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": false,
|
||||
"advanced_filtering": []
|
||||
|
|
@ -232,6 +234,7 @@
|
|||
"families": [
|
||||
"plate"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": false,
|
||||
"advanced_filtering": [
|
||||
|
|
@ -256,6 +259,7 @@
|
|||
"rig",
|
||||
"camera"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": true,
|
||||
"advanced_filtering": []
|
||||
|
|
@ -267,6 +271,7 @@
|
|||
"families": [
|
||||
"renderPass"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": false,
|
||||
"advanced_filtering": []
|
||||
|
|
@ -276,6 +281,7 @@
|
|||
"tvpaint"
|
||||
],
|
||||
"families": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": true,
|
||||
"advanced_filtering": []
|
||||
|
|
@ -288,6 +294,7 @@
|
|||
"write",
|
||||
"render"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": false,
|
||||
"advanced_filtering": [
|
||||
|
|
@ -307,6 +314,7 @@
|
|||
"render",
|
||||
"workfile"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"add_ftrack_family": true,
|
||||
"advanced_filtering": []
|
||||
|
|
|
|||
|
|
@ -152,6 +152,7 @@
|
|||
{
|
||||
"families": [],
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template_name": "publish"
|
||||
},
|
||||
|
|
@ -162,6 +163,7 @@
|
|||
"prerender"
|
||||
],
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template_name": "render"
|
||||
}
|
||||
|
|
@ -170,6 +172,7 @@
|
|||
{
|
||||
"families": [],
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template": ""
|
||||
}
|
||||
|
|
@ -205,6 +208,7 @@
|
|||
{
|
||||
"families": [],
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template": "{family}{Variant}"
|
||||
},
|
||||
|
|
@ -213,6 +217,7 @@
|
|||
"render"
|
||||
],
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template": "{family}{Task}{Variant}"
|
||||
},
|
||||
|
|
@ -224,6 +229,7 @@
|
|||
"hosts": [
|
||||
"tvpaint"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template": "{family}{Task}_{Render_layer}_{Render_pass}"
|
||||
},
|
||||
|
|
@ -235,6 +241,7 @@
|
|||
"hosts": [
|
||||
"tvpaint"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template": "{family}{Task}"
|
||||
},
|
||||
|
|
@ -245,6 +252,7 @@
|
|||
"hosts": [
|
||||
"aftereffects"
|
||||
],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"template": "render{Task}{Variant}"
|
||||
}
|
||||
|
|
@ -261,6 +269,7 @@
|
|||
"last_workfile_on_startup": [
|
||||
{
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"enabled": true
|
||||
}
|
||||
|
|
@ -268,6 +277,7 @@
|
|||
"open_workfile_tool_on_startup": [
|
||||
{
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"enabled": false
|
||||
}
|
||||
|
|
|
|||
|
|
@ -520,6 +520,7 @@
|
|||
"workfile_build": {
|
||||
"profiles": [
|
||||
{
|
||||
"task_types": [],
|
||||
"tasks": [
|
||||
"Lighting"
|
||||
],
|
||||
|
|
|
|||
|
|
@ -30,7 +30,13 @@
|
|||
},
|
||||
"publish": {
|
||||
"PreCollectNukeInstances": {
|
||||
"sync_workfile_version": true
|
||||
"sync_workfile_version_on_families": [
|
||||
"nukenodes",
|
||||
"camera",
|
||||
"gizmo",
|
||||
"source",
|
||||
"render"
|
||||
]
|
||||
},
|
||||
"ValidateContainers": {
|
||||
"enabled": true,
|
||||
|
|
@ -163,6 +169,7 @@
|
|||
"builder_on_start": false,
|
||||
"profiles": [
|
||||
{
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"current_context": [
|
||||
{
|
||||
|
|
|
|||
|
|
@ -7,8 +7,9 @@
|
|||
"profiles": [
|
||||
{
|
||||
"families": [],
|
||||
"tasks": [],
|
||||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"channel_messages": []
|
||||
}
|
||||
]
|
||||
|
|
|
|||
|
|
@ -195,7 +195,7 @@
|
|||
"environment": {}
|
||||
},
|
||||
"__dynamic_keys_labels__": {
|
||||
"13-0": "13.0 (Testing only)",
|
||||
"13-0": "13.0",
|
||||
"12-2": "12.2",
|
||||
"12-0": "12.0",
|
||||
"11-3": "11.3",
|
||||
|
|
@ -331,7 +331,7 @@
|
|||
"environment": {}
|
||||
},
|
||||
"__dynamic_keys_labels__": {
|
||||
"13-0": "13.0 (Testing only)",
|
||||
"13-0": "13.0",
|
||||
"12-2": "12.2",
|
||||
"12-0": "12.0",
|
||||
"11-3": "11.3",
|
||||
|
|
|
|||
|
|
@ -650,6 +650,11 @@
|
|||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
|
|
|
|||
|
|
@ -52,18 +52,23 @@
|
|||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"type": "hosts-enum",
|
||||
"key": "hosts",
|
||||
"label": "Host names",
|
||||
"multiselection": true
|
||||
},
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"type": "separator"
|
||||
},
|
||||
|
|
|
|||
|
|
@ -502,6 +502,11 @@
|
|||
"label": "Hosts",
|
||||
"multiselection": true
|
||||
},
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
|
|
@ -543,6 +548,11 @@
|
|||
"label": "Hosts",
|
||||
"multiselection": true
|
||||
},
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
|
|
|
|||
|
|
@ -40,6 +40,11 @@
|
|||
"label": "Hosts",
|
||||
"multiselection": true
|
||||
},
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
|
|
@ -126,9 +131,14 @@
|
|||
"unreal"
|
||||
]
|
||||
},
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
|
|
@ -161,9 +171,15 @@
|
|||
"nuke"
|
||||
]
|
||||
},
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "list",
|
||||
"object_type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
|
|
|
|||
|
|
@ -16,9 +16,30 @@
|
|||
"is_group": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "sync_workfile_version",
|
||||
"label": "Sync Version from workfile"
|
||||
"type": "enum",
|
||||
"key": "sync_workfile_version_on_families",
|
||||
"label": "Sync workfile version for families",
|
||||
"multiselection": true,
|
||||
"enum_items": [
|
||||
{
|
||||
"nukenodes": "nukenodes"
|
||||
},
|
||||
{
|
||||
"camera": "camera"
|
||||
},
|
||||
{
|
||||
"gizmo": "gizmo"
|
||||
},
|
||||
{
|
||||
"source": "source"
|
||||
},
|
||||
{
|
||||
"prerender": "prerender"
|
||||
},
|
||||
{
|
||||
"render": "render"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
|||
|
|
@ -11,9 +11,14 @@
|
|||
"object_type": {
|
||||
"type": "dict",
|
||||
"children": [
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
|
|
@ -94,4 +99,4 @@
|
|||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -55,9 +55,14 @@
|
|||
"object_type": {
|
||||
"type": "dict",
|
||||
"children": [
|
||||
{
|
||||
"key": "task_types",
|
||||
"label": "Task types",
|
||||
"type": "task-types-enum"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
|
|
|
|||
|
|
@ -91,4 +91,4 @@ def load_stylesheet():
|
|||
|
||||
|
||||
def app_icon_path():
|
||||
return resources.pype_icon_filepath()
|
||||
return resources.get_openpype_icon_filepath()
|
||||
|
|
|
|||
|
|
@ -84,7 +84,7 @@ class ApplicationAction(api.Action):
|
|||
|
||||
def _show_message_box(self, title, message, details=None):
|
||||
dialog = QtWidgets.QMessageBox()
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
dialog.setWindowIcon(icon)
|
||||
dialog.setStyleSheet(style.load_stylesheet())
|
||||
dialog.setWindowTitle(title)
|
||||
|
|
|
|||
|
|
@ -261,7 +261,7 @@ class LauncherWindow(QtWidgets.QDialog):
|
|||
self.setFocusPolicy(QtCore.Qt.StrongFocus)
|
||||
self.setAttribute(QtCore.Qt.WA_DeleteOnClose, False)
|
||||
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ class ProjectManagerWindow(QtWidgets.QWidget):
|
|||
self._user_passed = False
|
||||
|
||||
self.setWindowTitle("OpenPype Project Manager")
|
||||
self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath()))
|
||||
self.setWindowIcon(QtGui.QIcon(resources.get_openpype_icon_filepath()))
|
||||
|
||||
# Top part of window
|
||||
top_part_widget = QtWidgets.QWidget(self)
|
||||
|
|
|
|||
|
|
@ -10,4 +10,4 @@ def load_stylesheet():
|
|||
|
||||
|
||||
def app_icon_path():
|
||||
return resources.pype_icon_filepath()
|
||||
return resources.get_openpype_icon_filepath()
|
||||
|
|
|
|||
|
|
@ -231,7 +231,7 @@ def main():
|
|||
qt_app = QtWidgets.QApplication([])
|
||||
# app.setQuitOnLastWindowClosed(False)
|
||||
qt_app.setStyleSheet(style.load_stylesheet())
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
qt_app.setWindowIcon(icon)
|
||||
|
||||
def signal_handler(sig, frame):
|
||||
|
|
|
|||
|
|
@ -214,7 +214,7 @@ class PypeInfoWidget(QtWidgets.QWidget):
|
|||
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
self.setWindowTitle("OpenPype info")
|
||||
|
||||
|
|
|
|||
|
|
@ -200,7 +200,7 @@ class SystemTrayIcon(QtWidgets.QSystemTrayIcon):
|
|||
doubleclick_time_ms = 100
|
||||
|
||||
def __init__(self, parent):
|
||||
icon = QtGui.QIcon(resources.pype_icon_filepath())
|
||||
icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
|
||||
|
||||
super(SystemTrayIcon, self).__init__(icon, parent)
|
||||
|
||||
|
|
@ -308,7 +308,7 @@ class PypeTrayApplication(QtWidgets.QApplication):
|
|||
splash_widget.hide()
|
||||
|
||||
def set_splash(self):
|
||||
splash_pix = QtGui.QPixmap(resources.pype_splash_filepath())
|
||||
splash_pix = QtGui.QPixmap(resources.get_openpype_splash_filepath())
|
||||
splash = QtWidgets.QSplashScreen(splash_pix)
|
||||
splash.setMask(splash_pix.mask())
|
||||
splash.setEnabled(False)
|
||||
|
|
|
|||
|
|
@ -376,6 +376,9 @@ class TasksWidget(QtWidgets.QWidget):
|
|||
task (str): Name of the task to select.
|
||||
|
||||
"""
|
||||
task_view_model = self._tasks_view.model()
|
||||
if not task_view_model:
|
||||
return
|
||||
|
||||
# Clear selection
|
||||
selection_model = self._tasks_view.selectionModel()
|
||||
|
|
@ -383,8 +386,8 @@ class TasksWidget(QtWidgets.QWidget):
|
|||
|
||||
# Select the task
|
||||
mode = selection_model.Select | selection_model.Rows
|
||||
for row in range(self._tasks_model.rowCount()):
|
||||
index = self._tasks_model.index(row, 0)
|
||||
for row in range(task_view_model.rowCount()):
|
||||
index = task_view_model.index(row, 0)
|
||||
name = index.data(TASK_NAME_ROLE)
|
||||
if name == task_name:
|
||||
selection_model.select(index, mode)
|
||||
|
|
|
|||
|
|
@@ -1 +1 @@
-Subproject commit b3e49597786c931c13bca207769727d5fc56d5f6
+Subproject commit 1e94241ffe2dd7ce65ca66b08e452ffc03180235

website/docs/artist_hosts_nuke_tut.md (new file, +337 lines)

@@ -0,0 +1,337 @@
---
id: artist_hosts_nuke_tut
title: Nuke
sidebar_label: Nuke
---

:::note
OpenPype supports Nuke version **`11.0`** and above.
:::

## OpenPype global tools

- [Set Context](artist_tools.md#set-context)
- [Work Files](artist_tools.md#workfiles)
- [Create](artist_tools.md#creator)
- [Load](artist_tools.md#loader)
- [Manage (Inventory)](artist_tools.md#inventory)
- [Publish](artist_tools.md#publisher)
- [Library Loader](artist_tools.md#library-loader)

## Nuke specific tools

<div class="row markdown">
<div class="col col--6 markdown">

### Set Frame Ranges

Use this feature in case you are not sure the frame range is correct.

##### Result

- setting Frame Range in script settings
- setting Frame Range in viewers (timeline)

</div>
<div class="col col--6 markdown">

 <!-- picture needs to be changed -->

</div>
</div>

<figure>



<figcaption>

1. limiting to Frame Range without handles
2. **Input** handle on start
3. **Output** handle on end

</figcaption>
</figure>
|
||||
|
||||
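Under the hood this boils down to a handful of root and Viewer knobs. The following is a minimal hand-written sketch of the idea, not OpenPype's actual implementation; the numbers are placeholders for values that normally come from the project database, and which of the two ranges carries the handles depends on your setup.

```python
import nuke

# placeholder values - in OpenPype these come from the shot's database entry
first, last, handle_start, handle_end = 1001, 1100, 10, 10

root = nuke.root()
root["first_frame"].setValue(first - handle_start)
root["last_frame"].setValue(last + handle_end)
root["lock_range"].setValue(True)

# mirror the range on every Viewer; the in/out points then mark the handles
for viewer in nuke.allNodes("Viewer"):
    viewer["frame_range"].setValue("{}-{}".format(first, last))
    viewer["frame_range_lock"].setValue(True)
```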
### Set Resolution

<div class="row markdown">
<div class="col col--6 markdown">

This menu item will set the correct resolution format for you, as defined by your production.

##### Result

- creates a new item in the script formats, named after the project
- sets the new format as the one in use

</div>
<div class="col col--6 markdown">

 <!-- picture needs to be changed -->

</div>
</div>
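Conceptually this just registers a new format in the script and assigns it as the root format. A rough sketch with placeholder values; the exact string accepted by `nuke.addFormat` is an assumption here, so treat it as illustrative only.

```python
import nuke

# placeholder values - the real ones are read from the project settings
width, height, pixel_aspect, name = 4096, 2160, 1.0, "my_project"

nuke.addFormat("{} {} {} {}".format(width, height, pixel_aspect, name))
nuke.root()["format"].setValue(name)
```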
### Set Colorspace

<div class="row markdown">
<div class="col col--6 markdown">

This menu item will set the correct colorspace definitions for you. Everything has to be configured by your production (project coordinator).

##### Result

- sets the Colorspace in your script settings
- sets the preview LUT in your viewers
- sets the correct colorspace on all discovered Read nodes (following the expression set in settings)

</div>
<div class="col col--6 markdown">

 <!-- picture needs to be changed -->

</div>
</div>
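In Nuke terms this touches the script's color management knobs and the loaded Read nodes. A simplified sketch follows; the knob values are examples only and the real ones come from the project settings.

```python
import nuke

root = nuke.root()
root["colorManagement"].setValue("OCIO")
root["OCIO_config"].setValue("aces_1.1")           # example config
root["workingSpaceLUT"].setValue("ACES - ACEScg")   # example working space
root["monitorLut"].setValue("ACES/sRGB")            # example viewer LUT

# retarget already loaded plates; OpenPype matches Read nodes by configurable rules
for read in nuke.allNodes("Read"):
    read["colorspace"].setValue("ACES - ACEScct")   # example plate colorspace
```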
### Apply All Settings

<div class="row markdown">
<div class="col col--6 markdown">

It is usually enough to run this option once in a while, just to make sure the workfile has the correct properties set.

##### Result

- sets the Frame Ranges
- sets the Colorspace
- sets the Resolution

</div>
<div class="col col--6 markdown">

 <!-- picture needs to be changed -->

</div>
</div>

### Build Workfile

<div class="row markdown">
<div class="col col--6 markdown">

This tool will append all available subsets to the current node graph. It looks into the database and gets the latest [versions](artist_concepts.md#version) of the available [subsets](artist_concepts.md#subset).

##### Result

- adds the latest versions of the subsets (rendered image sequences) as Read nodes
- ~~adds a publishable write node as the `renderMain` subset~~

</div>
<div class="col col--6 markdown">

</div>
</div>
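For a sense of what this does to the script, here is a conceptual sketch: for every latest version found in the database, a Read node is dropped into the node graph. The paths and ranges below are placeholders, not a real database query.

```python
import nuke

# placeholder results of a "latest version per subset" query
latest_versions = [
    {"path": "/proj/sh010/publish/plate/plateMain/v003/sh010_plateMain.%04d.exr",
     "first": 1001, "last": 1100},
    {"path": "/proj/sh010/publish/render/renderCompMain/v012/sh010_renderCompMain.%04d.exr",
     "first": 1001, "last": 1100},
]

for subset in latest_versions:
    read = nuke.createNode("Read", inpanel=False)
    read["file"].setValue(subset["path"])
    read["first"].setValue(subset["first"])
    read["last"].setValue(subset["last"])
    read["origfirst"].setValue(subset["first"])
    read["origlast"].setValue(subset["last"])
```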
## Nuke QuickStart

This QuickStart is a short introduction to what OpenPype can do for you. It gives an overview for compositing artists and simplifies processes that are described in more detail in other parts of the documentation.

### Launch Nuke - Shot and Task Context
OpenPype has to know which shot and task you are working on. You need to run Nuke in the context of the task, using the Ftrack Action or the OpenPype Launcher to select the task and run Nuke.

:::tip Admin Tip - Nuke version
You can [configure](admin_settings_project_anatomy.md#Attributes) which DCC version(s) will be available for the current project in **Studio Settings → Project → Anatomy → Attributes → Applications**
:::
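In practice, running "in context" means the launcher exports the context as environment variables before Nuke starts. A small sketch of what can be read back from the session; the variable names follow the Avalon/OpenPype convention and are an assumption here.

```python
import os

project = os.environ.get("AVALON_PROJECT")  # e.g. "my_project"
asset = os.environ.get("AVALON_ASSET")      # e.g. "sh010"
task = os.environ.get("AVALON_TASK")        # e.g. "compositing"

print("Working on {} / {} / {}".format(project, asset, task))
```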
### Nuke Initial setup
The Nuke OpenPype menu shows the current context.

Launching Nuke with a context stops your running timer and starts the clock on the shot and task you picked.

OpenPype also performs the initial setup of your Nuke script. It is the same as running [Apply All Settings](artist_hosts_nuke.md#apply-all-settings) from the OpenPype menu:

- Reads the frame range and resolution from the Avalon database and sets them in the Nuke Project Settings.
  Creates a Viewer node, sets its range and indicates the handles with In and Out points.

- Reads the color settings from the project configuration and sets them in the Nuke Project Settings and the Viewer.

- Sets the project directory in the Nuke Project Settings to the Nuke script directory (see the sketch below).
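The last point is a common Nuke idiom. One way it could be expressed (OpenPype's own implementation may differ) is to bind the root project directory to the script location:

```python
import nuke

# evaluate the script directory dynamically whenever the knob is used
nuke.root()["project_directory"].setValue("[python {nuke.script_directory()}]")
```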
:::tip Tip - Project Settings
After Nuke starts, it automatically runs **Apply All Settings** for you. If you think the settings are wrong, contact your supervisor, who can correct them for you in the project database.
:::

### Save Nuke script – the Work File
Use the OpenPype - Work Files menu to create a new Nuke script. OpenPype offers you preconfigured naming.

The Next Available Version option checks the work folder for already used versions and automatically offers the lowest unused version number.

A subversion can be used to distinguish or name versions, for example to add a shortened artist name.

More about [workfiles](artist_tools#workfiles).

:::tip Admin Tips
- **Workfile Naming**

  - The [workfile naming](admin_settings_project_anatomy#templates) is configured in the anatomy, see **Studio Settings → Project → Anatomy → Templates → Work**

- **Open Workfile**

  - You can [configure](project_settings/settings_project_nuke#create-first-workfile) Nuke to automatically open the last version, or create a file on startup. See **Studio Settings → Project → Global → Tools → Workfiles**

- **Nuke Color Settings**

  - The [color settings](project_settings/settings_project_nuke) for Nuke can be found in **Studio Settings → Project → Anatomy → Color Management and Output Formats → Nuke**
:::
### Load plate
Use Load from the OpenPype menu to load any plates or renders available.

Pick the plate asset, right-click and choose Load Image Sequence to create a Read node in Nuke.

Note that the Read node created by OpenPype is green. The green color indicates that the highest version of the asset is loaded. Asset versions can easily be changed with [Manage](#managing-versions). Older versions are highlighted in orange on the Read node.

More about the [Asset loader](artist_tools#loader).

### Create Write Node
To create an OpenPype-managed Write node, select the Read node you just created and pick Create from the OpenPype menu.
In the Instance Creator, pick Create Write Render, and Create.

This will create a Group with a Write node inside.
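Stripped of OpenPype's metadata and knob presets, the structure it builds looks roughly like the sketch below. This is a hand-written illustration, not the actual creator code, and the output path is a placeholder.

```python
import nuke

group = nuke.createNode("Group", inpanel=False)
group.setName("renderMain")

group.begin()
inp = nuke.createNode("Input", inpanel=False)
write = nuke.createNode("Write", inpanel=False)
write["file"].setValue("/path/to/renders/sh010_renderMain.%04d.exr")  # placeholder
write["file_type"].setValue("exr")
write.setInput(0, inp)
nuke.createNode("Output", inpanel=False).setInput(0, write)
group.end()
```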
:::tip Admin Tip - Configuring write node
You can configure the write node parameters in **Studio Settings → Project → Anatomy → Color Management and Output Formats → Nuke → Nodes**
:::

#### What Nuke Publish Does
From the artist's perspective, a Nuke publish gathers everything found in the Nuke script with the Publish checkbox set to on, exports it, and raises the Nuke script (workfile) version.

The Pyblish dialog shows the progress of the process.

The left column of the dialog shows what will be published. Typically it is one or more renders or prerenders, plus the work file.

The right column shows the publish steps.

##### Publish steps
1. Gathers everything found in the Nuke script with the Publish checkbox set to on
2. Collects all the info (from the script, the database…)
3. Validates the components to be published (checks render range, resolution...; a schematic validator follows this list)
4. Extracts data from the script
   - generates a thumbnail
   - creates review(s), such as h264
   - adds burnins to the review(s)
   - copies and renames components like render(s), review(s) and the Nuke script to the publish folder
5. Integrates the components (writes to the database, sends a preview of the render to Ftrack...)
6. Increments the Nuke script version and cleans up the render directory
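To make step 3 more concrete, here is a schematic pyblish validator. It is not one of OpenPype's shipped plugins, and the `resolutionWidth` data keys are assumptions used for illustration.

```python
import pyblish.api


class ValidateExampleResolution(pyblish.api.InstancePlugin):
    """Fail the publish when the instance resolution does not match the shot."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Example Resolution"
    families = ["render"]

    def process(self, instance):
        expected = instance.context.data.get("resolutionWidth")  # assumed key
        actual = instance.data.get("resolutionWidth")            # assumed key
        if expected and actual and expected != actual:
            raise ValueError(
                "Render width {} does not match expected {}".format(actual, expected)
            )
```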
Gathering all the info and validating usually takes just a few seconds. Creating reviews for long, high-resolution shots can, however, take a significant amount of time when publishing locally.

##### Pyblish Note and Intent

Artists can add a Note and an Intent before firing the publish button. The Note and Intent are meant for easy communication between artist and supervisor. After the publish, the Note and Intent can be seen in the Ftrack notes.

##### Pyblish Checkbox

The Pyblish dialog tries to pack a lot of information into a small area. One of the trickier parts is that it uses non-standard checkboxes. Some squares can be turned on and off by the artist, some are mandatory.

If you run the publish and decide not to publish the Nuke script, you can turn it off right in the Pyblish dialog by clicking its checkbox. If you decide to render and publish the shot in a lower resolution to speed up the turnaround, you have to turn off the Write Resolution validator. If you want to use an older version of an asset (an older version of the plate...), you have to turn off Validate Containers, and so on.

More info about [Using Pyblish](artist_tools#publisher)

:::tip Admin Tip - Configuring validators
You can configure Nuke validators like Output Resolution in **Studio Settings → Project → Nuke → Publish plugins**
:::

### Review

When you turn the review checkbox on in your OpenPype write node, here is what happens:
- OpenPype uses the current Nuke script to
  - load the render
  - optionally apply a LUT
  - render ProRes 4444 at the same resolution as your render
- Ffmpeg converts the ProRes to whatever review(s) you defined
- Ffmpeg adds an (optional) burnin to the review(s) from the previous step (a rough sketch of these two steps follows)
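The two Ffmpeg steps might look roughly like the calls below. The codecs, file names and burnin text are all placeholders; the real arguments are driven by the ExtractReview and ExtractBurnin settings.

```python
import subprocess

src = "sh010_renderMain_prores.mov"     # placeholder intermediate
review = "sh010_renderMain_h264.mp4"    # placeholder review output
burnin = "sh010_renderMain_burnin.mp4"  # placeholder burned-in output

# 1) transcode the ProRes intermediate to an h264 review
subprocess.check_call([
    "ffmpeg", "-y", "-i", src,
    "-c:v", "libx264", "-crf", "18", "-pix_fmt", "yuv420p",
    review,
])

# 2) draw a simple text burnin on top of the review
subprocess.check_call([
    "ffmpeg", "-y", "-i", review,
    "-vf", "drawtext=text='sh010 comp v003':x=20:y=20:fontsize=24:fontcolor=white",
    burnin,
])
```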
Creating reviews is a part of the publishing process. If you choose to do a local publish or to use existing frames, the review is also processed on the artist's machine.
If you choose to publish on the farm, both the render and the reviews are done on the farm.

So far there is no option to use existing frames (from your local / check render) and do just the review on the farm.

More info about [configuring reviews](pype2/admin_presets_plugins#extractreview).

:::tip Admin Tip - Configuring Reviews
You can configure reviews in **Studio Settings → Project → Global → Publish plugins → ExtractReview / ExtractBurnin**
Reviews can be configured separately for each host, task, or family. For example, Maya can produce a different review than Nuke, an animation task can have a different burnin than a modelling task, and a plate can have a different review than a model.
:::

### Render and Publish

Let’s say you want to render and publish the shot right now, with only a Read and a Write node. You need to decide whether you want to render, check the render and then publish it, or execute the render and publish in one go.

If you wish to check your render before publishing, you can use your local machine or your farm to render the write node as you would without OpenPype, load and check your render (the OpenPype Write node has a convenience button for that) and, if happy, publish with the Use existing frames option selected in the write node to generate the review on your local machine.

If you want to render and publish on the farm in one go, run the publish with the On farm option selected in the write node to render and make the review on the farm.

### Version-less Render

OpenPype is configured so that your render file names carry no version number until the render is fully finished and published. The main advantage is that you can keep the render from the previous version and re-render only part of the shot. With care, this is handy.

The main disadvantage of this approach is that you can render only one version of your shot at a time. Otherwise you risk partially overwriting your render before publishing copies and renames the rendered files into the properly versioned publish folder.

When making quick farm publishes, such as two versions with different color correction, care must be taken to let the first job (first version) finish completely before the second version starts rendering.

### Managing Versions

OpenPype checks all the assets loaded into Nuke when the script is opened. All out-of-date assets are colored orange, up-to-date assets are colored green.

Use Manage to switch versions of the loaded assets.

## Troubleshooting

### Fixing Validate Containers

If your Pyblish dialog fails on Validate Containers, you might have an old asset loaded. Use OpenPype - Manage... to switch the asset(s) to the latest version.

### Fixing Validate Version
If your Pyblish dialog fails on Validate Version, you might be trying to publish an already published version. Raise your version in the OpenPype Work Files Save As.

Or maybe you accidentally copied a write node from a different shot into your current one. Check the write publishes on the left side of the Pyblish dialog. Typically you publish only one write. Locate and delete the stray write from the other shot.
BIN  website/docs/assets/nuke_tut/nuke_AnatomyAppsVersions.png  (new file, 29 KiB)
BIN  website/docs/assets/nuke_tut/nuke_AssetLoadOutOfDate.png  (new file, 38 KiB)
BIN  website/docs/assets/nuke_tut/nuke_AssetLoader.png  (new file, 66 KiB)
BIN  website/docs/assets/nuke_tut/nuke_Context.png  (new file, 5.4 KiB)
BIN  website/docs/assets/nuke_tut/nuke_Create.png  (new file, 9.7 KiB)
BIN  website/docs/assets/nuke_tut/nuke_Creator.png  (new file, 30 KiB)
BIN  website/docs/assets/nuke_tut/nuke_Load.png  (new file, 9.7 KiB)
BIN  website/docs/assets/nuke_tut/nuke_ManageVersion.png  (new file, 70 KiB)
BIN  website/docs/assets/nuke_tut/nuke_NukeColor.png  (new file, 41 KiB)
BIN  website/docs/assets/nuke_tut/nuke_Publish.png  (new file, 9.7 KiB)
BIN  website/docs/assets/nuke_tut/nuke_PyblishCheckBox.png  (new file, 7.6 KiB)
BIN  website/docs/assets/nuke_tut/nuke_PyblishDialogNuke.png  (new file, 65 KiB)
BIN  (new file, 8.4 KiB)
BIN  website/docs/assets/nuke_tut/nuke_RenderLocalFarm.png  (new file, 65 KiB)
BIN  website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction.png  (new file, 147 KiB)
BIN  website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction_p3.png  (new file, 26 KiB)
BIN  website/docs/assets/nuke_tut/nuke_RunNukeLauncher.png  (new file, 38 KiB)
BIN  website/docs/assets/nuke_tut/nuke_RunNukeLauncher_p2.png  (new file, 30 KiB)
BIN  website/docs/assets/nuke_tut/nuke_ValidateContainers.png  (new file, 74 KiB)
BIN  website/docs/assets/nuke_tut/nuke_WorkFileNamingAnatomy.png  (new file, 18 KiB)
BIN  website/docs/assets/nuke_tut/nuke_WorkFileSaveAs.png  (new file, 26 KiB)
BIN  website/docs/assets/nuke_tut/nuke_WorkfileOnStartup.png  (new file, 22 KiB)
BIN  website/docs/assets/nuke_tut/nuke_WriteNode.png  (new file, 19 KiB)
BIN  website/docs/assets/nuke_tut/nuke_WriteNodeCreated.png  (new file, 34 KiB)
BIN  website/docs/assets/nuke_tut/nuke_WriteNodeReview.png  (new file, 13 KiB)
BIN  website/docs/assets/nuke_tut/nuke_WriteSettings.png  (new file, 40 KiB)
BIN  website/docs/assets/nuke_tut/nuke_versionless.png  (new file, 4.9 KiB)
@ -18,7 +18,7 @@ module.exports = {
      label: "Integrations",
      items: [
        "artist_hosts_hiero",
        "artist_hosts_nuke",
        "artist_hosts_nuke_tut",
        "artist_hosts_maya",
        "artist_hosts_blender",
        "artist_hosts_harmony",