Merge pull request #887 from pypeclub/feature/nuke_in_pype3
Nuke to Pype3
.gitmodules (vendored): 2 changes
@@ -13,4 +13,4 @@
url = https://bitbucket.org/ftrack/ftrack-python-api.git
[submodule "pype/modules/ftrack/python2_vendor/arrow"]
path = pype/modules/ftrack/python2_vendor/arrow
url = git@github.com:arrow-py/arrow.git
url = git@github.com:arrow-py/arrow.git
pype/api.py: 10 changes
@@ -26,6 +26,14 @@ from .lib.mongo import (
get_default_components
)

from .lib.applications import (
ApplicationManager
)

from .lib.avalon_context import (
BuildWorkfile
)

from . import resources

from .plugin import (
@@ -63,6 +71,8 @@ __all__ = [
"decompose_url",
"compose_url",
"get_default_components",
"ApplicationManager",
"BuildWorkfile",

# Resources
"resources",
(deleted file)
@@ -1,144 +0,0 @@
import os
import sys
import logging

import nuke

from avalon import api as avalon
from avalon.tools import workfiles
from pyblish import api as pyblish
from pype.hosts.nuke import menu
from pype.api import Logger
from pype import PLUGINS_DIR
from . import lib


self = sys.modules[__name__]
self.workfiles_launched = False
log = Logger().get_logger(__name__)

AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")

PUBLISH_PATH = os.path.join(PLUGINS_DIR, "nuke", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "nuke", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "nuke", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nuke", "inventory")


# registering pyblish gui regarding settings in presets
if os.getenv("PYBLISH_GUI", None):
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))


def reload_config():
"""Attempt to reload pipeline at run-time.

CAUTION: This is primarily for development and debugging purposes.

"""

import importlib

for module in (
"{}.api".format(AVALON_CONFIG),
"{}.hosts.nuke.actions".format(AVALON_CONFIG),
"{}.hosts.nuke.presets".format(AVALON_CONFIG),
"{}.hosts.nuke.menu".format(AVALON_CONFIG),
"{}.hosts.nuke.plugin".format(AVALON_CONFIG),
"{}.hosts.nuke.lib".format(AVALON_CONFIG),
):
log.info("Reloading module: {}...".format(module))

module = importlib.import_module(module)

try:
importlib.reload(module)
except AttributeError as e:
log.warning("Cannot reload module: {}".format(e))
reload(module)


def install():
''' Installing all requarements for Nuke host
'''

log.info("Registering Nuke plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

# Register Avalon event for workfiles loading.
avalon.on("workio.open_file", lib.check_inventory_versions)

pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
workfile_settings = lib.WorkfileSettings()
# Disable all families except for the ones we explicitly want to see
family_states = [
"write",
"review",
"nukenodes"
"gizmo"
]

avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states

# Workfiles.
launch_workfiles = os.environ.get("WORKFILES_STARTUP")

if launch_workfiles:
nuke.addOnCreate(launch_workfiles_app, nodeClass="Root")

# Set context settings.
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root")
nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")

menu.install()


def launch_workfiles_app():
'''Function letting start workfiles after start of host
'''
if not self.workfiles_launched:
self.workfiles_launched = True
workfiles.show(os.environ["AVALON_WORKDIR"])


def uninstall():
'''Uninstalling host's integration
'''
log.info("Deregistering Nuke plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)

pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)


reload_config()
menu.uninstall()


def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle node passthrough states on instance toggles."""

log.info("instance toggle: {}, old_value: {}, new_value:{} ".format(
instance, old_value, new_value))

from avalon.nuke import (
viewer_update_and_undo_stop,
add_publish_knob
)

# Whether instances should be passthrough based on new value

with viewer_update_and_undo_stop():
n = instance[0]
try:
n["publish"].value()
except ValueError:
n = add_publish_knob(n)
log.info(" `Publish` knob was added to write node..")

n["publish"].setValue(new_value)
pype/hosts/nuke/api/__init__.py (new file): 141 lines
@@ -0,0 +1,141 @@
import os
import sys
import nuke

from avalon import api as avalon
from avalon.tools import workfiles
from pyblish import api as pyblish
from pype.api import Logger
import pype.hosts.nuke
from . import lib, menu


self = sys.modules[__name__]
self.workfiles_launched = False
log = Logger().get_logger(__name__)

AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
HOST_DIR = os.path.dirname(os.path.abspath(pype.hosts.nuke.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


# registering pyblish gui regarding settings in presets
if os.getenv("PYBLISH_GUI", None):
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))


def reload_config():
"""Attempt to reload pipeline at run-time.

CAUTION: This is primarily for development and debugging purposes.

"""

import importlib

for module in (
"{}.api".format(AVALON_CONFIG),
"{}.hosts.nuke.api.actions".format(AVALON_CONFIG),
"{}.hosts.nuke.api.menu".format(AVALON_CONFIG),
"{}.hosts.nuke.api.plugin".format(AVALON_CONFIG),
"{}.hosts.nuke.api.lib".format(AVALON_CONFIG),
):
log.info("Reloading module: {}...".format(module))

module = importlib.import_module(module)

try:
importlib.reload(module)
except AttributeError as e:
from importlib import reload
log.warning("Cannot reload module: {}".format(e))
reload(module)


def install():
''' Installing all requarements for Nuke host
'''

log.info("Registering Nuke plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

# Register Avalon event for workfiles loading.
avalon.on("workio.open_file", lib.check_inventory_versions)

pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
workfile_settings = lib.WorkfileSettings()
# Disable all families except for the ones we explicitly want to see
family_states = [
"write",
"review",
"nukenodes"
"gizmo"
]

avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states

# Workfiles.
launch_workfiles = os.environ.get("WORKFILES_STARTUP")

if launch_workfiles:
nuke.addOnCreate(launch_workfiles_app, nodeClass="Root")

# Set context settings.
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root")
# nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")

menu.install()


def launch_workfiles_app():
'''Function letting start workfiles after start of host
'''
if not self.workfiles_launched:
self.workfiles_launched = True
workfiles.show(os.environ["AVALON_WORKDIR"])


def uninstall():
'''Uninstalling host's integration
'''
log.info("Deregistering Nuke plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)

pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)

reload_config()
menu.uninstall()


def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle node passthrough states on instance toggles."""

log.info("instance toggle: {}, old_value: {}, new_value:{} ".format(
instance, old_value, new_value))

from avalon.nuke import (
viewer_update_and_undo_stop,
add_publish_knob
)

# Whether instances should be passthrough based on new value

with viewer_update_and_undo_stop():
n = instance[0]
try:
n["publish"].value()
except ValueError:
n = add_publish_knob(n)
log.info(" `Publish` knob was added to write node..")

n["publish"].setValue(new_value)

@@ -5,7 +5,7 @@ from avalon.nuke.lib (
select_nodes
)

from ...action import get_errored_instances_from_context
from pype.api import get_errored_instances_from_context


class SelectInvalidAction(pyblish.api.Action):
@@ -6,26 +6,74 @@ from collections import OrderedDict
from avalon import api, io, lib
import avalon.nuke
from avalon.nuke import lib as anlib
import pype.api as pype
from pype.api import (
Logger,
Anatomy,
get_version_from_path,
get_anatomy_settings,
get_hierarchy,
get_asset,
config,
ApplicationManager
)

import nuke


from .presets import (
get_colorspace_preset,
get_node_dataflow_preset,
get_node_colorspace_preset,
get_anatomy
)

from .utils import set_context_favorites

log = pype.Logger().get_logger(__name__)
log = Logger().get_logger(__name__)

self = sys.modules[__name__]
self._project = None


def get_node_imageio_setting(**kwarg):
''' Get preset data for dataflow (fileType, compression, bitDepth)
'''
log.info(kwarg)
host = str(kwarg.get("host", "nuke"))
nodeclass = kwarg.get("nodeclass", None)
creator = kwarg.get("creator", None)
project_name = os.getenv("AVALON_PROJECT")

assert any([host, nodeclass]), nuke.message(
"`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))

imageio_nodes = (get_anatomy_settings(project_name)
["imageio"]
.get(host, None)
["nodes"]
["requiredNodes"]
)

for node in imageio_nodes:
log.info(node)
if node["nukeNodeClass"] == nodeclass:
if creator in node["plugins"]:
imageio_node = node

log.info("ImageIO node: {}".format(imageio_node))
return imageio_node


def get_imageio_input_colorspace(filename):
''' Get input file colorspace based on regex in settings.
'''
imageio_regex_inputs = (get_anatomy_settings(os.getenv("AVALON_PROJECT"))
["imageio"]
["nuke"]
["regexInputs"]
["inputs"]
)

preset_clrsp = None
for regexInput in imageio_regex_inputs:
if bool(re.search(regexInput["regex"], filename)):
preset_clrsp = str(regexInput["colorspace"])

return preset_clrsp
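
A standalone illustration of the lookup get_imageio_input_colorspace() performs; the regex and colorspace values below are made up for the example and are not taken from any project settings:

import re

regex_inputs = [
    {"regex": "[^-a-zA-Z0-9]beauty", "colorspace": "linear"},
    {"regex": "[^-a-zA-Z0-9]plate", "colorspace": "sRGB"},
]
filename = "sh010_plate_v001.0001.exr"

# same logic as the function above: last matching rule wins
preset_clrsp = None
for regex_input in regex_inputs:
    if re.search(regex_input["regex"], filename):
        preset_clrsp = str(regex_input["colorspace"])

print(preset_clrsp)  # -> "sRGB"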


def on_script_load():
''' Callback for ffmpeg support
'''
@@ -39,7 +87,7 @@ def on_script_load():

def check_inventory_versions():
"""
Actiual version idetifier of Loaded containers
Actual version idetifier of Loaded containers

Any time this function is run it will check all nodes and filter only
Loader nodes for its version. It will get all versions from database
@@ -52,9 +100,9 @@ def check_inventory_versions():
container = avalon.nuke.parse_container(each)

if container:
node = container["_node"]
avalon_knob_data = avalon.nuke.get_avalon_knob_data(
node, ['avalon:', 'ak:'])
node = nuke.toNode(container["objectName"])
avalon_knob_data = avalon.nuke.read(
node)

# get representation from io
representation = io.find_one({
@@ -88,7 +136,7 @@ def writes_version_sync():
''' Callback synchronizing version of publishable write nodes
'''
try:
rootVersion = pype.get_version_from_path(nuke.root().name())
rootVersion = get_version_from_path(nuke.root().name())
padding = len(rootVersion)
new_version = "v" + str("{" + ":0>{}".format(padding) + "}").format(
int(rootVersion)
@@ -103,8 +151,8 @@ def writes_version_sync():
if "AvalonTab" not in each.knobs():
continue

avalon_knob_data = avalon.nuke.get_avalon_knob_data(
each, ['avalon:', 'ak:'])
avalon_knob_data = avalon.nuke.read(
each)

try:
if avalon_knob_data['families'] not in ["render"]:
@@ -113,7 +161,7 @@ def writes_version_sync():

node_file = each['file'].value()

node_version = "v" + pype.get_version_from_path(node_file)
node_version = "v" + get_version_from_path(node_file)
log.debug("node_version: {}".format(node_version))

node_new_file = node_file.replace(node_version, new_version)
@@ -134,24 +182,40 @@ def version_up_script():
nukescripts.script_and_write_nodes_version_up()


def check_subsetname_exists(nodes, subset_name):
"""
Checking if node is not already created to secure there is no duplicity

Arguments:
nodes (list): list of nuke.Node objects
subset_name (str): name we try to find

Returns:
bool: True of False
"""
result = next((True for n in nodes
if subset_name in avalon.nuke.read(n).get("subset", "")), False)
return result
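
A short usage sketch of the new duplicity check (it runs only inside a Nuke session with Pype available; "renderMain" is an example subset name):

import nuke
from pype.hosts.nuke.api.lib import check_subsetname_exists

# warn the artist before creating a second node with the same subset
if check_subsetname_exists(nuke.allNodes(), "renderMain"):
    nuke.message("Subset 'renderMain' is already used in this script.")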


def get_render_path(node):
''' Generate Render path from presets regarding avalon knob data
'''
data = dict()
data['avalon'] = avalon.nuke.get_avalon_knob_data(
node, ['avalon:', 'ak:'])
data['avalon'] = avalon.nuke.read(
node)

data_preset = {
"class": data['avalon']['family'],
"preset": data['avalon']['families']
}

nuke_dataflow_writes = get_node_dataflow_preset(**data_preset)
nuke_colorspace_writes = get_node_colorspace_preset(**data_preset)
nuke_imageio_writes = get_node_imageio_setting(**data_preset)

application = lib.get_application(os.environ["AVALON_APP_NAME"])
data.update({
"nuke_dataflow_writes": nuke_dataflow_writes,
"nuke_colorspace_writes": nuke_colorspace_writes
"application": application,
"nuke_imageio_writes": nuke_imageio_writes
})

anatomy_filled = format_anatomy(data)
@@ -169,7 +233,7 @@ def format_anatomy(data):
'''
# TODO: perhaps should be nonPublic

anatomy = get_anatomy()
anatomy = Anatomy()
log.debug("__ anatomy.templates: {}".format(anatomy.templates))

try:
@@ -192,18 +256,16 @@ def format_anatomy(data):
version = data.get("version", None)
if not version:
file = script_name()
data["version"] = pype.get_version_from_path(file)
data["version"] = get_version_from_path(file)
project_document = io.find_one({"type": "project"})
data.update({
"subset": data["avalon"]["subset"],
"asset": data["avalon"]["asset"],
"task": api.Session["AVALON_TASK"],
"task": os.environ["AVALON_TASK"],
"family": data["avalon"]["family"],
"project": {"name": project_document["name"],
"code": project_document["data"].get("code", '')},
"representation": data["nuke_dataflow_writes"]["file_type"],
"app": api.Session["AVALON_APP"],
"hierarchy": pype.get_hierarchy(),
"hierarchy": get_hierarchy(),
"frame": "#" * padding,
})
return anatomy.format(data)
@@ -217,11 +279,11 @@ def script_name():

def add_button_write_to_read(node):
name = "createReadNode"
label = "[ Create Read ]"
label = "Cread Read From Rendered"
value = "import write_to_read;write_to_read.write_to_read(nuke.thisNode())"
k = nuke.PyScript_Knob(name, label, value)
k.setFlag(0x1000)
node.addKnob(k)
knob = nuke.PyScript_Knob(name, label, value)
knob.clearFlag(nuke.STARTLINE)
node.addKnob(knob)


def create_write_node(name, data, input=None, prenodes=None, review=True):
@@ -254,18 +316,26 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
node (obj): group node with avalon data as Knobs
'''

nuke_dataflow_writes = get_node_dataflow_preset(**data)
nuke_colorspace_writes = get_node_colorspace_preset(**data)
imageio_writes = get_node_imageio_setting(**data)
app_manager = ApplicationManager()
app_name = os.environ.get("AVALON_APP_NAME")
if app_name:
app = app_manager.applications.get(app_name)

for knob in imageio_writes["knobs"]:
if knob["name"] == "file_type":
representation = knob["value"]

try:
data.update({
"nuke_dataflow_writes": nuke_dataflow_writes,
"nuke_colorspace_writes": nuke_colorspace_writes
"app": app.host_name,
"imageio_writes": imageio_writes,
"representation": representation,
})
anatomy_filled = format_anatomy(data)

except Exception as e:
msg = "problem with resolving anatomy tepmlate: {}".format(e)
msg = "problem with resolving anatomy template: {}".format(e)
log.error(msg)
nuke.message(msg)

@@ -274,7 +344,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
fpath = data["fpath_template"].format(
work=fpath, version=data["version"], subset=data["subset"],
frame=data["frame"],
ext=data["nuke_dataflow_writes"]["file_type"]
ext=representation
)

# create directory
@@ -287,17 +357,12 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
})

# adding dataflow template
log.debug("nuke_dataflow_writes: `{}`".format(nuke_dataflow_writes))
{_data.update({k: v})
for k, v in nuke_dataflow_writes.items()
if k not in ["_id", "_previous"]}
log.debug("imageio_writes: `{}`".format(imageio_writes))
for knob in imageio_writes["knobs"]:
if knob["name"] not in ["_id", "_previous"]:
_data.update({knob["name"]: knob["value"]})

# adding colorspace template
log.debug("nuke_colorspace_writes: `{}`".format(nuke_colorspace_writes))
{_data.update({k: v})
for k, v in nuke_colorspace_writes.items()}

_data = avalon.nuke.lib.fix_data_for_node_create(_data)
_data = anlib.fix_data_for_node_create(_data)

log.debug("_data: `{}`".format(_data))

@@ -366,7 +431,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
prev_node = now_node

# creating write node
write_node = now_node = avalon.nuke.lib.add_write_node(
write_node = now_node = anlib.add_write_node(
"inside_{}".format(name),
**_data
)
@@ -383,30 +448,40 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
now_node.setInput(0, prev_node)

# imprinting group node
avalon.nuke.imprint(GN, data["avalon"])

# add divider
GN.addKnob(nuke.Text_Knob(''))

anlib.set_avalon_knob_data(GN, data["avalon"])
anlib.add_publish_knob(GN)
add_rendering_knobs(GN)

if review:
add_review_knob(GN)

# add divider
GN.addKnob(nuke.Text_Knob(''))
GN.addKnob(nuke.Text_Knob('', 'Rendering'))

# Add linked knobs.
linked_knob_names = ["Render", "use_limit", "first", "last"]
linked_knob_names = [
"_grp-start_",
"use_limit", "first", "last",
"_grp-end_",
"Render"
]
for name in linked_knob_names:
link = nuke.Link_Knob(name)
link.makeLink(write_node.name(), name)
link.setName(name)
link.setFlag(0x1000)
GN.addKnob(link)

# add divider
GN.addKnob(nuke.Text_Knob(''))
if "_grp-start_" in name:
knob = nuke.Tab_Knob(
"rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP)
GN.addKnob(knob)
elif "_grp-end_" in name:
knob = nuke.Tab_Knob(
"rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP)
GN.addKnob(knob)
else:
link = nuke.Link_Knob("")
link.makeLink(write_node.name(), name)
link.setName(name)
if "Render" in name:
link.setLabel("Render Local")
link.setFlag(0x1000)
GN.addKnob(link)

# adding write to read button
add_button_write_to_read(GN)
@@ -431,9 +506,9 @@ def add_rendering_knobs(node):
node (obj): with added knobs
'''
if "render" not in node.knobs():
knob = nuke.Enumeration_Knob("render", "Render", [
knob = nuke.Enumeration_Knob("render", "", [
"Use existing frames", "Local", "On farm"])
knob.setFlag(0x1000)
knob.clearFlag(nuke.STARTLINE)
node.addKnob(knob)
return node

@@ -538,7 +613,7 @@ class WorkfileSettings(object):
self._project = kwargs.get(
"project") or io.find_one({"type": "project"})
self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"]
self._asset_entity = pype.get_asset(self._asset)
self._asset_entity = get_asset(self._asset)
self._root_node = root_node or nuke.root()
self._nodes = self.get_nodes(nodes=nodes)

@@ -674,7 +749,7 @@ class WorkfileSettings(object):
log.error(msg)
return

from avalon.nuke import get_avalon_knob_data
from avalon.nuke import read

for node in nuke.allNodes():

@@ -682,7 +757,7 @@ class WorkfileSettings(object):
continue

# get data from avalon knob
avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"])
avalon_knob_data = read(node)

if not avalon_knob_data:
continue
@@ -730,7 +805,7 @@ class WorkfileSettings(object):
continue

# load nuke presets for Read's colorspace
read_clrs_presets = get_colorspace_preset().get(
read_clrs_presets = config.get_init_presets()["colorspace"].get(
"nuke", {}).get("read", {})

# check if any colorspace presets for read is mathing
@@ -771,7 +846,8 @@ class WorkfileSettings(object):
def set_colorspace(self):
''' Setting colorpace following presets
'''
nuke_colorspace = get_colorspace_preset().get("nuke", None)
nuke_colorspace = config.get_init_presets(
)["colorspace"].get("nuke", None)

try:
self.set_root_colorspace(nuke_colorspace["root"])
@@ -836,7 +912,7 @@ class WorkfileSettings(object):
handle_start = data["handleStart"]
handle_end = data["handleEnd"]

fps = data["fps"]
fps = float(data["fps"])
frame_start = int(data["frameStart"]) - handle_start
frame_end = int(data["frameEnd"]) + handle_end

@@ -863,7 +939,7 @@ class WorkfileSettings(object):
node['frame_range_lock'].setValue(True)

# adding handle_start/end to root avalon knob
if not avalon.nuke.imprint(self._root_node, {
if not anlib.set_avalon_knob_data(self._root_node, {
"handleStart": int(handle_start),
"handleEnd": int(handle_end)
}):
@@ -971,7 +1047,7 @@ class WorkfileSettings(object):
# replace reset resolution from avalon core to pype's
self.reset_frame_range_handles()
# add colorspace menu item
self.set_colorspace()
# self.set_colorspace()

def set_favorites(self):
work_dir = os.getenv("AVALON_WORKDIR")
@@ -1031,8 +1107,8 @@ def get_write_node_template_attr(node):
'''
# get avalon data from node
data = dict()
data['avalon'] = avalon.nuke.get_avalon_knob_data(
node, ['avalon:', 'ak:'])
data['avalon'] = avalon.nuke.read(
node)
data_preset = {
"class": data['avalon']['family'],
"families": data['avalon']['families'],
@@ -1040,25 +1116,20 @@ def get_write_node_template_attr(node):
}

# get template data
nuke_dataflow_writes = get_node_dataflow_preset(**data_preset)
nuke_colorspace_writes = get_node_colorspace_preset(**data_preset)
nuke_imageio_writes = get_node_imageio_setting(**data_preset)

# collecting correct data
correct_data = OrderedDict({
"file": get_render_path(node)
})

# adding dataflow template
# adding imageio template
{correct_data.update({k: v})
for k, v in nuke_dataflow_writes.items()
for k, v in nuke_imageio_writes.items()
if k not in ["_id", "_previous"]}

# adding colorspace template
{correct_data.update({k: v})
for k, v in nuke_colorspace_writes.items()}

# fix badly encoded data
return avalon.nuke.lib.fix_data_for_node_create(correct_data)
return anlib.fix_data_for_node_create(correct_data)


class ExporterReview:
@@ -1177,6 +1248,7 @@ class ExporterReviewLut(ExporterReview):


"""

def __init__(self,
klass,
instance,
@@ -1279,6 +1351,7 @@ class ExporterReviewMov(ExporterReview):
instance (pyblish.instance): instance of pyblish context

"""

def __init__(self,
klass,
instance,
@@ -1,11 +1,8 @@
import os
import nuke
from avalon.api import Session

from pype.hosts.nuke import lib
from ...lib import BuildWorkfile
from pype.api import Logger
from pype.tools import workfiles
from .lib import WorkfileSettings
from pype.api import Logger, BuildWorkfile

log = Logger().get_logger(__name__)

@@ -13,25 +10,6 @@ log = Logger().get_logger(__name__)
def install():
menubar = nuke.menu("Nuke")
menu = menubar.findItem(Session["AVALON_LABEL"])
workfile_settings = lib.WorkfileSettings

# replace reset resolution from avalon core to pype's
name = "Work Files..."
rm_item = [
(i, item) for i, item in enumerate(menu.items()) if name in item.name()
][0]

log.debug("Changing Item: {}".format(rm_item))

menu.removeItem(rm_item[1].name())
menu.addCommand(
name,
lambda: workfiles.show(
os.environ["AVALON_WORKDIR"]
),
index=(rm_item[0])
)

# replace reset resolution from avalon core to pype's
name = "Reset Resolution"
new_name = "Set Resolution"
@@ -44,7 +22,7 @@ def install():
menu.removeItem(rm_item[1].name())
menu.addCommand(
new_name,
lambda: workfile_settings().reset_resolution(),
lambda: WorkfileSettings().reset_resolution(),
index=(rm_item[0])
)

@@ -59,14 +37,14 @@ def install():
menu.removeItem(rm_item[1].name())
menu.addCommand(
new_name,
lambda: workfile_settings().reset_frame_range_handles(),
lambda: WorkfileSettings().reset_frame_range_handles(),
index=(rm_item[0])
)

# add colorspace menu item
name = "Set Colorspace"
menu.addCommand(
name, lambda: workfile_settings().set_colorspace(),
name, lambda: WorkfileSettings().set_colorspace(),
index=(rm_item[0] + 2)
)
log.debug("Adding menu item: {}".format(name))
@@ -83,7 +61,7 @@ def install():
name = "Apply All Settings"
menu.addCommand(
name,
lambda: workfile_settings().set_context_settings(),
lambda: WorkfileSettings().set_context_settings(),
index=(rm_item[0] + 3)
)
log.debug("Adding menu item: {}".format(name))
pype/hosts/nuke/api/plugin.py (new file): 23 lines
@@ -0,0 +1,23 @@
import avalon.api
import avalon.nuke
from pype.api import get_current_project_settings
from .lib import check_subsetname_exists
import nuke


class PypeCreator(avalon.nuke.pipeline.Creator):
"""Pype Nuke Creator class wrapper
"""
def __init__(self, *args, **kwargs):
super(PypeCreator, self).__init__(*args, **kwargs)
self.presets = get_current_project_settings()["nuke"]["create"].get(
self.__class__.__name__, {}
)
if check_subsetname_exists(
nuke.allNodes(),
self.data["subset"]):
msg = ("The subset name `{0}` is already used on a node in"
"this workfile.".format(self.data["subset"]))
self.log.error(msg + '\n\nPlease use other subset name!')
raise NameError("`{0}: {1}".format(__name__, msg))
return
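
A hypothetical creator built on the new wrapper, showing how the preset lookup and duplicity check are inherited (a sketch only; the real creators such as CreateWriteRender appear later in this diff, and the class and attribute values below are made up):

from pype.hosts.nuke.api import plugin


class CreateExampleBackdrop(plugin.PypeCreator):
    # illustrative values, not taken from the repository
    name = "ExampleBackdrop"
    label = "Create Example Backdrop"
    family = "nukenodes"
    hosts = ["nuke"]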

@@ -4,12 +4,13 @@ from avalon.nuke import lib as anlib
from pype.api import resources


def set_context_favorites(favorites={}):
def set_context_favorites(favorites=None):
""" Addig favorite folders to nuke's browser

Argumets:
favorites (dict): couples of {name:path}
"""
favorites = favorites or {}
icon_path = resources.get_resource("icons", "folder-favorite3.png")
for name, path in favorites.items():
nuke.addFavoriteDir(
(deleted file)
@@ -1,13 +0,0 @@
import re
import avalon.api
import avalon.nuke
from pype.api import get_current_project_settings

class PypeCreator(avalon.nuke.pipeline.Creator):
"""Pype Nuke Creator class wrapper
"""
def __init__(self, *args, **kwargs):
super(PypeCreator, self).__init__(*args, **kwargs)
self.presets = get_current_project_settings()["nuke"]["create"].get(
self.__class__.__name__, {}
)
@@ -32,7 +32,7 @@ class CreateBackdrop(avalon.nuke.Creator):
bckd_node["note_font_size"].setValue(24)
bckd_node["label"].setValue("[{}]".format(self.name))
# add avalon knobs
instance = anlib.imprint(bckd_node, self.data)
instance = anlib.set_avalon_knob_data(bckd_node, self.data)

return instance
else:
@@ -48,6 +48,6 @@ class CreateBackdrop(avalon.nuke.Creator):
bckd_node["note_font_size"].setValue(24)
bckd_node["label"].setValue("[{}]".format(self.name))
# add avalon knobs
instance = anlib.imprint(bckd_node, self.data)
instance = anlib.set_avalon_knob_data(bckd_node, self.data)

return instance
@@ -36,7 +36,7 @@ class CreateCamera(avalon.nuke.Creator):
# change node color
n["tile_color"].setValue(int(self.node_color, 16))
# add avalon knobs
anlib.imprint(n, data)
anlib.set_avalon_knob_data(n, data)
return True
else:
msg = str("Please select nodes you "
@@ -49,5 +49,5 @@ class CreateCamera(avalon.nuke.Creator):
camera_node = nuke.createNode("Camera2")
camera_node["tile_color"].setValue(int(self.node_color, 16))
# add avalon knobs
instance = anlib.imprint(camera_node, self.data)
instance = anlib.set_avalon_knob_data(camera_node, self.data)
return instance
@@ -34,7 +34,7 @@ class CreateGizmo(avalon.nuke.Creator):
if node.Class() in "Group":
node["name"].setValue("{}_GZM".format(self.name))
node["tile_color"].setValue(int(self.node_color, 16))
return anlib.imprint(node, self.data)
return anlib.set_avalon_knob_data(node, self.data)
else:
msg = ("Please select a group node "
"you wish to publish as the gizmo")
@@ -57,7 +57,7 @@ class CreateGizmo(avalon.nuke.Creator):
"- create User knobs on the group")

# add avalon knobs
return anlib.imprint(gizmo_node, self.data)
return anlib.set_avalon_knob_data(gizmo_node, self.data)

else:
msg = ("Please select nodes you "
@@ -80,4 +80,4 @@ class CreateGizmo(avalon.nuke.Creator):
"- create User knobs on the group")

# add avalon knobs
return anlib.imprint(gizmo_node, self.data)
return anlib.set_avalon_knob_data(gizmo_node, self.data)
@@ -44,7 +44,8 @@ class CrateRead(avalon.nuke.Creator):
continue
avalon_data = self.data
avalon_data['subset'] = "{}".format(self.name)
self.change_read_node(self.data["subset"], node, avalon_data)
avalon.nuke.lib.set_avalon_knob_data(node, avalon_data)
node['tile_color'].setValue(16744935)
count_reads += 1

if count_reads < 1:
@@ -52,7 +53,3 @@ class CrateRead(avalon.nuke.Creator):
self.log.error(msg)
nuke.message(msg)
return

def change_read_node(self, name, node, data):
node = avalon.nuke.lib.imprint(node, data)
node['tile_color'].setValue(16744935)
@@ -1,7 +1,7 @@
from collections import OrderedDict
from pype.hosts.nuke import (
from pype.hosts.nuke.api import (
plugin,
lib as pnlib)
lib)
import nuke


@@ -10,7 +10,7 @@ class CreateWritePrerender(plugin.PypeCreator):
name = "WritePrerender"
label = "Create Write Prerender"
hosts = ["nuke"]
n_class = "write"
n_class = "Write"
family = "prerender"
icon = "sign-out"
defaults = ["Key01", "Bg01", "Fg01", "Branch01", "Part01"]
@@ -75,9 +75,10 @@ class CreateWritePrerender(plugin.PypeCreator):

# recreate new
write_data = {
"class": self.n_class,
"nodeclass": self.n_class,
"families": [self.family],
"avalon": self.data
"avalon": self.data,
"creator": self.__class__.__name__
}

if self.presets.get('fpath_template'):
@@ -91,7 +92,9 @@ class CreateWritePrerender(plugin.PypeCreator):
"fpath_template": ("{work}/prerenders/nuke/{subset}"
"/{subset}.{frame}.{ext}")})

write_node = pnlib.create_write_node(
self.log.info("write_data: {}".format(write_data))

write_node = lib.create_write_node(
self.data["subset"],
write_data,
input=selected_node,
@@ -1,7 +1,7 @@
from collections import OrderedDict
from pype.hosts.nuke import (
from pype.hosts.nuke.api import (
plugin,
lib as pnlib)
lib)
import nuke


@@ -10,7 +10,7 @@ class CreateWriteRender(plugin.PypeCreator):
name = "WriteRender"
label = "Create Write Render"
hosts = ["nuke"]
n_class = "write"
n_class = "Write"
family = "render"
icon = "sign-out"
defaults = ["Main", "Mask"]
@@ -48,6 +48,7 @@ class CreateWriteRender(plugin.PypeCreator):
"or tick off `Use selection`")
self.log.error(msg)
nuke.message(msg)
return

if len(nodes) == 0:
msg = (
@@ -56,6 +57,7 @@ class CreateWriteRender(plugin.PypeCreator):
)
self.log.error(msg)
nuke.message(msg)
return

selected_node = nodes[0]
inputs = [selected_node]
@@ -76,9 +78,10 @@ class CreateWriteRender(plugin.PypeCreator):

# recreate new
write_data = {
"class": self.n_class,
"nodeclass": self.n_class,
"families": [self.family],
"avalon": self.data
"avalon": self.data,
"creator": self.__class__.__name__
}

if self.presets.get('fpath_template'):
@@ -92,7 +95,7 @@ class CreateWriteRender(plugin.PypeCreator):
"fpath_template": ("{work}/renders/nuke/{subset}"
"/{subset}.{frame}.{ext}")})

write_node = pnlib.create_write_node(
write_node = lib.create_write_node(
self.data["subset"],
write_data,
input=selected_node)
@@ -25,7 +25,7 @@ class SetFrameRangeLoader(api.Loader):

def load(self, context, name, namespace, data):

from pype.hosts.nuke import lib
from pype.hosts.nuke.api import lib

version = context['version']
version_data = version.get("data", {})
@@ -59,7 +59,7 @@ class SetFrameRangeWithHandlesLoader(api.Loader):

def load(self, context, name, namespace, data):

from pype.hosts.nuke import lib
from pype.hosts.nuke.api import lib

version = context['version']
version_data = version.get("data", {})
@@ -1,7 +1,7 @@
from avalon import api, style, io
import nuke
import nukescripts
from pype.hosts.nuke import lib as pnlib
from pype.hosts.nuke.api import lib as pnlib
from avalon.nuke import lib as anlib
from avalon.nuke import containerise, update_container
reload(pnlib)
@@ -1,6 +1,6 @@
from avalon import api, style, io
import nuke
from pype.hosts.nuke import lib as pnlib
from pype.hosts.nuke.api import lib as pnlib
from avalon.nuke import lib as anlib
from avalon.nuke import containerise, update_container

@@ -4,7 +4,9 @@ import nuke
from avalon.vendor import qargparse
from avalon import api, io

from pype.hosts.nuke import presets
from pype.hosts.nuke.api.lib import (
get_imageio_input_colorspace
)


class LoadImage(api.Loader):
@@ -90,17 +92,10 @@ class LoadImage(api.Loader):
if colorspace:
r["colorspace"].setValue(str(colorspace))

# load nuke presets for Read's colorspace
read_clrs_presets = presets.get_colorspace_preset().get(
"nuke", {}).get("read", {})
preset_clrsp = get_imageio_input_colorspace(file)

# check if any colorspace presets for read is mathing
preset_clrsp = next((read_clrs_presets[k]
for k in read_clrs_presets
if bool(re.search(k, file))),
None)
if preset_clrsp is not None:
r["colorspace"].setValue(str(preset_clrsp))
r["colorspace"].setValue(preset_clrsp)

r["origfirst"].setValue(first)
r["first"].setValue(first)
@@ -2,7 +2,7 @@ from avalon import api, style, io
import nuke
import json
from collections import OrderedDict
from pype.hosts.nuke import lib
from pype.hosts.nuke.api import lib


class LoadLutsInputProcess(api.Loader):
@@ -1,10 +1,11 @@
import re
import nuke
import contextlib

from avalon import api, io
from pype.hosts.nuke import presets
from pype.api import get_project_settings
from pype.api import get_current_project_settings
from pype.hosts.nuke.api.lib import (
get_imageio_input_colorspace
)


@contextlib.contextmanager
@@ -73,12 +74,18 @@ def add_review_presets_config():
"families": list(),
"representations": list()
}
settings = get_project_settings(io.Session["AVALON_PROJECT"])
review_presets = settings["global"]["publish"].get(
"ExtractReview", {})
settings = get_current_project_settings()
review_profiles = (
settings["global"]
["publish"]
["ExtractReview"]
["profiles"]
)

outputs = {}
for profile in review_profiles:
outputs.update(profile.get("outputs", {}))

outputs = review_presets.get("outputs", {})
#
for output, properities in outputs.items():
returning["representations"].append(output)
returning["families"] += properities.get("families", [])
@@ -175,17 +182,10 @@ class LoadMov(api.Loader):
if colorspace:
read_node["colorspace"].setValue(str(colorspace))

# load nuke presets for Read's colorspace
read_clrs_presets = presets.get_colorspace_preset().get(
"nuke", {}).get("read", {})
preset_clrsp = get_imageio_input_colorspace(file)

# check if any colorspace presets for read is mathing
preset_clrsp = next((read_clrs_presets[k]
for k in read_clrs_presets
if bool(re.search(k, file))),
None)
if preset_clrsp is not None:
read_node["colorspace"].setValue(str(preset_clrsp))
read_node["colorspace"].setValue(preset_clrsp)

# add additional metadata from the version to imprint Avalon knob
add_keys = [
@@ -276,10 +276,11 @@ class LoadMov(api.Loader):
colorspace = version_data.get("colorspace")

if first is None:
self.log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})".format(
node['name'].value(), representation))
self.log.warning((
"Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})").format(
node['name'].value(), representation))
first = 0

# fix handle start and end if none are available
@@ -309,17 +310,10 @@ class LoadMov(api.Loader):
if colorspace:
node["colorspace"].setValue(str(colorspace))

# load nuke presets for Read's colorspace
read_clrs_presets = presets.get_colorspace_preset().get(
"nuke", {}).get("read", {})
preset_clrsp = get_imageio_input_colorspace(file)

# check if any colorspace presets for read is mathing
preset_clrsp = next((read_clrs_presets[k]
for k in read_clrs_presets
if bool(re.search(k, file))),
None)
if preset_clrsp is not None:
node["colorspace"].setValue(str(preset_clrsp))
node["colorspace"].setValue(preset_clrsp)

updated_dict = {}
updated_dict.update({
@@ -1,9 +1,10 @@
import re
import nuke
import contextlib

from avalon import api, io
from pype.hosts.nuke import presets
from pype.hosts.nuke.api.lib import (
get_imageio_input_colorspace
)


@contextlib.contextmanager
@@ -142,17 +143,10 @@ class LoadSequence(api.Loader):
if colorspace:
r["colorspace"].setValue(str(colorspace))

# load nuke presets for Read's colorspace
read_clrs_presets = presets.get_colorspace_preset().get(
"nuke", {}).get("read", {})
preset_clrsp = get_imageio_input_colorspace(file)

# check if any colorspace presets for read is mathing
preset_clrsp = next((read_clrs_presets[k]
for k in read_clrs_presets
if bool(re.search(k, file))),
None)
if preset_clrsp is not None:
r["colorspace"].setValue(str(preset_clrsp))
r["colorspace"].setValue(preset_clrsp)

loader_shift(r, first, relative=True)
r["origfirst"].setValue(int(first))
@@ -1,8 +1,8 @@
import pyblish.api
import pype.api as pype
from pype.hosts.nuke import lib as pnlib
from pype.hosts.nuke.api import lib as pnlib
import nuke


@pyblish.api.log
class CollectBackdrops(pyblish.api.InstancePlugin):
"""Collect Backdrop node instance and its content
@@ -42,12 +42,12 @@ class CollectBackdrops(pyblish.api.InstancePlugin):
# get all connections from outside of backdrop
nodes = instance[1:]
connections_in, connections_out = pnlib.get_dependent_nodes(nodes)
instance.data["connections_in"] = connections_in
instance.data["connections_out"] = connections_out
instance.data["nodeConnectionsIn"] = connections_in
instance.data["nodeConnectionsOut"] = connections_out

# make label nicer
instance.data["label"] = "{0} ({1} nodes)".format(
bckn.name(), len(instance)-1)
bckn.name(), len(instance) - 1)

instance.data["families"].append(instance.data["family"])

@@ -1,5 +1,4 @@
import pyblish.api
import pype.api as pype
import nuke


@@ -29,10 +28,6 @@ class CollectGizmo(pyblish.api.InstancePlugin):
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())

# get version
version = pype.get_version_from_path(nuke.root().name())
instance.data['version'] = int(version)

# Add version data to instance
version_data = {
"handles": handle_start,
@@ -41,7 +36,6 @@ class CollectGizmo(pyblish.api.InstancePlugin):
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"colorspace": nuke.root().knob('workingSpaceLUT').value(),
"version": int(version),
"families": [instance.data["family"]] + instance.data["families"],
"subset": instance.data["subset"],
"fps": instance.context.data["fps"]
@@ -10,7 +10,7 @@ class CollectNukeReads(pyblish.api.InstancePlugin):
"""Collect all read nodes."""

order = pyblish.api.CollectorOrder + 0.04
label = "Collect Reads"
label = "Collect Source Reads"
hosts = ["nuke", "nukeassist"]
families = ["source"]

@@ -109,7 +109,7 @@ class CollectNukeReads(pyblish.api.InstancePlugin):
"handleEnd": handle_end,
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"colorspace": colorspace,
"colorspace": colorspace,
"families": [instance.data["family"]],
"subset": instance.data["subset"],
"fps": instance.context.data["fps"]
@@ -1,6 +1,6 @@
import pyblish.api
from avalon.nuke import lib as anlib
from pype.hosts.nuke import lib as pnlib
from pype.hosts.nuke.api import lib as pnlib
import nuke
import os
import pype
@@ -30,8 +30,8 @@ class ExtractBackdropNode(pype.api.Extractor):
# maintain selection
with anlib.maintained_selection():
# all connections outside of backdrop
connections_in = instance.data["connections_in"]
connections_out = instance.data["connections_out"]
connections_in = instance.data["nodeConnectionsIn"]
connections_out = instance.data["nodeConnectionsOut"]
self.log.debug("_ connections_in: `{}`".format(connections_in))
self.log.debug("_ connections_out: `{}`".format(connections_out))

@@ -1,7 +1,6 @@
import pyblish.api
from avalon.nuke import lib as anlib
from pype.hosts.nuke import lib as pnlib
from pype.hosts.nuke import utils as pnutils
from pype.hosts.nuke.api import utils as pnutils
import nuke
import os
import pype
@@ -1,7 +1,7 @@
import os
import pyblish.api
from avalon.nuke import lib as anlib
from pype.hosts.nuke import lib as pnlib
from pype.hosts.nuke.api import lib as pnlib
import pype
reload(pnlib)

@@ -1,7 +1,7 @@
import os
import pyblish.api
from avalon.nuke import lib as anlib
from pype.hosts.nuke import lib as pnlib
from pype.hosts.nuke.api import lib as pnlib
import pype


@@ -1,19 +1,20 @@
import os

import nuke
import pyblish.api
from avalon import io, api
from avalon.nuke import get_avalon_knob_data
from avalon.nuke import lib as anlib


@pyblish.api.log
class CollectNukeInstances(pyblish.api.ContextPlugin):
class PreCollectNukeInstances(pyblish.api.ContextPlugin):
"""Collect all nodes with Avalon knob."""

order = pyblish.api.CollectorOrder + 0.01
label = "Collect Instances"
order = pyblish.api.CollectorOrder - 0.59
label = "Pre-collect Instances"
hosts = ["nuke", "nukeassist"]

# presets
sync_workfile_version = False

def process(self, context):
asset_data = io.find_one({
"type": "asset",
@@ -38,8 +39,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
self.log.warning(E)

# get data from avalon knob
self.log.debug("node[name]: {}".format(node['name'].value()))
avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"])
avalon_knob_data = anlib.get_avalon_knob_data(
node, ["avalon:", "ak:"])

self.log.debug("avalon_knob_data: {}".format(avalon_knob_data))

@@ -55,7 +56,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
families = list()

if families_ak:
families.append(families_ak)
families.append(families_ak.lower())

families.append(family)

@@ -70,10 +71,17 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
instance = context.create_instance(subset)
instance.append(node)

# get review knob value
review = False
if "review" in node.knobs():
review = node["review"].value()
families.append("review")
families.append("ftrack")

# Add all nodes in group instances.
if node.Class() == "Group":
# only alter families for render family
if "write" in families_ak:
# check if it is write node in family
if "write" in families:
target = node["render"].value()
if target == "Use existing frames":
# Local rendering
@@ -105,6 +113,17 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
resolution_height = format.height()
pixel_aspect = format.pixelAspect()

# get publish knob value
if "publish" not in node.knobs():
anlib.add_publish_knob(node)

# sync workfile version
if not next((f for f in families
if "prerender" in f),
None) and self.sync_workfile_version:
# get version to instance for integration
instance.data['version'] = instance.context.data['version']

instance.data.update({
"subset": subset,
"asset": avalon_knob_data["asset"],
@@ -114,17 +133,21 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
"family": family,
"families": families,
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish').value(),
"step": 1,
"publish": node.knob('publish').value(),
"fps": nuke.root()['fps'].value(),
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"pixelAspect": pixel_aspect,
"review": review

})

self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)

context.data["instances"] = instances
# create instances in context data if not are created yet
if not context.data.get("instances"):
context.data["instances"] = list()

context.data["instances"].extend(instances)
self.log.debug("context: {}".format(context))
@ -1,33 +1,32 @@
|
|||
import nuke
|
||||
import pyblish.api
|
||||
import os
|
||||
|
||||
from avalon.nuke import (
|
||||
get_avalon_knob_data,
|
||||
add_publish_knob
|
||||
)
|
||||
import pype.api as pype
|
||||
from avalon.nuke import lib as anlib
|
||||
reload(anlib)
|
||||
|
||||
|
||||
class CollectWorkfile(pyblish.api.ContextPlugin):
|
||||
"""Collect current script for publish."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.1
|
||||
label = "Collect Workfile"
|
||||
order = pyblish.api.CollectorOrder - 0.60
|
||||
label = "Pre-collect Workfile"
|
||||
hosts = ['nuke']
|
||||
|
||||
def process(self, context):
|
||||
root = nuke.root()
|
||||
|
||||
knob_data = get_avalon_knob_data(root)
|
||||
current_file = os.path.normpath(nuke.root().name())
|
||||
|
||||
add_publish_knob(root)
|
||||
knob_data = anlib.get_avalon_knob_data(root)
|
||||
|
||||
anlib.add_publish_knob(root)
|
||||
|
||||
family = "workfile"
|
||||
task = os.getenv("AVALON_TASK", None)
|
||||
# creating instances per write node
|
||||
file_path = context.data["currentFile"]
|
||||
staging_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
staging_dir = os.path.dirname(current_file)
|
||||
base_name = os.path.basename(current_file)
|
||||
subset = family + task.capitalize()
|
||||
|
||||
# Get frame range
|
||||
|
|
@ -62,6 +61,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
|
|||
"handleEnd": handle_end,
|
||||
"step": 1,
|
||||
"fps": root['fps'].value(),
|
||||
|
||||
"currentFile": current_file,
|
||||
"version": int(pype.get_version_from_path(current_file)),
|
||||
|
||||
"host": pyblish.api.current_host(),
|
||||
"hostVersion": nuke.NUKE_VERSION_STRING
|
||||
}
|
||||
context.data.update(script_data)
|
||||
|
||||
|
|
@ -90,4 +95,9 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
|
|||
instance.data["representations"].append(representation)
|
||||
|
||||
self.log.info('Publishing script version')
|
||||
|
||||
# create instances in context data if not are created yet
|
||||
if not context.data.get("instances"):
|
||||
context.data["instances"] = list()
|
||||
|
||||
context.data["instances"].append(instance)
|
||||
|
|
@ -1,14 +1,16 @@
|
|||
import os
|
||||
import nuke
|
||||
import pyblish.api
|
||||
import pype.api as pype
|
||||
from avalon import io, api
|
||||
|
||||
|
||||
@pyblish.api.log
|
||||
class CollectNukeWrites(pyblish.api.InstancePlugin):
|
||||
"""Collect all write nodes."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.1
|
||||
label = "Collect Writes"
|
||||
order = pyblish.api.CollectorOrder - 0.58
|
||||
label = "Pre-collect Writes"
|
||||
hosts = ["nuke", "nukeassist"]
|
||||
families = ["write"]
|
||||
|
||||
|
|
@ -41,9 +43,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
        handle_end = instance.context.data["handleEnd"]
        first_frame = int(nuke.root()["first_frame"].getValue())
        last_frame = int(nuke.root()["last_frame"].getValue())
        frame_length = int(
            last_frame - first_frame + 1
        )
        frame_length = int(last_frame - first_frame + 1)

        if node["use_limit"].getValue():
            first_frame = int(node["first"].getValue())
@ -54,14 +54,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
        output_dir = os.path.dirname(path)
        self.log.debug('output dir: {}'.format(output_dir))

        if not next((f for f in families
                     if "prerender" in f),
                    None) and self.sync_workfile_version:
            # get version to instance for integration
            instance.data['version'] = instance.context.data["version"]

            self.log.debug('Write Version: %s' % instance.data('version'))

        # create label
        name = node.name()
        # Include start and end render frame in label
@ -79,7 +71,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
        representation = {
            'name': ext,
            'ext': ext,
            "stagingDir": output_dir
            "stagingDir": output_dir,
            "tags": list()
        }

        try:
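The change above adds an empty "tags" list to the representation dict so later plugins can flag it. As a hedged sketch of what such a representation entry typically looks like when handed to the integrator, with the keys taken from the surrounding diff and the file names, path and frame values made up for illustration:

# Illustrative only - file names, path and tags are hypothetical.
representation = {
    "name": "exr",
    "ext": "exr",
    "files": ["render.0001.exr", "render.0002.exr"],  # hypothetical output
    "stagingDir": "/tmp/render",                      # hypothetical path
    "tags": [],  # e.g. flags like "review" consumed by later plugins
}
instance.data.setdefault("representations", []).append(representation)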
@ -152,6 +145,25 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
            "families": []
        })

        # * Add audio to instance if it exists.
        # Find latest version document
        version_doc = pype.get_latest_version(
            instance.data["asset"], "audioMain"
        )
        repre_doc = None
        if version_doc:
            # Try to find its representation (expected there is only one)
            repre_doc = io.find_one(
                {"type": "representation", "parent": version_doc["_id"]}
            )

        # Add audio to instance if representation was found
        if repre_doc:
            instance.data["audio"] = [{
                "offset": 0,
                "filename": api.get_representation_path(repre_doc)
            }]

        self.log.debug("families: {}".format(families))

        self.log.debug("instance.data: {}".format(instance.data))
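The audio lookup added above follows a simple pattern: find the latest "audioMain" version for the asset, fetch its single representation, and attach its resolved path as audio. A self-contained sketch of that pattern, using the same calls as the diff; the asset name and database contents are hypothetical:

from avalon import io, api
import pype.api as pype

# Sketch of the audio lookup (asset/subset names are examples).
version_doc = pype.get_latest_version("sh010", "audioMain")

audio = []
if version_doc:
    # A version is expected to carry exactly one representation here.
    repre_doc = io.find_one(
        {"type": "representation", "parent": version_doc["_id"]}
    )
    if repre_doc:
        audio.append({
            "offset": 0,
            "filename": api.get_representation_path(repre_doc),
        })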
@ -32,8 +32,8 @@ class SelectCenterInNodeGraph(pyblish.api.Action):
        # collect all failed nodes xpos and ypos
        for instance in instances:
            bdn = instance[0]
            xC = bdn.xpos() + bdn.screenWidth()/2
            yC = bdn.ypos() + bdn.screenHeight()/2
            xC = bdn.xpos() + bdn.screenWidth() / 2
            yC = bdn.ypos() + bdn.screenHeight() / 2

            all_xC.append(xC)
            all_yC.append(yC)
@ -58,10 +58,11 @@ class ValidateBackdrop(pyblish.api.InstancePlugin):
    actions = [SelectCenterInNodeGraph]

    def process(self, instance):
        connections_out = instance.data["connections_out"]
        connections_out = instance.data["nodeConnectionsOut"]

        msg_multiple_outputs = "Only one outcoming connection from \"{}\" is allowed".format(
            instance.data["name"])
        msg_multiple_outputs = (
            "Only one outcoming connection from "
            "\"{}\" is allowed").format(instance.data["name"])
        assert len(connections_out.keys()) <= 1, msg_multiple_outputs

        msg_no_content = "No content on backdrop node: \"{}\"".format(
@ -54,15 +54,15 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
            # Filter families.
            families = [instance.data["family"]]
            families += instance.data.get("families", [])
            families = list(set(families) & set(self.knobs.keys()))
            families = list(set(families) & set(cls.knobs.keys()))
            if not families:
                continue

            # Get all knobs to validate.
            knobs = {}
            for family in families:
                for preset in self.knobs[family]:
                    knobs.update({preset: self.knobs[family][preset]})
                for preset in cls.knobs[family]:
                    knobs.update({preset: cls.knobs[family][preset]})

            # Get invalid knobs.
            nodes = []
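The hunk above swaps `self.knobs` for `cls.knobs`, which suggests the code runs in (or is shared with) a classmethod, where there is no instance and class-level data injected from settings must be read through `cls`. A minimal illustration of that pattern; the class and its data are hypothetical, not the real plugin:

import pyblish.api


class ExampleValidator(pyblish.api.ContextPlugin):
    """Illustration of reading class-level settings via cls (not the real plugin)."""

    # typically injected onto the class from project settings
    knobs = {"render": {"review": True}}

    @classmethod
    def get_invalid(cls, context):
        # a classmethod has no instance, so class data is accessed via cls
        return [knob for knob in cls.knobs.get("render", {})]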
@ -1,75 +0,0 @@
from pype.api import Anatomy, config, Logger
import nuke

log = Logger().get_logger(__name__)


def get_anatomy(**kwarg):
    return Anatomy()


def get_dataflow_preset():
    presets = config.get_init_presets()
    return presets["dataflow"]


def get_colorspace_preset():
    presets = config.get_init_presets()
    return presets["colorspace"]


def get_node_dataflow_preset(**kwarg):
    ''' Get preset data for dataflow (fileType, compression, bitDepth)
    '''
    log.info(kwarg)
    host = kwarg.get("host", "nuke")
    cls = kwarg.get("class", None)
    families = kwarg.get("families", [])
    preset = kwarg.get("preset", None)  # omit < 2.0.0v

    assert any([host, cls]), nuke.message(
        "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))

    nuke_dataflow = get_dataflow_preset().get(str(host), None)
    nuke_dataflow_nodes = nuke_dataflow.get('nodes', None)
    nuke_dataflow_node = nuke_dataflow_nodes.get(str(cls), None)

    if preset:  # omit < 2.0.0v
        nuke_dataflow_node = nuke_dataflow_node.get(str(preset), None)
        # omit < 2.0.0v

    if families:
        for family in families:
            nuke_dataflow_node = nuke_dataflow_node.get(str(family), None)

    log.info("Dataflow: {}".format(nuke_dataflow_node))
    return nuke_dataflow_node


def get_node_colorspace_preset(**kwarg):
    ''' Get preset data for colorspace
    '''
    log.info(kwarg)
    host = kwarg.get("host", "nuke")
    cls = kwarg.get("class", None)
    families = kwarg.get("families", [])
    preset = kwarg.get("preset", None)  # omit < 2.0.0v

    if not any([host, cls]):
        msg = "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)
        log.error(msg)
        nuke.message(msg)

    nuke_colorspace = get_colorspace_preset().get(str(host), None)
    nuke_colorspace_node = nuke_colorspace.get(str(cls), None)

    if preset:  # omit < 2.0.0v
        nuke_colorspace_node = nuke_colorspace_node.get(str(preset), None)
        # omit < 2.0.0v

    if families:
        for family in families:
            nuke_colorspace_node = nuke_colorspace_node.get(str(family), None)

    log.info("Colorspace: {}".format(nuke_colorspace_node))
    return nuke_colorspace_node
(Eleven binary image files were also updated in this change; before and after file sizes are identical, roughly 1.2-2.7 KiB each.)
@ -1,9 +1,4 @@
import os
import sys
import KnobScripter

from pype.hosts.nuke.lib import (
    writes_version_sync,
from pype.hosts.nuke.api.lib import (
    on_script_load,
    check_inventory_versions
)
@ -11,13 +6,11 @@ from pype.hosts.nuke.lib import (
import nuke
from pype.api import Logger

log = Logger().get_logger(__name__, "nuke")
log = Logger().get_logger(__name__)


# nuke.addOnScriptSave(writes_version_sync)
nuke.addOnScriptSave(on_script_load)
nuke.addOnScriptLoad(check_inventory_versions)
nuke.addOnScriptSave(check_inventory_versions)
# nuke.addOnScriptSave(writes_version_sync)

log.info('Automatic syncing of write file knob to script version')
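The hunk above wires pipeline callbacks into Nuke's script-load/save hooks. A minimal sketch of that registration mechanism, assuming nothing beyond Nuke's public callback API; the callback bodies are hypothetical:

import nuke


def _on_script_loaded():
    # hypothetical callback body - only the registration mechanism matters here
    nuke.tprint("script loaded: {}".format(nuke.root().name()))


def _on_script_saved():
    nuke.tprint("script saved: {}".format(nuke.root().name()))


# Nuke invokes these hooks whenever a script is opened or saved.
nuke.addOnScriptLoad(_on_script_loaded)
nuke.addOnScriptSave(_on_script_saved)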
@ -3,7 +3,7 @@ import os
import glob
import nuke
from pype.api import Logger
log = Logger().get_logger(__name__, "nuke")
log = Logger().get_logger(__name__)

SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v',
                       'm2v']
@ -219,7 +219,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
            "PYBLISHPLUGINPATH",
            "NUKE_PATH",
            "TOOL_ENV",
            "PYPE_DEV"
            "PYPE_DEV",
            "FOUNDRY_LICENSE"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)
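The Deadline submitter copies only a whitelist of variables from the current process environment (now including FOUNDRY_LICENSE) and overlays the avalon session on top. A small sketch of the same dict-building pattern; the keys and session values here are made up:

import os

# Keep only whitelisted variables that are actually set, then overlay session data.
keys = ["NUKE_PATH", "FOUNDRY_LICENSE", "TOOL_ENV"]          # example whitelist
session = {"AVALON_PROJECT": "demo", "AVALON_ASSET": "sh010"}  # hypothetical session

environment = dict(
    {key: os.environ[key] for key in keys if key in os.environ},
    **session
)
print(environment)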
@ -1,19 +0,0 @@
import pyblish.api


class CollectCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder - 0.5
    label = "Collect Current File"
    hosts = ["nuke"]

    def process(self, context):
        import os
        import nuke
        current_file = nuke.root().name()

        normalised = os.path.normpath(current_file)

        context.data["current_file"] = normalised
        context.data["currentFile"] = normalised
@ -1,14 +0,0 @@
import pyblish.api


class CollectHost(pyblish.api.ContextPlugin):
    """Inject the host into context"""

    order = pyblish.api.CollectorOrder
    label = "Collect Host"
    hosts = ["nuke"]

    def process(self, context):
        import pyblish.api

        context.data["host"] = pyblish.api.current_host()
@ -1,13 +0,0 @@
import pyblish.api


class CollectHostVersion(pyblish.api.ContextPlugin):
    """Inject the hosts version into context"""

    order = pyblish.api.CollectorOrder
    label = "Collect Host Version"
    hosts = ["nuke"]

    def process(self, context):
        import nuke
        context.data["hostVersion"] = nuke.NUKE_VERSION_STRING
@ -1,30 +0,0 @@
import toml

import nuke

import pyblish.api


class CollectReadLegacy(pyblish.api.ContextPlugin):
    """Collect legacy read nodes."""

    order = pyblish.api.CollectorOrder
    label = "Collect Read Legacy"
    hosts = ["nuke", "nukeassist"]

    def process(self, context):

        for node in nuke.allNodes():
            if node.Class() != "Read":
                continue

            if "avalon" not in node.knobs().keys():
                continue

            if not toml.loads(node["avalon"].value()):
                return

            instance = context.create_instance(
                node.name(), family="read.legacy"
            )
            instance.append(node)
@ -1,52 +0,0 @@
import pyblish.api
import pype.api
from avalon import io, api

import nuke


class CollectReview(pyblish.api.InstancePlugin):
    """Collect review instance from rendered frames
    """

    order = pyblish.api.CollectorOrder + 0.3
    label = "Collect Review"
    hosts = ["nuke"]
    families = ["render", "render.local", "render.farm"]

    def process(self, instance):

        node = instance[0]

        if "review" not in node.knobs():
            knob = nuke.Boolean_Knob("review", "Review")
            knob.setValue(True)
            node.addKnob(knob)

        if not node["review"].value():
            return

        # * Add audio to instance if exists.
        # Find latest versions document
        version_doc = pype.api.get_latest_version(
            instance.context.data["assetEntity"]["name"], "audioMain"
        )
        repre_doc = None
        if version_doc:
            # Try to find it's representation (Expected there is only one)
            repre_doc = io.find_one(
                {"type": "representation", "parent": version_doc["_id"]}
            )

        # Add audio to instance if representation was found
        if repre_doc:
            instance.data["audio"] = [{
                "offset": 0,
                "filename": api.get_representation_path(repre_doc)
            }]

        instance.data["families"].append("review")
        instance.data['families'].append('ftrack')

        self.log.info("Review collected: `{}`".format(instance))
        self.log.debug("__ instance.data: `{}`".format(instance.data))
@ -1,14 +0,0 @@
import nuke

import pyblish.api


class CollectSelection(pyblish.api.ContextPlugin):
    """Collect selection."""

    order = pyblish.api.CollectorOrder
    label = "Collect Selection of Nodes"
    hosts = ["nuke"]

    def process(self, context):
        context.data["selection"] = nuke.selectedNodes()
@ -8,6 +8,9 @@
        }
    },
    "publish": {
        "PreCollectNukeInstances": {
            "sync_workfile_version": true
        },
        "ExtractThumbnail": {
            "enabled": true,
            "nodes": {
@ -36,7 +39,7 @@
            }
        },
        "ValidateKnobs": {
            "enabled": true,
            "enabled": false,
            "knobs": {
                "render": {
                    "review": true
@ -44,7 +47,7 @@
            }
        },
        "ExtractReviewDataLut": {
            "enabled": true
            "enabled": false
        },
        "ExtractReviewDataMov": {
            "enabled": true,
@ -235,17 +235,19 @@
        "__environment_keys__": {
            "nuke": [
                "NUKE_PATH",
                "PATH"
                "PATH",
                "LOGLEVEL"
            ]
        },
        "NUKE_PATH": [
            "{PYPE_REPOS_ROOT}/avalon-core/setup/nuke/nuke_path",
            "{PYPE_ROOT}/setup/nuke/nuke_path",
            "{PYPE_ROOT}/pype/hosts/nuke/startup",
            "{PYPE_STUDIO_PLUGINS}/nuke"
        ],
        "PATH": {
            "windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}"
        }
        },
        "LOGLEVEL": "DEBUG"
    },
    "variants": {
        "nuke_12.2": {
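These tool settings declare which keys make up the launch environment and use {PLACEHOLDER} templates such as {PYPE_ROOT}. As a hedged sketch of how such templates could be expanded into a concrete environment before launching the host (the expansion function and root values below are illustrative, not the pipeline's actual implementation):

import os

# Illustrative only: expand {PLACEHOLDER} templates against known roots.
env_template = {
    "NUKE_PATH": [
        "{PYPE_ROOT}/setup/nuke/nuke_path",
        "{PYPE_ROOT}/pype/hosts/nuke/startup",
    ],
    "LOGLEVEL": "DEBUG",
}
roots = {"PYPE_ROOT": "/opt/pype"}  # hypothetical install root

env = {}
for key, value in env_template.items():
    if isinstance(value, list):
        # path lists are joined with the platform path separator
        env[key] = os.pathsep.join(item.format(**roots) for item in value)
    else:
        env[key] = value.format(**roots)
print(env["NUKE_PATH"])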
@ -358,17 +360,19 @@
        "__environment_keys__": {
            "nukex": [
                "NUKE_PATH",
                "PATH"
                "PATH",
                "LOGLEVEL"
            ]
        },
        "NUKE_PATH": [
            "{PYPE_REPOS_ROOT}/avalon-core/setup/nuke/nuke_path",
            "{PYPE_ROOT}/setup/nuke/nuke_path",
            "{PYPE_ROOT}/pype/hosts/nuke/startup",
            "{PYPE_STUDIO_PLUGINS}/nuke"
        ],
        "PATH": {
            "windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}"
        }
        },
        "LOGLEVEL": "DEBUG"
    },
    "variants": {
        "nukex_12.2": {
@ -484,18 +488,18 @@
                "PATH",
                "WORKFILES_STARTUP",
                "TAG_ASSETBUILD_STARTUP",
                "PYPE_LOG_NO_COLORS"
                "LOGLEVEL"
            ]
        },
        "HIERO_PLUGIN_PATH": [
            "{PYPE_ROOT}/setup/nukestudio/hiero_plugin_path"
            "{PYPE_ROOT}/setup/hiero/hiero_plugin_path"
        ],
        "PATH": {
            "windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}"
        },
        "WORKFILES_STARTUP": "0",
        "TAG_ASSETBUILD_STARTUP": "0",
        "PYPE_LOG_NO_COLORS": "True"
        "LOGLEVEL": "DEBUG"
    },
    "variants": {
        "nukestudio_12.2": {
@ -606,18 +610,18 @@
                "PATH",
                "WORKFILES_STARTUP",
                "TAG_ASSETBUILD_STARTUP",
                "PYPE_LOG_NO_COLORS"
                "LOGLEVEL"
            ]
        },
        "HIERO_PLUGIN_PATH": [
            "{PYPE_ROOT}/setup/nukestudio/hiero_plugin_path"
            "{PYPE_ROOT}/setup/hiero/hiero_plugin_path"
        ],
        "PATH": {
            "windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}"
        },
        "WORKFILES_STARTUP": "0",
        "TAG_ASSETBUILD_STARTUP": "0",
        "PYPE_LOG_NO_COLORS": "True"
        "LOGLEVEL": "DEBUG"
    },
    "variants": {
        "hiero_12.2": {
@ -47,6 +47,20 @@
            "key": "publish",
            "label": "Publish plugins",
            "children": [
                {
                    "type": "dict",
                    "collapsable": true,
                    "key": "PreCollectNukeInstances",
                    "label": "PreCollectNukeInstances",
                    "is_group": true,
                    "children": [
                        {
                            "type": "boolean",
                            "key": "sync_workfile_version",
                            "label": "Sync Version from workfile"
                        }
                    ]
                },
                {
                    "type": "dict",
                    "collapsable": true,
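The schema hunk exposes a boolean "sync_workfile_version" under the "PreCollectNukeInstances" group, matching the project-settings value set to true earlier in this PR, and the write collector reads it as `self.sync_workfile_version`. A hedged sketch of how such a settings value could be applied onto a plugin class attribute; the plugin name and settings dict here are made up for illustration:

import pyblish.api


class PreCollectExample(pyblish.api.InstancePlugin):
    """Illustrative stand-in for a plugin configured from settings."""

    order = pyblish.api.CollectorOrder - 0.58
    sync_workfile_version = True  # default, may be overridden by settings


# Hedged sketch: copy matching settings values onto class attributes.
settings = {"PreCollectExample": {"sync_workfile_version": False}}  # hypothetical
for attr, value in settings.get(PreCollectExample.__name__, {}).items():
    setattr(PreCollectExample, attr, value)

print(PreCollectExample.sync_workfile_version)  # False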
@ -1 +1 @@
Subproject commit d269f5da0f80990fe7e400d6180e3d7699b6324a
Subproject commit 964ae5fa7110984d5c0c00ad81c2c108031a7834
@ -29,6 +29,7 @@ pytest-cov
pytest-print
pyqt5
Qt.py
setuptools==45.0.0
scandir
speedcopy
six