diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 25504aa12b..62739bcbf6 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -2,16 +2,12 @@ import os import sys import nuke -from avalon import api as avalon -from openpype.tools import workfiles -from pyblish import api as pyblish +import avalon.api +import pyblish.api +from pype.api import Logger from openpype.api import Logger -import openpype.hosts.nuke from . import lib, menu - -self = sys.modules[__name__] -self.workfiles_launched = False log = Logger().get_logger(__name__) AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") @@ -25,7 +21,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") # registering pyblish gui regarding settings in presets if os.getenv("PYBLISH_GUI", None): - pyblish.register_gui(os.getenv("PYBLISH_GUI", None)) + pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) def reload_config(): @@ -61,15 +57,16 @@ def install(): ''' log.info("Registering Nuke plug-ins..") - pyblish.register_plugin_path(PUBLISH_PATH) - avalon.register_plugin_path(avalon.Loader, LOAD_PATH) - avalon.register_plugin_path(avalon.Creator, CREATE_PATH) - avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH) + pyblish.api.register_plugin_path(PUBLISH_PATH) + avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) + avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) # Register Avalon event for workfiles loading. - avalon.on("workio.open_file", lib.check_inventory_versions) + avalon.api.on("workio.open_file", lib.check_inventory_versions) - pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled) workfile_settings = lib.WorkfileSettings() # Disable all families except for the ones we explicitly want to see family_states = [ @@ -79,39 +76,27 @@ def install(): "gizmo" ] - avalon.data["familiesStateDefault"] = False - avalon.data["familiesStateToggled"] = family_states - - # Workfiles. - launch_workfiles = os.environ.get("WORKFILES_STARTUP") - - if launch_workfiles: - nuke.addOnCreate(launch_workfiles_app, nodeClass="Root") + avalon.api.data["familiesStateDefault"] = False + avalon.api.data["familiesStateToggled"] = family_states # Set context settings. 
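+    # Root onCreate callbacks run as soon as the session's Root node exists,
+    # i.e. right after Nuke starts or a script is created/opened.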
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root") - # nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - + nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") + nuke.addOnCreate(lib.open_last_workfile, nodeClass="Root") + nuke.addOnCreate(lib.launch_workfiles_app, nodeClass="Root") menu.install() -def launch_workfiles_app(): - '''Function letting start workfiles after start of host - ''' - if not self.workfiles_launched: - self.workfiles_launched = True - workfiles.show(os.environ["AVALON_WORKDIR"]) - - def uninstall(): '''Uninstalling host's integration ''' log.info("Deregistering Nuke plug-ins..") - pyblish.deregister_plugin_path(PUBLISH_PATH) - avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) - avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) + pyblish.api.deregister_plugin_path(PUBLISH_PATH) + avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH) + avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) - pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) + pyblish.api.deregister_callback( + "instanceToggled", on_pyblish_instance_toggled) reload_config() menu.uninstall() @@ -123,7 +108,7 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( instance, old_value, new_value)) - from avalon.nuke import ( + from avalon.api.nuke import ( viewer_update_and_undo_stop, add_publish_knob ) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 8618b03cdc..d95af6ec4c 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -3,9 +3,14 @@ import re import sys from collections import OrderedDict + from avalon import api, io, lib +from avalon.tools import workfiles import avalon.nuke from avalon.nuke import lib as anlib +from avalon.nuke import ( + save_file, open_file +) from openpype.api import ( Logger, Anatomy, @@ -13,6 +18,7 @@ from openpype.api import ( get_anatomy_settings, get_hierarchy, get_asset, + get_current_project_settings, config, ApplicationManager ) @@ -25,8 +31,10 @@ log = Logger().get_logger(__name__) self = sys.modules[__name__] self._project = None +self.workfiles_launched = False self._node_tab_name = "{}".format(os.getenv("AVALON_LABEL") or "Avalon") + def get_node_imageio_setting(**kwarg): ''' Get preset data for dataflow (fileType, compression, bitDepth) ''' @@ -1616,3 +1624,41 @@ def find_free_space_to_paste_nodes( xpos = min(group_xpos) ypos = max(group_ypos) + abs(offset) return xpos, ypos + + +def launch_workfiles_app(): + '''Function letting start workfiles after start of host + ''' + # get state from settings + open_at_start = get_current_project_settings()["nuke"].get( + "general", {}).get("open_workfile_at_start") + + # return if none is defined + if not open_at_start: + return + + if not self.workfiles_launched: + self.workfiles_launched = True + workfiles.show(os.environ["AVALON_WORKDIR"]) + + +def open_last_workfile(): + # get state from settings + open_last_version = get_current_project_settings()["nuke"].get( + "general", {}).get("create_initial_workfile") + + log.info("Opening last workfile...") + last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") + + if not os.path.exists(last_workfile_path): + # return if none is defined + if not open_last_version: + return + + save_file(last_workfile_path) + else: + # to avoid looping of the callback, remove it! 
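+        # (`open_file` loads the script, which recreates the Root node and
+        # would fire this onCreate handler again, so it is deregistered first.)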
+ nuke.removeOnCreate(open_last_workfile, nodeClass="Root") + + # open workfile + open_file(last_workfile_path) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 0173eb0a82..826d1268b4 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -1,5 +1,8 @@ { "general": { + "open_workfile_at_start": false, + "create_initial_workfile": true + }, "menu": { "create": "ctrl+shift+alt+c", "publish": "ctrl+alt+p", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 75ca5411a1..e5520f459b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -12,37 +12,47 @@ "label": "General", "children": [ { - "type": "dict", - "collapsible": true, - "key": "menu", - "label": "OpenPype Menu shortcuts", - "children": [ - { - "type": "text", - "key": "create", - "label": "Create..." - }, - { - "type": "text", - "key": "publish", - "label": "Publish..." - }, - { - "type": "text", - "key": "load", - "label": "Load..." - }, - { - "type": "text", - "key": "manage", - "label": "Manage..." - }, - { - "type": "text", - "key": "build_workfile", - "label": "Build Workfile" - } - ] + "type": "boolean", + "key": "open_workfile_at_start", + "label": "Open Workfile window at start of a Nuke session" + }, + { + "type": "boolean", + "key": "create_initial_workfile", + "label": "Create initial workfile version if none available" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "menu", + "label": "OpenPype Menu shortcuts", + "children": [ + { + "type": "text", + "key": "create", + "label": "Create..." + }, + { + "type": "text", + "key": "publish", + "label": "Publish..." + }, + { + "type": "text", + "key": "load", + "label": "Load..." + }, + { + "type": "text", + "key": "manage", + "label": "Manage..." + }, + { + "type": "text", + "key": "build_workfile", + "label": "Build Workfile" } ] }, @@ -87,7 +97,7 @@ "name": "schema_nuke_publish", "template_data": [] }, - { + { "type": "schema", "name": "schema_nuke_load", "template_data": [] @@ -101,4 +111,4 @@ "name": "schema_publish_gui_filter" } ] -} +} \ No newline at end of file diff --git a/pype/hosts/nuke/api/__init__.py b/pype/hosts/nuke/api/__init__.py deleted file mode 100644 index 04f3ec9083..0000000000 --- a/pype/hosts/nuke/api/__init__.py +++ /dev/null @@ -1,126 +0,0 @@ -import os -import sys -import nuke - -import avalon.api -import pyblish.api -from pype.api import Logger -import pype.hosts.nuke -from . import lib, menu - -log = Logger().get_logger(__name__) - -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") -HOST_DIR = os.path.dirname(os.path.abspath(pype.hosts.nuke.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") - - -# registering pyblish gui regarding settings in presets -if os.getenv("PYBLISH_GUI", None): - pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) - - -def reload_config(): - """Attempt to reload pipeline at run-time. - - CAUTION: This is primarily for development and debugging purposes. 
- - """ - - import importlib - - for module in ( - "{}.api".format(AVALON_CONFIG), - "{}.hosts.nuke.api.actions".format(AVALON_CONFIG), - "{}.hosts.nuke.api.menu".format(AVALON_CONFIG), - "{}.hosts.nuke.api.plugin".format(AVALON_CONFIG), - "{}.hosts.nuke.api.lib".format(AVALON_CONFIG), - ): - log.info("Reloading module: {}...".format(module)) - - module = importlib.import_module(module) - - try: - importlib.reload(module) - except AttributeError as e: - from importlib import reload - log.warning("Cannot reload module: {}".format(e)) - reload(module) - - -def install(): - ''' Installing all requarements for Nuke host - ''' - - log.info("Registering Nuke plug-ins..") - pyblish.api.register_plugin_path(PUBLISH_PATH) - avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) - avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) - - # Register Avalon event for workfiles loading. - avalon.api.on("workio.open_file", lib.check_inventory_versions) - - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled) - workfile_settings = lib.WorkfileSettings() - # Disable all families except for the ones we explicitly want to see - family_states = [ - "write", - "review", - "nukenodes" - "gizmo" - ] - - avalon.api.data["familiesStateDefault"] = False - avalon.api.data["familiesStateToggled"] = family_states - - # Set context settings. - nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root") - nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - nuke.addOnCreate(lib.open_last_workfile, nodeClass="Root") - nuke.addOnCreate(lib.launch_workfiles_app, nodeClass="Root") - menu.install() - - -def uninstall(): - '''Uninstalling host's integration - ''' - log.info("Deregistering Nuke plug-ins..") - pyblish.api.deregister_plugin_path(PUBLISH_PATH) - avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) - - pyblish.api.deregister_callback( - "instanceToggled", on_pyblish_instance_toggled) - - reload_config() - menu.uninstall() - - -def on_pyblish_instance_toggled(instance, old_value, new_value): - """Toggle node passthrough states on instance toggles.""" - - log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( - instance, old_value, new_value)) - - from avalon.api.nuke import ( - viewer_update_and_undo_stop, - add_publish_knob - ) - - # Whether instances should be passthrough based on new value - - with viewer_update_and_undo_stop(): - n = instance[0] - try: - n["publish"].value() - except ValueError: - n = add_publish_knob(n) - log.info(" `Publish` knob was added to write node..") - - n["publish"].setValue(new_value) diff --git a/pype/hosts/nuke/api/lib.py b/pype/hosts/nuke/api/lib.py deleted file mode 100644 index c01b6e6470..0000000000 --- a/pype/hosts/nuke/api/lib.py +++ /dev/null @@ -1,1660 +0,0 @@ -import os -import re -import sys -from collections import OrderedDict - - -from avalon import api, io, lib -from avalon.tools import workfiles -import avalon.nuke -from avalon.nuke import lib as anlib -from avalon.nuke import ( - save_file, open_file -) - -from pype.api import ( - Logger, - Anatomy, - get_version_from_path, - get_anatomy_settings, - get_hierarchy, - get_asset, - get_current_project_settings, - config, - ApplicationManager -) - -import nuke - -from .utils import set_context_favorites - -log = Logger().get_logger(__name__) - -self = sys.modules[__name__] 
-self._project = None -self.workfiles_launched = False - -def get_node_imageio_setting(**kwarg): - ''' Get preset data for dataflow (fileType, compression, bitDepth) - ''' - log.info(kwarg) - host = str(kwarg.get("host", "nuke")) - nodeclass = kwarg.get("nodeclass", None) - creator = kwarg.get("creator", None) - project_name = os.getenv("AVALON_PROJECT") - - assert any([host, nodeclass]), nuke.message( - "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) - - imageio_nodes = (get_anatomy_settings(project_name) - ["imageio"] - .get(host, None) - ["nodes"] - ["requiredNodes"] - ) - - for node in imageio_nodes: - log.info(node) - if node["nukeNodeClass"] == nodeclass: - if creator in node["plugins"]: - imageio_node = node - - log.info("ImageIO node: {}".format(imageio_node)) - return imageio_node - - -def get_imageio_input_colorspace(filename): - ''' Get input file colorspace based on regex in settings. - ''' - imageio_regex_inputs = (get_anatomy_settings(os.getenv("AVALON_PROJECT")) - ["imageio"] - ["nuke"] - ["regexInputs"] - ["inputs"] - ) - - preset_clrsp = None - for regexInput in imageio_regex_inputs: - if bool(re.search(regexInput["regex"], filename)): - preset_clrsp = str(regexInput["colorspace"]) - - return preset_clrsp - - -def on_script_load(): - ''' Callback for ffmpeg support - ''' - if nuke.env['LINUX']: - nuke.tcl('load ffmpegReader') - nuke.tcl('load ffmpegWriter') - else: - nuke.tcl('load movReader') - nuke.tcl('load movWriter') - - -def check_inventory_versions(): - """ - Actual version idetifier of Loaded containers - - Any time this function is run it will check all nodes and filter only - Loader nodes for its version. It will get all versions from database - and check if the node is having actual version. If not then it will color - it to red. 
- """ - # get all Loader nodes by avalon attribute metadata - for each in nuke.allNodes(): - if each.Class() == 'Read': - container = avalon.nuke.parse_container(each) - - if container: - node = nuke.toNode(container["objectName"]) - avalon_knob_data = avalon.nuke.read( - node) - - # get representation from io - representation = io.find_one({ - "type": "representation", - "_id": io.ObjectId(avalon_knob_data["representation"]) - }) - - # Get start frame from version data - version = io.find_one({ - "type": "version", - "_id": representation["parent"] - }) - - # get all versions in list - versions = io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) - - # check the available version and do match - # change color of node if not max verion - if version.get("name") not in [max_version]: - node["tile_color"].setValue(int("0xd84f20ff", 16)) - else: - node["tile_color"].setValue(int("0x4ecd25ff", 16)) - - -def writes_version_sync(): - ''' Callback synchronizing version of publishable write nodes - ''' - try: - rootVersion = get_version_from_path(nuke.root().name()) - padding = len(rootVersion) - new_version = "v" + str("{" + ":0>{}".format(padding) + "}").format( - int(rootVersion) - ) - log.debug("new_version: {}".format(new_version)) - except Exception: - return - - for each in nuke.allNodes(): - if each.Class() == 'Write': - # check if the node is avalon tracked - if "AvalonTab" not in each.knobs(): - continue - - avalon_knob_data = avalon.nuke.read( - each) - - try: - if avalon_knob_data['families'] not in ["render"]: - log.debug(avalon_knob_data['families']) - continue - - node_file = each['file'].value() - - node_version = "v" + get_version_from_path(node_file) - log.debug("node_version: {}".format(node_version)) - - node_new_file = node_file.replace(node_version, new_version) - each['file'].setValue(node_new_file) - if not os.path.isdir(os.path.dirname(node_new_file)): - log.warning("Path does not exist! 
I am creating it.") - os.makedirs(os.path.dirname(node_new_file)) - except Exception as e: - log.warning( - "Write node: `{}` has no version in path: {}".format( - each.name(), e)) - - -def version_up_script(): - ''' Raising working script's version - ''' - import nukescripts - nukescripts.script_and_write_nodes_version_up() - - -def check_subsetname_exists(nodes, subset_name): - """ - Checking if node is not already created to secure there is no duplicity - - Arguments: - nodes (list): list of nuke.Node objects - subset_name (str): name we try to find - - Returns: - bool: True of False - """ - result = next((True for n in nodes - if subset_name in avalon.nuke.read(n).get("subset", "")), False) - return result - - -def get_render_path(node): - ''' Generate Render path from presets regarding avalon knob data - ''' - data = dict() - data['avalon'] = avalon.nuke.read( - node) - - data_preset = { - "class": data['avalon']['family'], - "preset": data['avalon']['families'] - } - - nuke_imageio_writes = get_node_imageio_setting(**data_preset) - - application = lib.get_application(os.environ["AVALON_APP_NAME"]) - data.update({ - "application": application, - "nuke_imageio_writes": nuke_imageio_writes - }) - - anatomy_filled = format_anatomy(data) - return anatomy_filled["render"]["path"].replace("\\", "/") - - -def format_anatomy(data): - ''' Helping function for formating of anatomy paths - - Arguments: - data (dict): dictionary with attributes used for formating - - Return: - path (str) - ''' - # TODO: perhaps should be nonPublic - - anatomy = Anatomy() - log.debug("__ anatomy.templates: {}".format(anatomy.templates)) - - try: - # TODO: bck compatibility with old anatomy template - padding = int( - anatomy.templates["render"].get( - "frame_padding", - anatomy.templates["render"].get("padding") - ) - ) - except KeyError as e: - msg = ("`padding` key is not in `render` " - "or `frame_padding` on is not available in " - "Anatomy template. 
Please, add it there and restart " - "the pipeline (padding: \"4\"): `{}`").format(e) - - log.error(msg) - nuke.message(msg) - - version = data.get("version", None) - if not version: - file = script_name() - data["version"] = get_version_from_path(file) - project_document = io.find_one({"type": "project"}) - data.update({ - "subset": data["avalon"]["subset"], - "asset": data["avalon"]["asset"], - "task": os.environ["AVALON_TASK"], - "family": data["avalon"]["family"], - "project": {"name": project_document["name"], - "code": project_document["data"].get("code", '')}, - "hierarchy": get_hierarchy(), - "frame": "#" * padding, - }) - return anatomy.format(data) - - -def script_name(): - ''' Returns nuke script path - ''' - return nuke.root().knob('name').value() - - -def add_button_write_to_read(node): - name = "createReadNode" - label = "Cread Read From Rendered" - value = "import write_to_read;write_to_read.write_to_read(nuke.thisNode())" - knob = nuke.PyScript_Knob(name, label, value) - knob.clearFlag(nuke.STARTLINE) - node.addKnob(knob) - - -def create_write_node(name, data, input=None, prenodes=None, review=True): - ''' Creating write node which is group node - - Arguments: - name (str): name of node - data (dict): data to be imprinted - input (node): selected node to connect to - prenodes (list, optional): list of lists, definitions for nodes - to be created before write - review (bool): adding review knob - - Example: - prenodes = [( - "NameNode", # string - "NodeClass", # string - ( # OrderDict: knob and values pairs - ("knobName", "knobValue"), - ("knobName", "knobValue") - ), - ( # list outputs - "firstPostNodeName", - "secondPostNodeName" - ) - ) - ] - - Return: - node (obj): group node with avalon data as Knobs - ''' - - imageio_writes = get_node_imageio_setting(**data) - app_manager = ApplicationManager() - app_name = os.environ.get("AVALON_APP_NAME") - if app_name: - app = app_manager.applications.get(app_name) - - for knob in imageio_writes["knobs"]: - if knob["name"] == "file_type": - representation = knob["value"] - - try: - data.update({ - "app": app.host_name, - "imageio_writes": imageio_writes, - "representation": representation, - }) - anatomy_filled = format_anatomy(data) - - except Exception as e: - msg = "problem with resolving anatomy template: {}".format(e) - log.error(msg) - nuke.message(msg) - - # build file path to workfiles - fpath = str(anatomy_filled["work"]["folder"]).replace("\\", "/") - fpath = data["fpath_template"].format( - work=fpath, version=data["version"], subset=data["subset"], - frame=data["frame"], - ext=representation - ) - - # create directory - if not os.path.isdir(os.path.dirname(fpath)): - log.warning("Path does not exist! 
I am creating it.") - os.makedirs(os.path.dirname(fpath)) - - _data = OrderedDict({ - "file": fpath - }) - - # adding dataflow template - log.debug("imageio_writes: `{}`".format(imageio_writes)) - for knob in imageio_writes["knobs"]: - if knob["name"] not in ["_id", "_previous"]: - _data.update({knob["name"]: knob["value"]}) - - _data = anlib.fix_data_for_node_create(_data) - - log.debug("_data: `{}`".format(_data)) - - if "frame_range" in data.keys(): - _data["frame_range"] = data.get("frame_range", None) - log.debug("_data[frame_range]: `{}`".format(_data["frame_range"])) - - GN = nuke.createNode("Group", "name {}".format(name)) - - prev_node = None - with GN: - connections = list() - if input: - # if connected input node was defined - connections.append({ - "node": input, - "inputName": input.name()}) - prev_node = nuke.createNode( - "Input", "name {}".format(input.name())) - else: - # generic input node connected to nothing - prev_node = nuke.createNode( - "Input", "name {}".format("rgba")) - - # creating pre-write nodes `prenodes` - if prenodes: - for name, klass, properties, set_output_to in prenodes: - # create node - now_node = nuke.createNode(klass, "name {}".format(name)) - - # add data to knob - for k, v in properties: - try: - now_node[k].value() - except NameError: - log.warning( - "knob `{}` does not exist on node `{}`".format( - k, now_node["name"].value() - )) - continue - - if k and v: - now_node[k].setValue(str(v)) - - # connect to previous node - if set_output_to: - if isinstance(set_output_to, (tuple or list)): - for i, node_name in enumerate(set_output_to): - input_node = nuke.createNode( - "Input", "name {}".format(node_name)) - connections.append({ - "node": nuke.toNode(node_name), - "inputName": node_name}) - now_node.setInput(1, input_node) - elif isinstance(set_output_to, str): - input_node = nuke.createNode( - "Input", "name {}".format(node_name)) - connections.append({ - "node": nuke.toNode(set_output_to), - "inputName": set_output_to}) - now_node.setInput(0, input_node) - else: - now_node.setInput(0, prev_node) - - # swith actual node to previous - prev_node = now_node - - # creating write node - write_node = now_node = anlib.add_write_node( - "inside_{}".format(name), - **_data - ) - - # connect to previous node - now_node.setInput(0, prev_node) - - # swith actual node to previous - prev_node = now_node - - now_node = nuke.createNode("Output", "name Output1") - - # connect to previous node - now_node.setInput(0, prev_node) - - # imprinting group node - anlib.set_avalon_knob_data(GN, data["avalon"]) - anlib.add_publish_knob(GN) - add_rendering_knobs(GN) - - if review: - add_review_knob(GN) - - # add divider - GN.addKnob(nuke.Text_Knob('', 'Rendering')) - - # Add linked knobs. - linked_knob_names = [ - "_grp-start_", - "use_limit", "first", "last", - "_grp-end_", - "Render" - ] - for name in linked_knob_names: - if "_grp-start_" in name: - knob = nuke.Tab_Knob( - "rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP) - GN.addKnob(knob) - elif "_grp-end_" in name: - knob = nuke.Tab_Knob( - "rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP) - GN.addKnob(knob) - else: - link = nuke.Link_Knob("") - link.makeLink(write_node.name(), name) - link.setName(name) - if "Render" in name: - link.setLabel("Render Local") - link.setFlag(0x1000) - GN.addKnob(link) - - # adding write to read button - add_button_write_to_read(GN) - - # Deadline tab. 
- add_deadline_tab(GN) - - # set tile color - tile_color = _data.get("tile_color", "0xff0000ff") - GN["tile_color"].setValue(tile_color) - - return GN - - -def add_rendering_knobs(node): - ''' Adds additional rendering knobs to given node - - Arguments: - node (obj): nuke node object to be fixed - - Return: - node (obj): with added knobs - ''' - if "render" not in node.knobs(): - knob = nuke.Enumeration_Knob("render", "", [ - "Use existing frames", "Local", "On farm"]) - knob.clearFlag(nuke.STARTLINE) - node.addKnob(knob) - return node - - -def add_review_knob(node): - ''' Adds additional review knob to given node - - Arguments: - node (obj): nuke node object to be fixed - - Return: - node (obj): with added knob - ''' - if "review" not in node.knobs(): - knob = nuke.Boolean_Knob("review", "Review") - knob.setValue(True) - node.addKnob(knob) - return node - - -def add_deadline_tab(node): - node.addKnob(nuke.Tab_Knob("Deadline")) - - knob = nuke.Int_Knob("deadlineChunkSize", "Chunk Size") - knob.setValue(0) - node.addKnob(knob) - - knob = nuke.Int_Knob("deadlinePriority", "Priority") - knob.setValue(50) - node.addKnob(knob) - - -def get_deadline_knob_names(): - return ["Deadline", "deadlineChunkSize", "deadlinePriority"] - - -def create_backdrop(label="", color=None, layer=0, - nodes=None): - """ - Create Backdrop node - - Arguments: - color (str): nuke compatible string with color code - layer (int): layer of node usually used (self.pos_layer - 1) - label (str): the message - nodes (list): list of nodes to be wrapped into backdrop - - """ - assert isinstance(nodes, list), "`nodes` should be a list of nodes" - - # Calculate bounds for the backdrop node. - bdX = min([node.xpos() for node in nodes]) - bdY = min([node.ypos() for node in nodes]) - bdW = max([node.xpos() + node.screenWidth() for node in nodes]) - bdX - bdH = max([node.ypos() + node.screenHeight() for node in nodes]) - bdY - - # Expand the bounds to leave a little border. Elements are offsets - # for left, top, right and bottom edges respectively - left, top, right, bottom = (-20, -65, 20, 60) - bdX += left - bdY += top - bdW += (right - left) - bdH += (bottom - top) - - bdn = nuke.createNode("BackdropNode") - bdn["z_order"].setValue(layer) - - if color: - bdn["tile_color"].setValue(int(color, 16)) - - bdn["xpos"].setValue(bdX) - bdn["ypos"].setValue(bdY) - bdn["bdwidth"].setValue(bdW) - bdn["bdheight"].setValue(bdH) - - if label: - bdn["label"].setValue(label) - - bdn["note_font_size"].setValue(20) - return bdn - - -class WorkfileSettings(object): - """ - All settings for workfile will be set - - This object is setting all possible root settings to the workfile. - Including Colorspace, Frame ranges, Resolution format. It can set it - to Root node or to any given node. 
- - Arguments: - root (node): nuke's root node - nodes (list): list of nuke's nodes - nodes_filter (list): filtering classes for nodes - - """ - - def __init__(self, - root_node=None, - nodes=None, - **kwargs): - self._project = kwargs.get( - "project") or io.find_one({"type": "project"}) - self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"] - self._asset_entity = get_asset(self._asset) - self._root_node = root_node or nuke.root() - self._nodes = self.get_nodes(nodes=nodes) - - self.data = kwargs - - def get_nodes(self, nodes=None, nodes_filter=None): - - if not isinstance(nodes, list) and not isinstance(nodes_filter, list): - return [n for n in nuke.allNodes()] - elif not isinstance(nodes, list) and isinstance(nodes_filter, list): - nodes = list() - for filter in nodes_filter: - [nodes.append(n) for n in nuke.allNodes(filter=filter)] - return nodes - elif isinstance(nodes, list) and not isinstance(nodes_filter, list): - return [n for n in self._nodes] - elif isinstance(nodes, list) and isinstance(nodes_filter, list): - for filter in nodes_filter: - return [n for n in self._nodes if filter in n.Class()] - - def set_viewers_colorspace(self, viewer_dict): - ''' Adds correct colorspace to viewer - - Arguments: - viewer_dict (dict): adjustments from presets - - ''' - if not isinstance(viewer_dict, dict): - msg = "set_viewers_colorspace(): argument should be dictionary" - log.error(msg) - nuke.message(msg) - return - - filter_knobs = [ - "viewerProcess", - "wipe_position" - ] - - erased_viewers = [] - for v in [n for n in self._nodes - if "Viewer" in n.Class()]: - v['viewerProcess'].setValue(str(viewer_dict["viewerProcess"])) - if str(viewer_dict["viewerProcess"]) \ - not in v['viewerProcess'].value(): - copy_inputs = v.dependencies() - copy_knobs = {k: v[k].value() for k in v.knobs() - if k not in filter_knobs} - - # delete viewer with wrong settings - erased_viewers.append(v['name'].value()) - nuke.delete(v) - - # create new viewer - nv = nuke.createNode("Viewer") - - # connect to original inputs - for i, n in enumerate(copy_inputs): - nv.setInput(i, n) - - # set coppied knobs - for k, v in copy_knobs.items(): - print(k, v) - nv[k].setValue(v) - - # set viewerProcess - nv['viewerProcess'].setValue(str(viewer_dict["viewerProcess"])) - - if erased_viewers: - log.warning( - "Attention! Viewer nodes {} were erased." 
- "It had wrong color profile".format(erased_viewers)) - - def set_root_colorspace(self, root_dict): - ''' Adds correct colorspace to root - - Arguments: - root_dict (dict): adjustmensts from presets - - ''' - if not isinstance(root_dict, dict): - msg = "set_root_colorspace(): argument should be dictionary" - log.error(msg) - nuke.message(msg) - - log.debug(">> root_dict: {}".format(root_dict)) - - # first set OCIO - if self._root_node["colorManagement"].value() \ - not in str(root_dict["colorManagement"]): - self._root_node["colorManagement"].setValue( - str(root_dict["colorManagement"])) - log.debug("nuke.root()['{0}'] changed to: {1}".format( - "colorManagement", root_dict["colorManagement"])) - root_dict.pop("colorManagement") - - # second set ocio version - if self._root_node["OCIO_config"].value() \ - not in str(root_dict["OCIO_config"]): - self._root_node["OCIO_config"].setValue( - str(root_dict["OCIO_config"])) - log.debug("nuke.root()['{0}'] changed to: {1}".format( - "OCIO_config", root_dict["OCIO_config"])) - root_dict.pop("OCIO_config") - - # third set ocio custom path - if root_dict.get("customOCIOConfigPath"): - self._root_node["customOCIOConfigPath"].setValue( - str(root_dict["customOCIOConfigPath"]).format( - **os.environ - ).replace("\\", "/") - ) - log.debug("nuke.root()['{}'] changed to: {}".format( - "customOCIOConfigPath", root_dict["customOCIOConfigPath"])) - root_dict.pop("customOCIOConfigPath") - - # then set the rest - for knob, value in root_dict.items(): - if self._root_node[knob].value() not in value: - self._root_node[knob].setValue(str(value)) - log.debug("nuke.root()['{}'] changed to: {}".format( - knob, value)) - - def set_writes_colorspace(self, write_dict): - ''' Adds correct colorspace to write node dict - - Arguments: - write_dict (dict): nuke write node as dictionary - - ''' - # scene will have fixed colorspace following presets for the project - if not isinstance(write_dict, dict): - msg = "set_root_colorspace(): argument should be dictionary" - log.error(msg) - return - - from avalon.nuke import read - - for node in nuke.allNodes(): - - if node.Class() in ["Viewer", "Dot"]: - continue - - # get data from avalon knob - avalon_knob_data = read(node) - - if not avalon_knob_data: - continue - - if avalon_knob_data["id"] != "pyblish.avalon.instance": - continue - - # establish families - families = [avalon_knob_data["family"]] - if avalon_knob_data.get("families"): - families.append(avalon_knob_data.get("families")) - - # except disabled nodes but exclude backdrops in test - for fmly, knob in write_dict.items(): - write = None - if (fmly in families): - # Add all nodes in group instances. 
- if node.Class() == "Group": - node.begin() - for x in nuke.allNodes(): - if x.Class() == "Write": - write = x - node.end() - elif node.Class() == "Write": - write = node - else: - log.warning("Wrong write node Class") - - write["colorspace"].setValue(str(knob["colorspace"])) - log.info( - "Setting `{0}` to `{1}`".format( - write.name(), - knob["colorspace"])) - - def set_reads_colorspace(self, reads): - """ Setting colorspace to Read nodes - - Looping trought all read nodes and tries to set colorspace based - on regex rules in presets - """ - changes = dict() - for n in nuke.allNodes(): - file = nuke.filename(n) - if not n.Class() == "Read": - continue - - # load nuke presets for Read's colorspace - read_clrs_presets = config.get_init_presets()["colorspace"].get( - "nuke", {}).get("read", {}) - - # check if any colorspace presets for read is mathing - preset_clrsp = next((read_clrs_presets[k] - for k in read_clrs_presets - if bool(re.search(k, file))), - None) - log.debug(preset_clrsp) - if preset_clrsp is not None: - current = n["colorspace"].value() - future = str(preset_clrsp) - if current != future: - changes.update({ - n.name(): { - "from": current, - "to": future - } - }) - log.debug(changes) - if changes: - msg = "Read nodes are not set to correct colospace:\n\n" - for nname, knobs in changes.items(): - msg += str( - " - node: '{0}' is now '{1}' but should be '{2}'\n" - ).format(nname, knobs["from"], knobs["to"]) - - msg += "\nWould you like to change it?" - - if nuke.ask(msg): - for nname, knobs in changes.items(): - n = nuke.toNode(nname) - n["colorspace"].setValue(knobs["to"]) - log.info( - "Setting `{0}` to `{1}`".format( - nname, - knobs["to"])) - - def set_colorspace(self): - ''' Setting colorpace following presets - ''' - nuke_colorspace = config.get_init_presets( - )["colorspace"].get("nuke", None) - - try: - self.set_root_colorspace(nuke_colorspace["root"]) - except AttributeError: - msg = "set_colorspace(): missing `root` settings in template" - - try: - self.set_viewers_colorspace(nuke_colorspace["viewer"]) - except AttributeError: - msg = "set_colorspace(): missing `viewer` settings in template" - nuke.message(msg) - log.error(msg) - - try: - self.set_writes_colorspace(nuke_colorspace["write"]) - except AttributeError: - msg = "set_colorspace(): missing `write` settings in template" - nuke.message(msg) - log.error(msg) - - reads = nuke_colorspace.get("read") - if reads: - self.set_reads_colorspace(reads) - - try: - for key in nuke_colorspace: - log.debug("Preset's colorspace key: {}".format(key)) - except TypeError: - msg = "Nuke is not in templates! Contact your supervisor!" 
- nuke.message(msg) - log.error(msg) - - def reset_frame_range_handles(self): - """Set frame range to current asset""" - - if "data" not in self._asset_entity: - msg = "Asset {} don't have set any 'data'".format(self._asset) - log.warning(msg) - nuke.message(msg) - return - data = self._asset_entity["data"] - - log.debug("__ asset data: `{}`".format(data)) - - missing_cols = [] - check_cols = ["fps", "frameStart", "frameEnd", - "handleStart", "handleEnd"] - - for col in check_cols: - if col not in data: - missing_cols.append(col) - - if len(missing_cols) > 0: - missing = ", ".join(missing_cols) - msg = "'{}' are not set for asset '{}'!".format( - missing, self._asset) - log.warning(msg) - nuke.message(msg) - return - - # get handles values - handle_start = data["handleStart"] - handle_end = data["handleEnd"] - - fps = float(data["fps"]) - frame_start = int(data["frameStart"]) - handle_start - frame_end = int(data["frameEnd"]) + handle_end - - self._root_node["lock_range"].setValue(False) - self._root_node["fps"].setValue(fps) - self._root_node["first_frame"].setValue(frame_start) - self._root_node["last_frame"].setValue(frame_end) - self._root_node["lock_range"].setValue(True) - - # setting active viewers - try: - nuke.frame(int(data["frameStart"])) - except Exception as e: - log.warning("no viewer in scene: `{}`".format(e)) - - range = '{0}-{1}'.format( - int(data["frameStart"]), - int(data["frameEnd"])) - - for node in nuke.allNodes(filter="Viewer"): - node['frame_range'].setValue(range) - node['frame_range_lock'].setValue(True) - node['frame_range'].setValue(range) - node['frame_range_lock'].setValue(True) - - # adding handle_start/end to root avalon knob - if not anlib.set_avalon_knob_data(self._root_node, { - "handleStart": int(handle_start), - "handleEnd": int(handle_end) - }): - log.warning("Cannot set Avalon knob to Root node!") - - def reset_resolution(self): - """Set resolution to project resolution.""" - log.info("Reseting resolution") - project = io.find_one({"type": "project"}) - asset = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": asset, "type": "asset"}) - asset_data = asset.get('data', {}) - - data = { - "width": int(asset_data.get( - 'resolutionWidth', - asset_data.get('resolution_width'))), - "height": int(asset_data.get( - 'resolutionHeight', - asset_data.get('resolution_height'))), - "pixel_aspect": asset_data.get( - 'pixelAspect', - asset_data.get('pixel_aspect', 1)), - "name": project["name"] - } - - if any(x for x in data.values() if x is None): - msg = ("Missing set shot attributes in DB." - "\nContact your supervisor!." - "\n\nWidth: `{width}`" - "\nHeight: `{height}`" - "\nPixel Asspect: `{pixel_aspect}`").format(**data) - log.error(msg) - nuke.message(msg) - - bbox = self._asset_entity.get('data', {}).get('crop') - - if bbox: - try: - x, y, r, t = bbox.split(".") - data.update( - { - "x": int(x), - "y": int(y), - "r": int(r), - "t": int(t), - } - ) - except Exception as e: - bbox = None - msg = ("{}:{} \nFormat:Crop need to be set with dots, " - "example: 0.0.1920.1080, " - "/nSetting to default").format(__name__, e) - log.error(msg) - nuke.message(msg) - - existing_format = None - for format in nuke.formats(): - if data["name"] == format.name(): - existing_format = format - break - - if existing_format: - # Enforce existing format to be correct. 
- existing_format.setWidth(data["width"]) - existing_format.setHeight(data["height"]) - existing_format.setPixelAspect(data["pixel_aspect"]) - - if bbox: - existing_format.setX(data["x"]) - existing_format.setY(data["y"]) - existing_format.setR(data["r"]) - existing_format.setT(data["t"]) - else: - format_string = self.make_format_string(**data) - log.info("Creating new format: {}".format(format_string)) - nuke.addFormat(format_string) - - nuke.root()["format"].setValue(data["name"]) - log.info("Format is set.") - - def make_format_string(self, **kwargs): - if kwargs.get("r"): - return ( - "{width} " - "{height} " - "{x} " - "{y} " - "{r} " - "{t} " - "{pixel_aspect:.2f} " - "{name}".format(**kwargs) - ) - else: - return ( - "{width} " - "{height} " - "{pixel_aspect:.2f} " - "{name}".format(**kwargs) - ) - - def set_context_settings(self): - # replace reset resolution from avalon core to pype's - self.reset_resolution() - # replace reset resolution from avalon core to pype's - self.reset_frame_range_handles() - # add colorspace menu item - # self.set_colorspace() - - def set_favorites(self): - work_dir = os.getenv("AVALON_WORKDIR") - asset = os.getenv("AVALON_ASSET") - project = os.getenv("AVALON_PROJECT") - favorite_items = OrderedDict() - - # project - # get project's root and split to parts - projects_root = os.path.normpath(work_dir.split( - project)[0]) - # add project name - project_dir = os.path.join(projects_root, project) + "/" - # add to favorites - favorite_items.update({"Project dir": project_dir.replace("\\", "/")}) - - # asset - asset_root = os.path.normpath(work_dir.split( - asset)[0]) - # add asset name - asset_dir = os.path.join(asset_root, asset) + "/" - # add to favorites - favorite_items.update({"Shot dir": asset_dir.replace("\\", "/")}) - - # workdir - favorite_items.update({"Work dir": work_dir.replace("\\", "/")}) - - set_context_favorites(favorite_items) - - -def get_hierarchical_attr(entity, attr, default=None): - attr_parts = attr.split('.') - value = entity - for part in attr_parts: - value = value.get(part) - if not value: - break - - if value or entity['type'].lower() == 'project': - return value - - parent_id = entity['parent'] - if ( - entity['type'].lower() == 'asset' - and entity.get('data', {}).get('visualParent') - ): - parent_id = entity['data']['visualParent'] - - parent = io.find_one({'_id': parent_id}) - - return get_hierarchical_attr(parent, attr) - - -def get_write_node_template_attr(node): - ''' Gets all defined data from presets - - ''' - # get avalon data from node - data = dict() - data['avalon'] = avalon.nuke.read( - node) - data_preset = { - "class": data['avalon']['family'], - "families": data['avalon']['families'], - "preset": data['avalon']['families'] # omit < 2.0.0v - } - - # get template data - nuke_imageio_writes = get_node_imageio_setting(**data_preset) - - # collecting correct data - correct_data = OrderedDict({ - "file": get_render_path(node) - }) - - # adding imageio template - {correct_data.update({k: v}) - for k, v in nuke_imageio_writes.items() - if k not in ["_id", "_previous"]} - - # fix badly encoded data - return anlib.fix_data_for_node_create(correct_data) - - -class ExporterReview: - """ - Base class object for generating review data from Nuke - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - """ - _temp_nodes = [] - data = dict({ - "representations": list() - }) - - def __init__(self, - klass, - instance - ): - - self.log = klass.log - self.instance = 
instance - self.path_in = self.instance.data.get("path", None) - self.staging_dir = self.instance.data["stagingDir"] - self.collection = self.instance.data.get("collection", None) - - def get_file_info(self): - if self.collection: - self.log.debug("Collection: `{}`".format(self.collection)) - # get path - self.fname = os.path.basename(self.collection.format( - "{head}{padding}{tail}")) - self.fhead = self.collection.format("{head}") - - # get first and last frame - self.first_frame = min(self.collection.indexes) - self.last_frame = max(self.collection.indexes) - if "slate" in self.instance.data["families"]: - self.first_frame += 1 - else: - self.fname = os.path.basename(self.path_in) - self.fhead = os.path.splitext(self.fname)[0] + "." - self.first_frame = self.instance.data.get("frameStartHandle", None) - self.last_frame = self.instance.data.get("frameEndHandle", None) - - if "#" in self.fhead: - self.fhead = self.fhead.replace("#", "")[:-1] - - def get_representation_data(self, tags=None, range=False): - add_tags = [] - if tags: - add_tags = tags - - repre = { - 'name': self.name, - 'ext': self.ext, - 'files': self.file, - "stagingDir": self.staging_dir, - "tags": [self.name.replace("_", "-")] + add_tags - } - - if range: - repre.update({ - "frameStart": self.first_frame, - "frameEnd": self.last_frame, - }) - - self.data["representations"].append(repre) - - def get_view_process_node(self): - """ - Will get any active view process. - - Arguments: - self (class): in object definition - - Returns: - nuke.Node: copy node of Input Process node - """ - anlib.reset_selection() - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" == n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) - - if ipn_orig: - # copy selected to clipboard - nuke.nodeCopy('%clipboard%') - # reset selection - anlib.reset_selection() - # paste node and selection is on it only - nuke.nodePaste('%clipboard%') - # assign to variable - ipn = nuke.selectedNode() - - return ipn - - def clean_nodes(self): - for node in self._temp_nodes: - nuke.delete(node) - self.log.info("Deleted nodes...") - - -class ExporterReviewLut(ExporterReview): - """ - Generator object for review lut from Nuke - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - - """ - - def __init__(self, - klass, - instance, - name=None, - ext=None, - cube_size=None, - lut_size=None, - lut_style=None): - # initialize parent class - ExporterReview.__init__(self, klass, instance) - - # deal with now lut defined in viewer lut - if hasattr(klass, "viewer_lut_raw"): - self.viewer_lut_raw = klass.viewer_lut_raw - else: - self.viewer_lut_raw = False - - self.name = name or "baked_lut" - self.ext = ext or "cube" - self.cube_size = cube_size or 32 - self.lut_size = lut_size or 1024 - self.lut_style = lut_style or "linear" - - # set frame start / end and file name to self - self.get_file_info() - - self.log.info("File info was set...") - - self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join( - self.staging_dir, self.file).replace("\\", "/") - - def generate_lut(self): - # ---------- start nodes creation - - # CMSTestPattern - cms_node = nuke.createNode("CMSTestPattern") - cms_node["cube_size"].setValue(self.cube_size) - # connect - self._temp_nodes.append(cms_node) - self.previous_node = cms_node - 
self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) - - # Node View Process - ipn = self.get_view_process_node() - if ipn is not None: - # connect - ipn.setInput(0, self.previous_node) - self._temp_nodes.append(ipn) - self.previous_node = ipn - self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - - if not self.viewer_lut_raw: - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) - - # GenerateLUT - gen_lut_node = nuke.createNode("GenerateLUT") - gen_lut_node["file"].setValue(self.path) - gen_lut_node["file_type"].setValue(".{}".format(self.ext)) - gen_lut_node["lut1d"].setValue(self.lut_size) - gen_lut_node["style1d"].setValue(self.lut_style) - # connect - gen_lut_node.setInput(0, self.previous_node) - self._temp_nodes.append(gen_lut_node) - self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes)) - - # ---------- end nodes creation - - # Export lut file - nuke.execute( - gen_lut_node.name(), - int(self.first_frame), - int(self.first_frame)) - - self.log.info("Exported...") - - # ---------- generate representation data - self.get_representation_data() - - self.log.debug("Representation... `{}`".format(self.data)) - - # ---------- Clean up - self.clean_nodes() - - return self.data - - -class ExporterReviewMov(ExporterReview): - """ - Metaclass for generating review mov files - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - """ - - def __init__(self, - klass, - instance, - name=None, - ext=None, - ): - # initialize parent class - ExporterReview.__init__(self, klass, instance) - - # passing presets for nodes to self - if hasattr(klass, "nodes"): - self.nodes = klass.nodes - else: - self.nodes = {} - - # deal with now lut defined in viewer lut - self.viewer_lut_raw = klass.viewer_lut_raw - self.bake_colorspace_fallback = klass.bake_colorspace_fallback - self.bake_colorspace_main = klass.bake_colorspace_main - self.write_colorspace = instance.data["colorspace"] - - self.name = name or "baked" - self.ext = ext or "mov" - - # set frame start / end and file name to self - self.get_file_info() - - self.log.info("File info was set...") - - self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join( - self.staging_dir, self.file).replace("\\", "/") - - def render(self, render_node_name): - self.log.info("Rendering... ") - # Render Write node - nuke.execute( - render_node_name, - int(self.first_frame), - int(self.last_frame)) - - self.log.info("Rendered...") - - def save_file(self): - import shutil - with anlib.maintained_selection(): - self.log.info("Saving nodes as file... 
") - # create nk path - path = os.path.splitext(self.path)[0] + ".nk" - # save file to the path - shutil.copyfile(self.instance.context.data["currentFile"], path) - - self.log.info("Nodes exported...") - return path - - def generate_mov(self, farm=False): - # ---------- start nodes creation - - # Read node - r_node = nuke.createNode("Read") - r_node["file"].setValue(self.path_in) - r_node["first"].setValue(self.first_frame) - r_node["origfirst"].setValue(self.first_frame) - r_node["last"].setValue(self.last_frame) - r_node["origlast"].setValue(self.last_frame) - r_node["colorspace"].setValue(self.write_colorspace) - - # connect - self._temp_nodes.append(r_node) - self.previous_node = r_node - self.log.debug("Read... `{}`".format(self._temp_nodes)) - - # View Process node - ipn = self.get_view_process_node() - if ipn is not None: - # connect - ipn.setInput(0, self.previous_node) - self._temp_nodes.append(ipn) - self.previous_node = ipn - self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - - if not self.viewer_lut_raw: - colorspaces = [ - self.bake_colorspace_main, self.bake_colorspace_fallback - ] - - if any(colorspaces): - # OCIOColorSpace with controled output - dag_node = nuke.createNode("OCIOColorSpace") - self._temp_nodes.append(dag_node) - for c in colorspaces: - test = dag_node["out_colorspace"].setValue(str(c)) - if test: - self.log.info( - "Baking in colorspace... `{}`".format(c)) - break - - if not test: - dag_node = nuke.createNode("OCIODisplay") - else: - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) - - # Write node - write_node = nuke.createNode("Write") - self.log.debug("Path: {}".format(self.path)) - write_node["file"].setValue(self.path) - write_node["file_type"].setValue(self.ext) - - # Knobs `meta_codec` and `mov64_codec` are not available on centos. - # TODO change this to use conditions, if possible. - try: - write_node["meta_codec"].setValue("ap4h") - except Exception: - self.log.info("`meta_codec` knob was not found") - - try: - write_node["mov64_codec"].setValue("ap4h") - except Exception: - self.log.info("`mov64_codec` knob was not found") - write_node["mov64_write_timecode"].setValue(1) - write_node["raw"].setValue(1) - # connect - write_node.setInput(0, self.previous_node) - self._temp_nodes.append(write_node) - self.log.debug("Write... `{}`".format(self._temp_nodes)) - # ---------- end nodes creation - - # ---------- render or save to nk - if farm: - nuke.scriptSave() - path_nk = self.save_file() - self.data.update({ - "bakeScriptPath": path_nk, - "bakeWriteNodeName": write_node.name(), - "bakeRenderPath": self.path - }) - else: - self.render(write_node.name()) - # ---------- generate representation data - self.get_representation_data( - tags=["review", "delete"], - range=True - ) - - self.log.debug("Representation... `{}`".format(self.data)) - - # ---------- Clean up - self.clean_nodes() - nuke.scriptSave() - return self.data - - -def get_dependent_nodes(nodes): - """Get all dependent nodes connected to the list of nodes. - - Looking for connections outside of the nodes in incoming argument. 
- - Arguments: - nodes (list): list of nuke.Node objects - - Returns: - connections_in: dictionary of nodes and its dependencies - connections_out: dictionary of nodes and its dependency - """ - - connections_in = dict() - connections_out = dict() - node_names = [n.name() for n in nodes] - for node in nodes: - inputs = node.dependencies() - outputs = node.dependent() - # collect all inputs outside - test_in = [(i, n) for i, n in enumerate(inputs) - if n.name() not in node_names] - if test_in: - connections_in.update({ - node: test_in - }) - # collect all outputs outside - test_out = [i for i in outputs if i.name() not in node_names] - if test_out: - # only one dependent node is allowed - connections_out.update({ - node: test_out[-1] - }) - - return connections_in, connections_out - - -def find_free_space_to_paste_nodes( - nodes, - group=nuke.root(), - direction="right", - offset=300): - """ - For getting coordinates in DAG (node graph) for placing new nodes - - Arguments: - nodes (list): list of nuke.Node objects - group (nuke.Node) [optional]: object in which context it is - direction (str) [optional]: where we want it to be placed - [left, right, top, bottom] - offset (int) [optional]: what offset it is from rest of nodes - - Returns: - xpos (int): x coordinace in DAG - ypos (int): y coordinace in DAG - """ - if len(nodes) == 0: - return 0, 0 - - group_xpos = list() - group_ypos = list() - - # get local coordinates of all nodes - nodes_xpos = [n.xpos() for n in nodes] + \ - [n.xpos() + n.screenWidth() for n in nodes] - - nodes_ypos = [n.ypos() for n in nodes] + \ - [n.ypos() + n.screenHeight() for n in nodes] - - # get complete screen size of all nodes to be placed in - nodes_screen_width = max(nodes_xpos) - min(nodes_xpos) - nodes_screen_heigth = max(nodes_ypos) - min(nodes_ypos) - - # get screen size (r,l,t,b) of all nodes in `group` - with group: - group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \ - [n.xpos() + n.screenWidth() for n in nuke.allNodes() - if n not in nodes] - group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \ - [n.ypos() + n.screenHeight() for n in nuke.allNodes() - if n not in nodes] - - # calc output left - if direction in "left": - xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset) - ypos = min(group_ypos) - return xpos, ypos - # calc output right - if direction in "right": - xpos = max(group_xpos) + abs(offset) - ypos = min(group_ypos) - return xpos, ypos - # calc output top - if direction in "top": - xpos = min(group_xpos) - ypos = min(group_ypos) - abs(nodes_screen_heigth) - abs(offset) - return xpos, ypos - # calc output bottom - if direction in "bottom": - xpos = min(group_xpos) - ypos = max(group_ypos) + abs(offset) - return xpos, ypos - - -def launch_workfiles_app(): - '''Function letting start workfiles after start of host - ''' - # get state from settings - open_at_start = get_current_project_settings()["nuke"].get( - "general", {}).get("open_workfile_at_start") - - # return if none is defined - if not open_at_start: - return - - if not self.workfiles_launched: - self.workfiles_launched = True - workfiles.show(os.environ["AVALON_WORKDIR"]) - - -def open_last_workfile(): - # get state from settings - open_last_version = get_current_project_settings()["nuke"].get( - "general", {}).get("create_initial_workfile") - - log.info("Opening last workfile...") - last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") - - if not os.path.exists(last_workfile_path): - # return if none is defined - if not 
open_last_version: - return - - save_file(last_workfile_path) - else: - # to avoid looping of the callback, remove it! - nuke.removeOnCreate(open_last_workfile, nodeClass="Root") - - # open workfile - open_file(last_workfile_path) diff --git a/pype/modules/ftrack/python2_vendor/arrow b/pype/modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf72..0000000000 --- a/pype/modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/pype/modules/ftrack/python2_vendor/ftrack-python-api b/pype/modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab..0000000000 --- a/pype/modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e diff --git a/pype/settings/defaults/project_settings/nuke.json b/pype/settings/defaults/project_settings/nuke.json deleted file mode 100644 index 99b37e59e4..0000000000 --- a/pype/settings/defaults/project_settings/nuke.json +++ /dev/null @@ -1,141 +0,0 @@ -{ - "general": { - "open_workfile_at_start": false, - "create_initial_workfile": true - }, - "menu": { - "Pype": { - "Create...": "ctrl+shift+alt+c", - "Publish...": "ctrl+alt+p", - "Load...": "ctrl+alt+l", - "Manage...": "ctrl+alt+m", - "Build Workfile": "ctrl+alt+b" - } - }, - "create": { - "CreateWriteRender": { - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}" - }, - "CreateWritePrerender": { - "fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}" - } - }, - "publish": { - "PreCollectNukeInstances": { - "sync_workfile_version": true - }, - "ValidateKnobs": { - "enabled": false, - "knobs": { - "render": { - "review": true - } - } - }, - "ValidateOutputResolution": { - "enabled": true, - "optional": true - }, - "ValidateGizmo": { - "enabled": true, - "optional": true - }, - "ValidateScript": { - "enabled": true, - "optional": true - }, - "ValidateNukeWriteBoundingBox": { - "enabled": true, - "optional": true - }, - "ExtractThumbnail": { - "enabled": true, - "nodes": { - "Reformat": [ - [ - "type", - "to format" - ], - [ - "format", - "HD_1080" - ], - [ - "filter", - "Lanczos6" - ], - [ - "black_outside", - true - ], - [ - "pbb", - false - ] - ] - } - }, - "ExtractReviewDataLut": { - "enabled": false - }, - "ExtractReviewDataMov": { - "enabled": true, - "viewer_lut_raw": false - }, - "ExtractSlateFrame": { - "viewer_lut_raw": false - }, - "NukeSubmitDeadline": { - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_chunk_size": 1 - } - }, - "load": { - "LoadImage": { - "enabled": true, - "representations": [] - }, - "LoadMov": { - "enabled": true, - "representations": [] - }, - "LoadSequence": { - "enabled": true, - "representations": [ - "png", - "jpg", - "exr", - "" - ] - } - }, - "workfile_build": { - "profiles": [ - { - "tasks": [ - "compositing" - ], - "current_context": [ - { - "subset_name_filters": [], - "families": [ - "render", - "plate" - ], - "repre_names": [ - "exr", - "dpx" - ], - "loaders": [ - "LoadSequence" - ] - } - ], - "linked_assets": [] - } - ] - }, - "filters": {} -} \ No newline at end of file diff --git a/pype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/pype/settings/entities/schemas/projects_schema/schema_project_nuke.json deleted file mode 100644 index 7a01ffec1f..0000000000 --- a/pype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ /dev/null @@ -1,122 
+0,0 @@ -{ - "type": "dict", - "collapsible": true, - "key": "nuke", - "label": "Nuke", - "is_file": true, - "children": [ - { - "type": "dict", - "collapsible": true, - "key": "general", - "label": "General", - "children": [ - { - "type": "boolean", - "key": "open_workfile_at_start", - "label": "Open Workfile window at start of a Nuke session" - }, - { - "type": "boolean", - "key": "create_initial_workfile", - "label": "Create initial workfile version if none available" - } - ] - },{ - "type": "dict", - "collapsible": true, - "key": "menu", - "label": "Menu shortcuts", - "children": [ - { - "type": "dict", - "collapsible": false, - "key": "Pype", - "label": "Pype", - "is_group": true, - "children": [ - { - "type": "text", - "key": "Create...", - "label": "Create..." - }, - { - "type": "text", - "key": "Publish...", - "label": "Publish..." - }, - { - "type": "text", - "key": "Load...", - "label": "Load..." - }, - { - "type": "text", - "key": "Manage...", - "label": "Manage..." - }, - { - "type": "text", - "key": "Build Workfile", - "label": "Build Workfile" - } - ] - } - ] - }, - { - "type": "dict", - "collapsible": true, - "key": "create", - "label": "Creator plugins", - "children": [ - { - "type": "dict", - "collapsible": false, - "key": "CreateWriteRender", - "label": "CreateWriteRender", - "is_group": true, - "children": [ - { - "type": "text", - "key": "fpath_template", - "label": "Path template" - } - ] - }, - { - "type": "dict", - "collapsible": false, - "key": "CreateWritePrerender", - "label": "CreateWritePrerender", - "is_group": true, - "children": [ - { - "type": "text", - "key": "fpath_template", - "label": "Path template" - } - ] - } - ] - }, - { - "type": "schema", - "name": "schema_nuke_publish", - "template_data": [] - }, - { - "type": "schema", - "name": "schema_nuke_load", - "template_data": [] - }, - { - "type": "schema", - "name": "schema_workfile_build" - }, - { - "type": "schema", - "name": "schema_publish_gui_filter" - } - ] -}
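Taken together, the two new "general" settings drive what happens when the Root node of a fresh Nuke session is created: `open_last_workfile` either reopens the last workfile or, when `create_initial_workfile` is enabled and nothing exists on disk yet, saves an initial version, while `launch_workfiles_app` shows the Workfiles tool only if `open_workfile_at_start` is enabled. The sketch below is a minimal, standalone illustration of that decision logic only; it deliberately avoids the `nuke` and avalon/openpype imports, the `startup_actions` helper and its returned action flags are hypothetical names, and the example path is made up. Only the two setting keys match what the diff adds to `project_settings/nuke.json`.

import os


def startup_actions(general_settings, last_workfile_path):
    """Hypothetical helper mirroring the decision logic of
    ``open_last_workfile`` and ``launch_workfiles_app`` above,
    without touching Nuke or the avalon/openpype APIs."""
    actions = {
        "save_initial_version": False,   # would call save_file(last_workfile_path)
        "open_last_version": False,      # would call open_file(last_workfile_path)
        "show_workfiles_tool": False,    # would call workfiles.show(AVALON_WORKDIR)
    }

    workfile_exists = bool(last_workfile_path) and os.path.exists(last_workfile_path)

    if workfile_exists:
        # An existing workfile is always reopened; the real callback first
        # removes itself from Root onCreate so open_file() cannot retrigger it.
        actions["open_last_version"] = True
    elif general_settings.get("create_initial_workfile"):
        # No workfile on disk yet: save an initial version to the expected path.
        actions["save_initial_version"] = True

    if general_settings.get("open_workfile_at_start"):
        # Shown at most once per session (the module keeps a
        # ``workfiles_launched`` flag for exactly this purpose).
        actions["show_workfiles_tool"] = True

    return actions


# Example with the default settings from project_settings/nuke.json and a
# made-up path that does not exist yet: only the initial save is triggered.
print(startup_actions(
    {"open_workfile_at_start": False, "create_initial_workfile": True},
    "/projects/demo/shot010/work/compositing/shot010_compositing_v001.nk",
))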