diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py
index adcce9c2b1..ce2754c25d 100644
--- a/pype/ftrack/tray/ftrack_module.py
+++ b/pype/ftrack/tray/ftrack_module.py
@@ -88,9 +88,11 @@ class FtrackModule:
     def set_action_server(self):
         try:
             self.action_server.run_server()
-        except Exception:
-            msg = 'Ftrack Action server crashed! Please try to start again.'
-            log.error(msg)
+        except Exception as exc:
+            log.error(
+                "Ftrack Action server crashed! Please try to start again.",
+                exc_info=True
+            )
             # TODO show message to user
             self.bool_action_server = False
             self.set_menu_visibility()
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 383da52d0c..634460edd7 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -889,7 +889,7 @@ class BuildWorkfile(WorkfileSettings):
     def process(self,
                 regex_filter=None,
                 version=None,
-                representations=["exr", "dpx"]):
+                representations=["exr", "dpx", "lutJson"]):
         """
         A short description.
 
@@ -935,7 +935,7 @@ class BuildWorkfile(WorkfileSettings):
                             nodes=[wn])
 
         # move position
-        self.position_up(2)
+        self.position_up(4)
 
         # set frame range for new viewer
         self.reset_frame_range_handles()
@@ -947,7 +947,10 @@ class BuildWorkfile(WorkfileSettings):
                                    representations=representations)
 
         nodes_backdrop = list()
+
         for name, subset in subsets.items():
+            if "lut" in name:
+                continue
             log.info("Building Loader to: `{}`".format(name))
             version = subset["version"]
             log.info("Version to: `{}`".format(version["name"]))
@@ -959,26 +962,20 @@ class BuildWorkfile(WorkfileSettings):
             wn.setInput(0, rn)
 
             # get editional nodes
-            # # TODO: link it to nut Create and Load
+            lut_subset = [s for n, s in subsets.items()
+                          if "lut{}".format(name.lower()) in n.lower()]
+            log.debug(">> lut_subset: `{}`".format(lut_subset))
 
-            print("\n\n__________ nodes __________")
-            # # create all editional nodes
-            # for n in nodes:
-            #     print(n)
-            #     # create nodes
-            #     klass, value = n[0]
-            #     node = nuke.createNode(value)
-            #     print(node.name())
-            #
-            #     for k, v in n:
-            #         if "Class" not in k:
-            #             node[k].setValue(v)
-            #     self._nodes.append(node)
+            if len(lut_subset) > 0:
+                lsub = lut_subset[0]
+                fxn = self.effect_loader(lsub["representations"][-1])
+                fxn_ypos = fxn["ypos"].value()
+                fxn["ypos"].setValue(fxn_ypos - 100)
+                nodes_backdrop.append(fxn)
+            nodes_backdrop.append(rn)
 
             # move position
             self.position_right()
-            nodes_backdrop.append(rn)
-
         bdn = self.create_backdrop(label="Loaded Reads",
                                    color='0x2d7702ff',
                                    layer=-1,
@@ -993,9 +990,6 @@ class BuildWorkfile(WorkfileSettings):
         """
         context = representation["context"]
 
-        read_name = "Read_{0}_{1}_{2}".format(context["asset"],
-                                              context["subset"],
-                                              context["representation"])
 
         loader_name = "LoadSequence"
         if "mov" in context["representation"]:
@@ -1011,6 +1005,28 @@ class BuildWorkfile(WorkfileSettings):
         return api.load(Loader=loader_plugin,
                         representation=representation["_id"])
 
+    def effect_loader(self, representation):
+        """
+        Gets Loader plugin for effects
+
+        Arguments:
+            representation (dict): avalon db entity
+
+        """
+        context = representation["context"]
+
+        loader_name = "LoadLuts"
+
+        loader_plugin = None
+        for Loader in api.discover(api.Loader):
+            if Loader.__name__ != loader_name:
+                continue
+
+            loader_plugin = Loader
+
+        return api.load(Loader=loader_plugin,
+                        representation=representation["_id"])
+
     def write_create(self):
         """
         Create render write
diff --git a/pype/plugins/nuke/load/load_luts.py b/pype/plugins/nuke/load/load_luts.py
new file mode 100644
index 0000000000..d1c6c71057
--- /dev/null
+++ b/pype/plugins/nuke/load/load_luts.py
@@ -0,0 +1,317 @@
+from avalon import api, style, io
+import nuke
+import json
+from collections import OrderedDict
+
+
+class LoadLuts(api.Loader):
+    """Loading colorspace soft effect exported from nukestudio"""
+
+    representations = ["lutJson"]
+    families = ["lut"]
+
+    label = "Load Luts - nodes"
+    order = 0
+    icon = "cc"
+    color = style.colors.light
+
+    def load(self, context, name, namespace, data):
+        """
+        Loading function to get the soft effects to particular read node
+
+        Arguments:
+            context (dict): context of version
+            name (str): name of the version
+            namespace (str): asset name
+            data (dict): compulsory attribute > not used
+
+        Returns:
+            nuke node: containerised nuke node object
+        """
+        # import dependencies
+        from avalon.nuke import containerise
+
+        # get main variables
+        version = context['version']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        workfile_first_frame = int(nuke.root()["first_frame"].getValue())
+        namespace = namespace or context['asset']['name']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        # prepare data for imprinting
+        # add additional metadata from the version to imprint to Avalon knob
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # getting file path
+        file = self.fname.replace("\\", "/")
+
+        # getting data from json file with unicode conversion
+        with open(file, "r") as f:
+            json_f = {self.byteify(key): self.byteify(value)
+                      for key, value in json.load(f).iteritems()}
+
+        # get correct order of nodes by positions on track and subtrack
+        nodes_order = self.reorder_nodes(json_f["effects"])
+
+        # adding nodes to node graph
+        # just in case we are in group lets jump out of it
+        nuke.endGroup()
+
+        GN = nuke.createNode("Group")
+
+        GN["name"].setValue(object_name)
+
+        # adding content to the group node
+        with GN:
+            pre_node = nuke.createNode("Input")
+            pre_node["name"].setValue("rgb")
+
+            for ef_name, ef_val in nodes_order.items():
+                node = nuke.createNode(ef_val["class"])
+                for k, v in ef_val["node"].items():
+                    if isinstance(v, list) and len(v) > 4:
+                        node[k].setAnimated()
+                        for i, value in enumerate(v):
+                            if isinstance(value, list):
+                                for ci, cv in enumerate(value):
+                                    node[k].setValueAt(
+                                        cv,
+                                        (workfile_first_frame + i),
+                                        ci)
+                            else:
+                                node[k].setValueAt(
+                                    value,
+                                    (workfile_first_frame + i))
+                    else:
+                        node[k].setValue(v)
+                node.setInput(0, pre_node)
+                pre_node = node
+
+            output = nuke.createNode("Output")
+            output.setInput(0, pre_node)
+
+        # try to find parent read node
+        self.connect_read_node(GN, namespace, json_f["assignTo"])
+
+        GN["tile_color"].setValue(int("0x3469ffff", 16))
+
+        self.log.info("Loaded lut setup: `{}`".format(GN["name"].value()))
+
+        return containerise(
+            node=GN,
+            name=name,
+            namespace=namespace,
+            context=context,
+            loader=self.__class__.__name__,
+            data=data_imprint)
+
+    def update(self, container, representation):
+        """Update the Loader's path
+
+        Nuke automatically tries to reset some variables when changing
+        the loader's path to a new file. These automatic changes are to its
+        inputs.
+        """
+
+        from avalon.nuke import (
+            update_container
+        )
+        # get main variables
+        # Get version from io
+        version = io.find_one({
+            "type": "version",
+            "_id": representation["parent"]
+        })
+        # get corresponding node
+        GN = nuke.toNode(container['objectName'])
+
+        file = api.get_representation_path(representation).replace("\\", "/")
+        name = container['name']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        workfile_first_frame = int(nuke.root()["first_frame"].getValue())
+        namespace = container['namespace']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"representation": str(representation["_id"]),
+                        "frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # Update the imprinted representation
+        update_container(
+            GN,
+            data_imprint
+        )
+
+        # getting data from json file with unicode conversion
+        with open(file, "r") as f:
+            json_f = {self.byteify(key): self.byteify(value)
+                      for key, value in json.load(f).iteritems()}
+
+        # get correct order of nodes by positions on track and subtrack
+        nodes_order = self.reorder_nodes(json_f["effects"])
+
+        # adding nodes to node graph
+        # just in case we are in group lets jump out of it
+        nuke.endGroup()
+
+        # adding content to the group node
+        with GN:
+            # first remove all nodes
+            [nuke.delete(n) for n in nuke.allNodes()]
+
+            # create input node
+            pre_node = nuke.createNode("Input")
+            pre_node["name"].setValue("rgb")
+
+            for ef_name, ef_val in nodes_order.items():
+                node = nuke.createNode(ef_val["class"])
+                for k, v in ef_val["node"].items():
+                    if isinstance(v, list) and len(v) > 3:
+                        node[k].setAnimated()
+                        for i, value in enumerate(v):
+                            if isinstance(value, list):
+                                for ci, cv in enumerate(value):
+                                    node[k].setValueAt(
+                                        cv,
+                                        (workfile_first_frame + i),
+                                        ci)
+                            else:
+                                node[k].setValueAt(
+                                    value,
+                                    (workfile_first_frame + i))
+                    else:
+                        node[k].setValue(v)
+                node.setInput(0, pre_node)
+                pre_node = node
+
+            # create output node
+            output = nuke.createNode("Output")
+            output.setInput(0, pre_node)
+
+        # try to find parent read node
+        self.connect_read_node(GN, namespace, json_f["assignTo"])
+
+        # get all versions in list
+        versions = io.find({
+            "type": "version",
+            "parent": version["parent"]
+        }).distinct('name')
+
+        max_version = max(versions)
+
+        # change color of node
+        if version.get("name") not in [max_version]:
+            GN["tile_color"].setValue(int("0xd84f20ff", 16))
+        else:
+            GN["tile_color"].setValue(int("0x3469ffff", 16))
+
+        self.log.info("Updated to version: {}".format(version.get("name")))
+
+    def connect_read_node(self, group_node, asset, subset):
+        """
+        Finds the parent read node and connects the group node to it
+
+        Arguments:
+            group_node (nuke node): lut group node to connect
+            asset (str): asset name
+            subset (str): subset name
+
+        Returns:
+            None
+        """
+        search_name = "{0}_{1}".format(asset, subset)
+        node = [n for n in nuke.allNodes() if search_name in n["name"].value()]
+        if len(node) > 0:
+            rn = node[0]
+        else:
+            rn = None
+
+        # Parent read node has been found
+        # solving connections
+        if rn:
+            dep_nodes = rn.dependent()
+
+            if len(dep_nodes) > 0:
+                for dn in dep_nodes:
+                    dn.setInput(0, group_node)
+
+            group_node.setInput(0, rn)
+            group_node.autoplace()
+
+    def reorder_nodes(self, data):
+        new_order = OrderedDict()
+        trackNums = [v["trackIndex"] for k, v in data.items()]
+        subTrackNums = [v["subTrackIndex"] for k, v in data.items()]
+
+        for trackIndex in range(
+                min(trackNums), max(trackNums) + 1):
+            for subTrackIndex in range(
+                    min(subTrackNums), max(subTrackNums) + 1):
+                item = self.get_item(data, trackIndex, subTrackIndex)
+                if item:
+                    new_order.update(item)
+        return new_order
+
+    def get_item(self, data, trackIndex, subTrackIndex):
+        return {key: val for key, val in data.items()
+                if subTrackIndex == val["subTrackIndex"]
+                if trackIndex == val["trackIndex"]}
+
+    def byteify(self, input):
+        """
+        Converts unicode strings to strings
+        It goes through the whole input recursively
+
+        Arguments:
+            input (dict/str): input
+
+        Returns:
+            dict: with fixed values and keys
+
+        """
+
+        if isinstance(input, dict):
+            return {self.byteify(key): self.byteify(value)
+                    for key, value in input.iteritems()}
+        elif isinstance(input, list):
+            return [self.byteify(element) for element in input]
+        elif isinstance(input, unicode):
+            return input.encode('utf-8')
+        else:
+            return input
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        from avalon.nuke import viewer_update_and_undo_stop
+        node = nuke.toNode(container['objectName'])
+        with viewer_update_and_undo_stop():
+            nuke.delete(node)
diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py
index 471dfcf555..2946857e09 100644
--- a/pype/plugins/nuke/load/load_sequence.py
+++ b/pype/plugins/nuke/load/load_sequence.py
@@ -96,6 +96,8 @@ class LoadSequence(api.Loader):
 
         self.first_frame = int(nuke.root()["first_frame"].getValue())
         self.handle_start = version_data.get("handleStart", 0)
+        self.handle_start = version_data.get("handleStart", 0)
+        self.handle_end = version_data.get("handleEnd", 0)
 
         first = version_data.get("frameStart", None)
         last = version_data.get("frameEnd", None)
@@ -104,6 +106,9 @@ class LoadSequence(api.Loader):
         if namespace is None:
             namespace = context['asset']['name']
 
+        first -= self.handle_start
+        last += self.handle_end
+
         file = self.fname.replace("\\", "/")
 
         log.info("file: {}\n".format(self.fname))
@@ -231,6 +236,7 @@ class LoadSequence(api.Loader):
 
         self.first_frame = int(nuke.root()["first_frame"].getValue())
         self.handle_start = version_data.get("handleStart", 0)
+        self.handle_end = version_data.get("handleEnd", 0)
 
         first = version_data.get("frameStart", None)
         last = version_data.get("frameEnd", None)
@@ -241,6 +247,9 @@ class LoadSequence(api.Loader):
                 "{} ({})".format(node['name'].value(), representation))
             first = 0
 
+        first -= self.handle_start
+        last += self.handle_end
+
         # Update the loader's path whilst preserving some values
         with preserve_trim(node):
             node["file"].setValue(file["path"])