From 84b6efbbf6d95b2f5f137a1b075646d18d2e3100 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Nov 2018 18:03:04 +0100 Subject: [PATCH 01/78] nuke menu.py autostart workfiles --- setup/nuke/nuke_path/menu.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index 043d4fd7b9..20300b6c17 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -8,6 +8,7 @@ for each in nuke.allNodes(): each['file'].setValue(re.sub('[vV]\d+', rootVersion, each['file'].value())) ''' nuke.knobDefault('onScriptSave', cmd) + print '\n>>> menu.py: Function for automatic check of version in write nodes is added\n' ffmpeg_cmd = '''if nuke.env['LINUX']: @@ -19,9 +20,9 @@ else: nuke.knobDefault('onScriptLoad', ffmpeg_cmd) -# # run avalon's tool Workfiles -# workfiles = '''from avalon.tools import workfiles -# if nuke.Root().name() == 'Root': -# nuke.scriptClose() -# workfiles.show(os.environ["AVALON_WORKDIR"])''' -# nuke.knobDefault('onCreate', workfiles) +# run avalon's tool Workfiles +workfiles = '''from avalon.tools import workfiles +if nuke.Root().name() == 'Root': + nuke.scriptClose() +workfiles.show(os.environ["AVALON_WORKDIR"])''' +nuke.knobDefault('onCreate', workfiles) From 29325c3fa2774442bd110c6031406205a4f9005d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Nov 2018 19:10:48 +0100 Subject: [PATCH 02/78] let nuke not to run workfiles at start --- setup/nuke/nuke_path/menu.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index 20300b6c17..a454b8bd66 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -1,3 +1,5 @@ + +from avalon.tools import workfiles import nuke # auto fix version paths in write nodes following root name of script cmd = ''' @@ -20,9 +22,11 @@ else: nuke.knobDefault('onScriptLoad', ffmpeg_cmd) -# run avalon's tool Workfiles -workfiles = '''from avalon.tools import workfiles -if nuke.Root().name() == 'Root': - nuke.scriptClose() -workfiles.show(os.environ["AVALON_WORKDIR"])''' -nuke.knobDefault('onCreate', workfiles) +# # run avalon's tool Workfiles +# workfiles = '''from avalon.tools import workfiles +# if nuke.Root().name() == 'Root': +# nuke.scriptClear() +# workfiles.show(os.environ["AVALON_WORKDIR"])''' +# nuke.knobDefault('onCreate', workfiles) + +# workfiles.show(os.environ["AVALON_WORKDIR"]) From bd128b25de93222f9b4bb209126abedaa71a61de Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Nov 2018 22:54:47 +0100 Subject: [PATCH 03/78] plugins ideas --- pype/plugins/nuke/create/create_backdrop | 2 ++ pype/plugins/nuke/create/create_camera | 3 +++ pype/plugins/nuke/create/create_read_plate | 8 ++++++++ pype/plugins/nuke/create/create_write | 17 +++++++++++++++++ pype/plugins/nuke/load/load_alembic | 0 pype/plugins/nuke/load/load_backdrop | 0 pype/plugins/nuke/load/load_camera_abc | 0 pype/plugins/nuke/load/load_camera_nk | 1 + pype/plugins/nuke/load/load_still | 1 + 9 files changed, 32 insertions(+) create mode 100644 pype/plugins/nuke/create/create_backdrop create mode 100644 pype/plugins/nuke/create/create_camera create mode 100644 pype/plugins/nuke/create/create_read_plate create mode 100644 pype/plugins/nuke/create/create_write create mode 100644 pype/plugins/nuke/load/load_alembic create mode 100644 pype/plugins/nuke/load/load_backdrop create mode 100644 pype/plugins/nuke/load/load_camera_abc create mode 100644 
pype/plugins/nuke/load/load_camera_nk create mode 100644 pype/plugins/nuke/load/load_still diff --git a/pype/plugins/nuke/create/create_backdrop b/pype/plugins/nuke/create/create_backdrop new file mode 100644 index 0000000000..2cdc222618 --- /dev/null +++ b/pype/plugins/nuke/create/create_backdrop @@ -0,0 +1,2 @@ +# creates backdrop which is published as separate nuke script +# it is versioned by major version diff --git a/pype/plugins/nuke/create/create_camera b/pype/plugins/nuke/create/create_camera new file mode 100644 index 0000000000..0d542b8ad7 --- /dev/null +++ b/pype/plugins/nuke/create/create_camera @@ -0,0 +1,3 @@ +# create vanilla camera if no camera is selected +# if camera is selected then it will convert it into containerized object +# it is major versioned in publish diff --git a/pype/plugins/nuke/create/create_read_plate b/pype/plugins/nuke/create/create_read_plate new file mode 100644 index 0000000000..90a47cb55e --- /dev/null +++ b/pype/plugins/nuke/create/create_read_plate @@ -0,0 +1,8 @@ +# create publishable read node usually used for enabling version tracking +# also useful for sharing across shots or assets + +# if read nodes are selected it will convert them to centainer +# if no read node selected it will create read node and offer browser to shot resource folder + +# type movie > mov or imagesequence +# type still > matpaint .psd, .tif, .png, diff --git a/pype/plugins/nuke/create/create_write b/pype/plugins/nuke/create/create_write new file mode 100644 index 0000000000..dcb132875a --- /dev/null +++ b/pype/plugins/nuke/create/create_write @@ -0,0 +1,17 @@ +# type: render +# if no render type node in script then first is having in name [master] for definition of main script renderer +# colorspace setting from templates +# dataflow setting from templates + +# type: mask_render +# created with shuffle gizmo for RGB separation into davinci matte +# colorspace setting from templates +# dataflow setting from templates + +# type: prerender +# backdrop with write and read +# colorspace setting from templates +# dataflow setting from templates + +# type: geo +# dataflow setting from templates diff --git a/pype/plugins/nuke/load/load_alembic b/pype/plugins/nuke/load/load_alembic new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pype/plugins/nuke/load/load_backdrop b/pype/plugins/nuke/load/load_backdrop new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pype/plugins/nuke/load/load_camera_abc b/pype/plugins/nuke/load/load_camera_abc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pype/plugins/nuke/load/load_camera_nk b/pype/plugins/nuke/load/load_camera_nk new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/pype/plugins/nuke/load/load_camera_nk @@ -0,0 +1 @@ + diff --git a/pype/plugins/nuke/load/load_still b/pype/plugins/nuke/load/load_still new file mode 100644 index 0000000000..c2aa061c5a --- /dev/null +++ b/pype/plugins/nuke/load/load_still @@ -0,0 +1 @@ +# usually used for mattepainting From 539a010ce8ebebdbe1e3eb323d2cab9b34fda673 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Nov 2018 22:55:20 +0100 Subject: [PATCH 04/78] adding plugins from maya and old repository for wip --- .../nuke/publish/collect_nuke_writes.py | 198 +++++++++++++ .../publish/extract_nuke_baked_colorspace.py | 107 +++++++ .../nuke/publish/extract_nuke_write.py | 116 ++++++++ pype/plugins/nuke/publish/submit_deadline.py | 264 ++++++++++++++++++ .../publish/validate_deadline_connection.py | 27 ++ 5 files changed, 712 insertions(+) 
create mode 100644 pype/plugins/nuke/publish/collect_nuke_writes.py create mode 100644 pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py create mode 100644 pype/plugins/nuke/publish/extract_nuke_write.py create mode 100644 pype/plugins/nuke/publish/submit_deadline.py create mode 100644 pype/plugins/nuke/publish/validate_deadline_connection.py diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py new file mode 100644 index 0000000000..7f301803fb --- /dev/null +++ b/pype/plugins/nuke/publish/collect_nuke_writes.py @@ -0,0 +1,198 @@ +import os + +import nuke +import pyblish.api +import clique +import ft_utils +reload(ft_utils) + +global pre_name +pre_name = ft_utils.get_paths_from_template(['shot.vfx.prerender'], + False)[0].split('_')[0] + + +class CollectNukeWrites(pyblish.api.ContextPlugin): + """Collect all write nodes.""" + + order = pyblish.api.CollectorOrder + label = "Writes" + hosts = ["nuke", "nukeassist"] + + # targets = ["default", "process"] + + def process(self, context): + + instances = [] + # creating instances per write node + for node in nuke.allNodes(): + if node.Class() != "Write": + continue + + # Determine output type + output_type = "img" + if node["file_type"].value() == "mov": + output_type = "mov" + + # Create instance + instance = pyblish.api.Instance(node.name()) + instance.data["family"] = output_type + instance.add(node) + instance.data["label"] = node.name() + + instance.data["publish"] = False + + # Get frame range + start_frame = int(nuke.root()["first_frame"].getValue()) + end_frame = int(nuke.root()["last_frame"].getValue()) + if node["use_limit"].getValue(): + start_frame = int(node["first"].getValue()) + end_frame = int(node["last"].getValue()) + print "writeNode collected: {}".format(node.name()) + # Add collection + collection = None + try: + path = "" + if pre_name in node.name(): + path = ft_utils.convert_hashes_in_file_name( + node['prerender_path'].getText()) + else: + path = nuke.filename(node) + path += " [{0}-{1}]".format(start_frame, end_frame) + collection = clique.parse(path) + ################################################### + '''possible place to start create mov publish write collection''' + ################################################### + except ValueError: + # Ignore the exception when the path does not match the + # collection. 
+ pass + + instance.data["collection"] = collection + + instances.append(instance) + + context.data["write_instances"] = instances + + context.data["instances"] = ( + context.data.get("instances", []) + instances) + + +class CollectNukeWritesProcess(pyblish.api.ContextPlugin): + """Collect all local processing write instances.""" + + order = CollectNukeWrites.order + 0.01 + label = "Writes Local" + hosts = ["nuke"] + + # targets = ["process.local"] + + def process(self, context): + + for item in context.data["write_instances"]: + instance = context.create_instance(item.data["name"]) + for key, value in item.data.iteritems(): + instance.data[key] = value + + if pre_name not in item.data["name"]: + instance.data["label"] += " - write - local" + instance.data["families"] = ["write", "local"] + else: + instance.data["label"] += " - prerender - local" + instance.data["families"] = ["prerender", "local"] + + for node in item: + instance.add(node) + + # Adding/Checking publish attribute + if "process_local" not in node.knobs(): + knob = nuke.Boolean_Knob("process_local", "Process Local") + knob.setValue(False) + node.addKnob(knob) + + value = bool(node["process_local"].getValue()) + + # Compare against selection + selection = instance.context.data.get("selection", []) + if selection: + if list(set(instance) & set(selection)): + value = True + else: + value = False + + instance.data["publish"] = value + + def instanceToggled(instance, value): + instance[0]["process_local"].setValue(value) + + instance.data["instanceToggled"] = instanceToggled + + +class CollectNukeWritesPublish(pyblish.api.ContextPlugin): + """Collect all write instances for publishing.""" + + order = CollectNukeWrites.order + 0.01 + label = "Writes" + hosts = ["nuke", "nukeassist"] + + # targets = ["default"] + + def process(self, context): + + for item in context.data["write_instances"]: + + # If the collection was not generated. + if not item.data["collection"]: + continue + + missing_files = [] + for f in item.data["collection"]: + # print f + if not os.path.exists(f): + missing_files.append(f) + + for f in missing_files: + item.data["collection"].remove(f) + + if not list(item.data["collection"]): + continue + + instance = context.create_instance(item.data["name"]) + + for key, value in item.data.iteritems(): + # print key, value + instance.data[key] = value + + instance.data["families"] = ["output"] + instance.data["label"] += ( + " - " + os.path.basename(instance.data["collection"].format())) + + for node in item: + instance.add(node) + + # Adding/Checking publish attribute + if "publish" not in node.knobs(): + knob = nuke.Boolean_Knob("publish", "Publish") + knob.setValue(False) + node.addKnob(knob) + + value = bool(node["publish"].getValue()) + + # Compare against selection + selection = instance.context.data.get("selection", []) + if selection: + if list(set(instance) & set(selection)): + value = True + else: + value = False + + instance.data["publish"] = value + + def instanceToggled(instance, value): + # Removing and adding the knob to support NukeAssist, where + # you can't modify the knob value directly. 
+ instance[0].removeKnob(instance[0]["publish"]) + knob = nuke.Boolean_Knob("publish", "Publish") + knob.setValue(value) + instance[0].addKnob(knob) + + instance.data["instanceToggled"] = instanceToggled diff --git a/pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py b/pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py new file mode 100644 index 0000000000..f2561bd7c5 --- /dev/null +++ b/pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py @@ -0,0 +1,107 @@ +import os +import tempfile +import shutil + +import nuke + +import pyblish.api + + +class ExtractNukeBakedColorspace(pyblish.api.InstancePlugin): + """Extracts movie with baked in luts + + V:\Remote Apps\ffmpeg\bin>ffmpeg -y -i + V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02._baked.mov + -pix_fmt yuv420p + -crf 18 + -timecode 00:00:00:01 + V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02..mov + + """ + + order = pyblish.api.ExtractorOrder + label = "Baked Colorspace" + optional = True + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + + if "collection" not in instance.data.keys(): + return + + # Store selection + selection = [i for i in nuke.allNodes() if i["selected"].getValue()] + + # Deselect all nodes to prevent external connections + [i["selected"].setValue(False) for i in nuke.allNodes()] + + temporary_nodes = [] + + # Create nodes + first_frame = min(instance.data["collection"].indexes) + last_frame = max(instance.data["collection"].indexes) + + temp_dir = tempfile.mkdtemp() + for f in instance.data["collection"]: + shutil.copy(f, os.path.join(temp_dir, os.path.basename(f))) + + node = previous_node = nuke.createNode("Read") + node["file"].setValue( + os.path.join(temp_dir, + os.path.basename(instance.data["collection"].format( + "{head}{padding}{tail}"))).replace("\\", "/")) + + node["first"].setValue(first_frame) + node["origfirst"].setValue(first_frame) + node["last"].setValue(last_frame) + node["origlast"].setValue(last_frame) + temporary_nodes.append(node) + + reformat_node = nuke.createNode("Reformat") + reformat_node["format"].setValue("HD_1080") + reformat_node["resize"].setValue("fit") + reformat_node["filter"].setValue("Lanczos6") + reformat_node["black_outside"].setValue(True) + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + viewer_process_node = nuke.ViewerProcess.node() + dag_node = None + if viewer_process_node: + dag_node = nuke.createNode(viewer_process_node.Class()) + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + # Copy viewer process values + excludedKnobs = ["name", "xpos", "ypos"] + for item in viewer_process_node.knobs().keys(): + if item not in excludedKnobs and item in dag_node.knobs(): + x1 = viewer_process_node[item] + x2 = dag_node[item] + x2.fromScript(x1.toScript(False)) + else: + self.log.warning("No viewer node found.") + + write_node = nuke.createNode("Write") + path = instance.data["collection"].format("{head}_baked.mov") + instance.data["baked_colorspace_movie"] = path + write_node["file"].setValue(path.replace("\\", "/")) + write_node["file_type"].setValue("mov") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + + # Render frames + nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) + + shutil.rmtree(temp_dir) + + # Restore selection + [i["selected"].setValue(False) for i in 
nuke.allNodes()] + [i["selected"].setValue(True) for i in selection] diff --git a/pype/plugins/nuke/publish/extract_nuke_write.py b/pype/plugins/nuke/publish/extract_nuke_write.py new file mode 100644 index 0000000000..155b5cf56d --- /dev/null +++ b/pype/plugins/nuke/publish/extract_nuke_write.py @@ -0,0 +1,116 @@ +import os + +import nuke +import pyblish.api + + +class Extract(pyblish.api.InstancePlugin): + """Super class for write and writegeo extractors.""" + + order = pyblish.api.ExtractorOrder + optional = True + label = "Extract Nuke [super]" + hosts = ["nuke"] + match = pyblish.api.Subset + + # targets = ["process.local"] + + def execute(self, instance): + # Get frame range + node = instance[0] + first_frame = nuke.root()["first_frame"].value() + last_frame = nuke.root()["last_frame"].value() + + if node["use_limit"].value(): + first_frame = node["first"].value() + last_frame = node["last"].value() + + # Render frames + nuke.execute(node.name(), int(first_frame), int(last_frame)) + + +class ExtractNukeWrite(Extract): + """ Extract output from write nodes. """ + + families = ["write", "local"] + label = "Extract Write" + + def process(self, instance): + + self.execute(instance) + + # Validate output + for filename in list(instance.data["collection"]): + if not os.path.exists(filename): + instance.data["collection"].remove(filename) + self.log.warning("\"{0}\" didn't render.".format(filename)) + + +class ExtractNukeCache(Extract): + + label = "Cache" + families = ["cache", "local"] + + def process(self, instance): + + self.execute(instance) + + # Validate output + msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) + assert os.path.exists(instance.data["output_path"]), msg + + +class ExtractNukeCamera(Extract): + + label = "Camera" + families = ["camera", "local"] + + def process(self, instance): + + node = instance[0] + node["writeGeometries"].setValue(False) + node["writePointClouds"].setValue(False) + node["writeAxes"].setValue(False) + + file_path = node["file"].getValue() + node["file"].setValue(instance.data["output_path"]) + + self.execute(instance) + + node["writeGeometries"].setValue(True) + node["writePointClouds"].setValue(True) + node["writeAxes"].setValue(True) + + node["file"].setValue(file_path) + + # Validate output + msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) + assert os.path.exists(instance.data["output_path"]), msg + + +class ExtractNukeGeometry(Extract): + + label = "Geometry" + families = ["geometry", "local"] + + def process(self, instance): + + node = instance[0] + node["writeCameras"].setValue(False) + node["writePointClouds"].setValue(False) + node["writeAxes"].setValue(False) + + file_path = node["file"].getValue() + node["file"].setValue(instance.data["output_path"]) + + self.execute(instance) + + node["writeCameras"].setValue(True) + node["writePointClouds"].setValue(True) + node["writeAxes"].setValue(True) + + node["file"].setValue(file_path) + + # Validate output + msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) + assert os.path.exists(instance.data["output_path"]), msg diff --git a/pype/plugins/nuke/publish/submit_deadline.py b/pype/plugins/nuke/publish/submit_deadline.py new file mode 100644 index 0000000000..4dabf4837e --- /dev/null +++ b/pype/plugins/nuke/publish/submit_deadline.py @@ -0,0 +1,264 @@ +import os +import json +import getpass + +from maya import cmds + +from avalon import api +from avalon.vendor import requests + +import pyblish.api + +import pype.maya.lib as lib + + +def 
get_renderer_variables(renderlayer=None): + """Retrieve the extension which has been set in the VRay settings + + Will return None if the current renderer is not VRay + For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which + start with `rs`. Use the actual node name, do NOT use the `nice name` + + Args: + renderlayer (str): the node name of the renderlayer. + + Returns: + dict + """ + + renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) + render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) + + padding = cmds.getAttr("{}.{}".format(render_attrs["node"], + render_attrs["padding"])) + + filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0] + + if renderer == "vray": + # Maya's renderSettings function does not return V-Ray file extension + # so we get the extension from vraySettings + extension = cmds.getAttr("vraySettings.imageFormatStr") + + # When V-Ray image format has not been switched once from default .png + # the getAttr command above returns None. As such we explicitly set + # it to `.png` + if extension is None: + extension = "png" + + filename_prefix = "/_/" + else: + # Get the extension, getAttr defaultRenderGlobals.imageFormat + # returns an index number. + filename_base = os.path.basename(filename_0) + extension = os.path.splitext(filename_base)[-1].strip(".") + filename_prefix = "/_/" + + return {"ext": extension, + "filename_prefix": filename_prefix, + "padding": padding, + "filename_0": filename_0} + + +def preview_fname(folder, scene, layer, padding, ext): + """Return output file path with #### for padding. + + Deadline requires the path to be formatted with # in place of numbers. + For example `/path/to/render.####.png` + + Args: + folder (str): The root output folder (image path) + scene (str): The scene name + layer (str): The layer name to be rendered + padding (int): The padding length + ext(str): The output file extension + + Returns: + str + + """ + + # Following hardcoded "/_/" + output = "{scene}/{scene}_{layer}/{layer}.{number}.{ext}".format( + scene=scene, + layer=layer, + number="#" * padding, + ext=ext + ) + + return os.path.join(folder, output) + + +class MayaSubmitDeadline(pyblish.api.InstancePlugin): + """Submit available render layers to Deadline + + Renders are submitted to a Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE + + """ + + label = "Submit to Deadline" + order = pyblish.api.IntegratorOrder + hosts = ["maya"] + families = ["renderlayer"] + + def process(self, instance): + + AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + "http://localhost:8082") + assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" + + context = instance.context + workspace = context.data["workspaceDir"] + filepath = context.data["currentFile"] + filename = os.path.basename(filepath) + comment = context.data.get("comment", "") + scene = os.path.splitext(filename)[0] + dirname = os.path.join(workspace, "renders") + renderlayer = instance.data['setMembers'] # rs_beauty + renderlayer_name = instance.data['subset'] # beauty + renderlayer_globals = instance.data["renderGlobals"] + legacy_layers = renderlayer_globals["UseLegacyRenderLayers"] + deadline_user = context.data.get("deadlineUser", getpass.getuser()) + jobname = "%s - %s" % (filename, instance.name) + + # Get the variables depending on the renderer + render_variables = get_renderer_variables(renderlayer) + output_filename_0 = preview_fname(folder=dirname, + scene=scene, + layer=renderlayer_name, + 
padding=render_variables["padding"], + ext=render_variables["ext"]) + + try: + # Ensure render folder exists + os.makedirs(dirname) + except OSError: + pass + + # Documentation for keys available at: + # https://docs.thinkboxsoftware.com + # /products/deadline/8.0/1_User%20Manual/manual + # /manual-submission.html#job-info-file-options + payload = { + "JobInfo": { + # Top-level group name + "BatchName": filename, + + # Job name, as seen in Monitor + "Name": jobname, + + # Arbitrary username, for visualisation in Monitor + "UserName": deadline_user, + + "Plugin": instance.data.get("mayaRenderPlugin", "MayaBatch"), + "Frames": "{start}-{end}x{step}".format( + start=int(instance.data["startFrame"]), + end=int(instance.data["endFrame"]), + step=int(instance.data["byFrameStep"]), + ), + + "Comment": comment, + + # Optional, enable double-click to preview rendered + # frames from Deadline Monitor + "OutputFilename0": output_filename_0.replace("\\", "/"), + }, + "PluginInfo": { + # Input + "SceneFile": filepath, + + # Output directory and filename + "OutputFilePath": dirname.replace("\\", "/"), + "OutputFilePrefix": render_variables["filename_prefix"], + + # Mandatory for Deadline + "Version": cmds.about(version=True), + + # Only render layers are considered renderable in this pipeline + "UsingRenderLayers": True, + + # Use legacy Render Layer system + "UseLegacyRenderLayers": legacy_layers, + + # Render only this layer + "RenderLayer": renderlayer, + + # Determine which renderer to use from the file itself + "Renderer": instance.data["renderer"], + + # Resolve relative references + "ProjectPath": workspace, + }, + + # Mandatory for Deadline, may be empty + "AuxFiles": [] + } + + # Include critical environment variables with submission + keys = [ + # This will trigger `userSetup.py` on the slave + # such that proper initialisation happens the same + # way as it does on a local machine. + # TODO(marcus): This won't work if the slaves don't + # have accesss to these paths, such as if slaves are + # running Linux and the submitter is on Windows. + "PYTHONPATH", + + # todo: This is a temporary fix for yeti variables + "PEREGRINEL_LICENSE", + "REDSHIFT_MAYAEXTENSIONSPATH", + "REDSHIFT_DISABLEOUTPUTLOCKFILES" + "VRAY_FOR_MAYA2018_PLUGINS_X64", + "VRAY_PLUGINS_X64", + "VRAY_USE_THREAD_AFFINITY", + "MAYA_MODULE_PATH" + ] + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **api.Session) + + PATHS = os.environ["PATH"].split(";") + environment["PATH"] = ";".join([p for p in PATHS + if p.startswith("P:")]) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) + + # Include optional render globals + render_globals = instance.data.get("renderGlobals", {}) + payload["JobInfo"].update(render_globals) + + plugin = payload["JobInfo"]["Plugin"] + self.log.info("using render plugin : {}".format(plugin)) + + self.preflight_check(instance) + + self.log.info("Submitting..") + self.log.info(json.dumps(payload, indent=4, sort_keys=True)) + + # E.g. 
http://192.168.0.1:8082/api/jobs + url = "{}/api/jobs".format(AVALON_DEADLINE) + response = requests.post(url, json=payload) + if not response.ok: + raise Exception(response.text) + + # Store output dir for unified publisher (filesequence) + instance.data["outputDir"] = os.path.dirname(output_filename_0) + instance.data["deadlineSubmissionJob"] = response.json() + + def preflight_check(self, instance): + """Ensure the startFrame, endFrame and byFrameStep are integers""" + + for key in ("startFrame", "endFrame", "byFrameStep"): + value = instance.data[key] + + if int(value) == value: + continue + + self.log.warning( + "%f=%d was rounded off to nearest integer" + % (value, int(value)) + ) diff --git a/pype/plugins/nuke/publish/validate_deadline_connection.py b/pype/plugins/nuke/publish/validate_deadline_connection.py new file mode 100644 index 0000000000..53399bfb33 --- /dev/null +++ b/pype/plugins/nuke/publish/validate_deadline_connection.py @@ -0,0 +1,27 @@ +import pyblish.api + +import avalon.api as api +from avalon.vendor import requests + + +class ValidateDeadlineConnection(pyblish.api.ContextPlugin): + """Validate Deadline Web Service is running""" + + label = "Validate Deadline Web Service" + order = pyblish.api.ValidatorOrder + hosts = ["maya"] + families = ["renderlayer"] + + def process(self, instance): + + AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + "http://localhost:8082") + + assert AVALON_DEADLINE is not None, "Requires AVALON_DEADLINE" + + # Check response + response = requests.get(AVALON_DEADLINE) + assert response.ok, "Response must be ok" + assert response.text.startswith("Deadline Web Service "), ( + "Web service did not respond with 'Deadline Web Service'" + ) \ No newline at end of file From 63c887056e653a1c30c53823fc96d0f43389dfff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 16 Nov 2018 15:43:12 +0100 Subject: [PATCH 05/78] working on plugins --- .../nuke/publish/collect_nuke_writes.py | 61 ++--- .../nuke/publish/collect_render_target.py | 46 ++++ .../publish/extract_nuke_baked_colorspace.py | 107 -------- .../nuke/publish/extract_script_save.py | 15 +- .../nuke/publish/publish_image_sequences.py | 98 +++++++ pype/plugins/nuke/publish/render_local.py | 47 ++++ pype/plugins/nuke/publish/submit_deadline.py | 247 +++++------------- .../publish/validate_deadline_connection.py | 27 -- .../publish/validate_prerenders_output.py | 20 -- 9 files changed, 290 insertions(+), 378 deletions(-) create mode 100644 pype/plugins/nuke/publish/collect_render_target.py delete mode 100644 pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py create mode 100644 pype/plugins/nuke/publish/publish_image_sequences.py create mode 100644 pype/plugins/nuke/publish/render_local.py delete mode 100644 pype/plugins/nuke/publish/validate_deadline_connection.py delete mode 100644 pype/plugins/nuke/publish/validate_prerenders_output.py diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py index 7f301803fb..2df9adf6fc 100644 --- a/pype/plugins/nuke/publish/collect_nuke_writes.py +++ b/pype/plugins/nuke/publish/collect_nuke_writes.py @@ -3,14 +3,9 @@ import os import nuke import pyblish.api import clique -import ft_utils -reload(ft_utils) - -global pre_name -pre_name = ft_utils.get_paths_from_template(['shot.vfx.prerender'], - False)[0].split('_')[0] +@pyblish.api.log class CollectNukeWrites(pyblish.api.ContextPlugin): """Collect all write nodes.""" @@ -35,7 +30,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): # 
Create instance instance = pyblish.api.Instance(node.name()) - instance.data["family"] = output_type instance.add(node) instance.data["label"] = node.name() @@ -47,27 +41,28 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): if node["use_limit"].getValue(): start_frame = int(node["first"].getValue()) end_frame = int(node["last"].getValue()) - print "writeNode collected: {}".format(node.name()) + + self.log.info("writeNode collected: {}".format(node.name())) + # Add collection collection = None - try: - path = "" - if pre_name in node.name(): - path = ft_utils.convert_hashes_in_file_name( - node['prerender_path'].getText()) - else: - path = nuke.filename(node) - path += " [{0}-{1}]".format(start_frame, end_frame) - collection = clique.parse(path) - ################################################### - '''possible place to start create mov publish write collection''' - ################################################### - except ValueError: - # Ignore the exception when the path does not match the - # collection. - pass + path = nuke.filename(node) + path += " [{0}-{1}]".format(start_frame, end_frame) + collection = clique.parse(path) - instance.data["collection"] = collection + instance.data.update({ + "asset": os.environ["AVALON_ASSET"], # todo: not a constant + "subset": subset, + "path": nuke.filename(node), + "outputDir": os.path.dirname(nuke.filename(node)), + "ext": output_type, # todo: should be redundant + "label": node.name(), + "family": "write", + "publish": False, + "collection": collection, + "start_frame": start_frame, + "end_frame": end_frame + }) instances.append(instance) @@ -93,23 +88,19 @@ class CollectNukeWritesProcess(pyblish.api.ContextPlugin): for key, value in item.data.iteritems(): instance.data[key] = value - if pre_name not in item.data["name"]: - instance.data["label"] += " - write - local" - instance.data["families"] = ["write", "local"] - else: - instance.data["label"] += " - prerender - local" - instance.data["families"] = ["prerender", "local"] + instance.data["label"] += " - render - local" + instance.data["families"] = ["render", "local"] for node in item: instance.add(node) # Adding/Checking publish attribute - if "process_local" not in node.knobs(): - knob = nuke.Boolean_Knob("process_local", "Process Local") + if "render_local" not in node.knobs(): + knob = nuke.Boolean_Knob("render_local", "Local rendering") knob.setValue(False) node.addKnob(knob) - value = bool(node["process_local"].getValue()) + value = bool(node["render_local"].getValue()) # Compare against selection selection = instance.context.data.get("selection", []) @@ -122,7 +113,7 @@ class CollectNukeWritesProcess(pyblish.api.ContextPlugin): instance.data["publish"] = value def instanceToggled(instance, value): - instance[0]["process_local"].setValue(value) + instance[0]["render_local"].setValue(value) instance.data["instanceToggled"] = instanceToggled diff --git a/pype/plugins/nuke/publish/collect_render_target.py b/pype/plugins/nuke/publish/collect_render_target.py new file mode 100644 index 0000000000..257f444720 --- /dev/null +++ b/pype/plugins/nuke/publish/collect_render_target.py @@ -0,0 +1,46 @@ +import pyblish.api + + +class CollectNukeRenderMode(pyblish.api.InstancePlugin): + # TODO: rewrite docstring to nuke + """Collect current comp's render Mode + + Options: + renderlocal + deadline + + Note that this value is set for each comp separately. When you save the + comp this information will be stored in that file. 
If for some reason the + available tool does not visualize which render mode is set for the + current comp, please run the following line in the console (Py2) + + comp.GetData("rendermode") + + This will return the name of the current render mode as seen above under + Options. + + """ + + order = pyblish.api.CollectorOrder + 0.4 + label = "Collect Render Mode" + hosts = ["nuke"] + families = ["write"] + + def process(self, instance): + """Collect all image sequence tools""" + options = ["local", "deadline"] + + node = instance[0] + + if bool(node["render_local"].getValue()): + rendermode = "local" + else: + rendermode = "deadline" + + assert rendermode in options, "Must be supported render mode" + + self.log.info("Render mode: {0}".format(rendermode)) + + # Append family + family = "write.{0}".format(rendermode) + instance.data["families"].append(family) diff --git a/pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py b/pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py deleted file mode 100644 index f2561bd7c5..0000000000 --- a/pype/plugins/nuke/publish/extract_nuke_baked_colorspace.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -import tempfile -import shutil - -import nuke - -import pyblish.api - - -class ExtractNukeBakedColorspace(pyblish.api.InstancePlugin): - """Extracts movie with baked in luts - - V:\Remote Apps\ffmpeg\bin>ffmpeg -y -i - V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02._baked.mov - -pix_fmt yuv420p - -crf 18 - -timecode 00:00:00:01 - V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02..mov - - """ - - order = pyblish.api.ExtractorOrder - label = "Baked Colorspace" - optional = True - families = ["review"] - hosts = ["nuke"] - - def process(self, instance): - - if "collection" not in instance.data.keys(): - return - - # Store selection - selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - - # Deselect all nodes to prevent external connections - [i["selected"].setValue(False) for i in nuke.allNodes()] - - temporary_nodes = [] - - # Create nodes - first_frame = min(instance.data["collection"].indexes) - last_frame = max(instance.data["collection"].indexes) - - temp_dir = tempfile.mkdtemp() - for f in instance.data["collection"]: - shutil.copy(f, os.path.join(temp_dir, os.path.basename(f))) - - node = previous_node = nuke.createNode("Read") - node["file"].setValue( - os.path.join(temp_dir, - os.path.basename(instance.data["collection"].format( - "{head}{padding}{tail}"))).replace("\\", "/")) - - node["first"].setValue(first_frame) - node["origfirst"].setValue(first_frame) - node["last"].setValue(last_frame) - node["origlast"].setValue(last_frame) - temporary_nodes.append(node) - - reformat_node = nuke.createNode("Reformat") - reformat_node["format"].setValue("HD_1080") - reformat_node["resize"].setValue("fit") - reformat_node["filter"].setValue("Lanczos6") - reformat_node["black_outside"].setValue(True) - reformat_node.setInput(0, previous_node) - previous_node = reformat_node - temporary_nodes.append(reformat_node) - - viewer_process_node = nuke.ViewerProcess.node() - dag_node = None - if viewer_process_node: - dag_node = nuke.createNode(viewer_process_node.Class()) - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - # Copy viewer process values - excludedKnobs = ["name", "xpos", "ypos"] - for item in viewer_process_node.knobs().keys(): - if item not in excludedKnobs and item in dag_node.knobs(): - x1 = viewer_process_node[item] - x2 = dag_node[item] - x2.fromScript(x1.toScript(False)) 
- else: - self.log.warning("No viewer node found.") - - write_node = nuke.createNode("Write") - path = instance.data["collection"].format("{head}_baked.mov") - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path.replace("\\", "/")) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - - # Render frames - nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - shutil.rmtree(temp_dir) - - # Restore selection - [i["selected"].setValue(False) for i in nuke.allNodes()] - [i["selected"].setValue(True) for i in selection] diff --git a/pype/plugins/nuke/publish/extract_script_save.py b/pype/plugins/nuke/publish/extract_script_save.py index b0eeb47886..76054f72c1 100644 --- a/pype/plugins/nuke/publish/extract_script_save.py +++ b/pype/plugins/nuke/publish/extract_script_save.py @@ -2,14 +2,15 @@ import nuke import pyblish.api -class ExtractScriptSave(pyblish.api.InstancePlugin): - """ Saves the script before extraction. """ - - order = pyblish.api.ExtractorOrder - 0.49 - label = "Script Save" - hosts = ["nuke"] - families = ["saver"] +class ExtractScriptSave(pyblish.api.Extractor): + """ + """ + label = 'Script Save' + order = pyblish.api.Extractor.order - 0.45 + hosts = ['nuke'] + families = ['script'] def process(self, instance): + self.log.info('saving script') nuke.scriptSave() diff --git a/pype/plugins/nuke/publish/publish_image_sequences.py b/pype/plugins/nuke/publish/publish_image_sequences.py new file mode 100644 index 0000000000..e8b468e94a --- /dev/null +++ b/pype/plugins/nuke/publish/publish_image_sequences.py @@ -0,0 +1,98 @@ +import re +import os +import json +import subprocess + +import pyblish.api + +from pype.action import get_errored_plugins_from_data + + +def _get_script(): + """Get path to the image sequence script""" + + # todo: use a more elegant way to get the python script + + try: + from pype.fusion.scripts import publish_filesequence + except Exception: + raise RuntimeError("Expected module 'publish_imagesequence'" + "to be available") + + module_path = publish_filesequence.__file__ + if module_path.endswith(".pyc"): + module_path = module_path[:-len(".pyc")] + ".py" + + return module_path + + +class PublishImageSequence(pyblish.api.InstancePlugin): + """Publish the generated local image sequences.""" + + order = pyblish.api.IntegratorOrder + label = "Publish Rendered Image Sequence(s)" + hosts = ["fusion"] + families = ["saver.renderlocal"] + + def process(self, instance): + + # Skip this plug-in if the ExtractImageSequence failed + errored_plugins = get_errored_plugins_from_data(instance.context) + if any(plugin.__name__ == "FusionRenderLocal" for plugin in + errored_plugins): + raise RuntimeError("Fusion local render failed, " + "publishing images skipped.") + + subset = instance.data["subset"] + ext = instance.data["ext"] + + # Regex to match resulting renders + regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset), + ext=re.escape(ext)) + + # The instance has most of the information already stored + metadata = { + "regex": regex, + "startFrame": instance.context.data["startFrame"], + "endFrame": instance.context.data["endFrame"], + "families": ["imagesequence"], + } + + # Write metadata and store the path in the instance + output_directory = instance.data["outputDir"] + path = os.path.join(output_directory, + "{}_metadata.json".format(subset)) + with 
open(path, "w") as f: + json.dump(metadata, f) + + assert os.path.isfile(path), ("Stored path is not a file for %s" + % instance.data["name"]) + + # Suppress any subprocess console + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + startupinfo.wShowWindow = subprocess.SW_HIDE + + process = subprocess.Popen(["python", _get_script(), + "--paths", path], + bufsize=1, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=startupinfo) + + while True: + output = process.stdout.readline() + # Break when there is no output or a return code has been given + if output == '' and process.poll() is not None: + process.stdout.close() + break + if output: + line = output.strip() + if line.startswith("ERROR"): + self.log.error(line) + else: + self.log.info(line) + + if process.returncode != 0: + raise RuntimeError("Process quit with non-zero " + "return code: {}".format(process.returncode)) diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py new file mode 100644 index 0000000000..63963c9b8c --- /dev/null +++ b/pype/plugins/nuke/publish/render_local.py @@ -0,0 +1,47 @@ +import pyblish.api + +import avalon.fusion as fusion + + +class NukeRenderLocal(pyblish.api.InstancePlugin): + # TODO: rewrite docstring to nuke + """Render the current Fusion composition locally. + + Extract the result of savers by starting a comp render + This will run the local render of Fusion. + + """ + + order = pyblish.api.ExtractorOrder + label = "Render Local" + hosts = ["nuke"] + families = ["write.local"] + + def process(self, instance): + + # This should be a ContextPlugin, but this is a workaround + # for a bug in pyblish to run once for a family: issue #250 + context = instance.context + key = "__hasRun{}".format(self.__class__.__name__) + if context.data.get(key, False): + return + else: + context.data[key] = True + + current_comp = context.data["currentFile"] + start_frame = instance.data["startFrame"] + end_frame = instance.data["end_frame"] + + self.log.info("Starting render") + self.log.info("Start frame: {}".format(start_frame)) + self.log.info("End frame: {}".format(end_frame)) + + # Render frames + result = nuke.execute( + node.name(), + int(first_frame), + int(last_frame) + ) + + if not result: + raise RuntimeError("Comp render failed") diff --git a/pype/plugins/nuke/publish/submit_deadline.py b/pype/plugins/nuke/publish/submit_deadline.py index 4dabf4837e..a183a54ab7 100644 --- a/pype/plugins/nuke/publish/submit_deadline.py +++ b/pype/plugins/nuke/publish/submit_deadline.py @@ -2,94 +2,15 @@ import os import json import getpass -from maya import cmds - from avalon import api from avalon.vendor import requests import pyblish.api -import pype.maya.lib as lib - -def get_renderer_variables(renderlayer=None): - """Retrieve the extension which has been set in the VRay settings - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. 
- - Returns: - dict - """ - - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0] - - if renderer == "vray": - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - filename_prefix = "/_/" - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. - filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - filename_prefix = "/_/" - - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - -def preview_fname(folder, scene, layer, padding, ext): - """Return output file path with #### for padding. - - Deadline requires the path to be formatted with # in place of numbers. - For example `/path/to/render.####.png` - - Args: - folder (str): The root output folder (image path) - scene (str): The scene name - layer (str): The layer name to be rendered - padding (int): The padding length - ext(str): The output file extension - - Returns: - str - - """ - - # Following hardcoded "/_/" - output = "{scene}/{scene}_{layer}/{layer}.{number}.{ext}".format( - scene=scene, - layer=layer, - number="#" * padding, - ext=ext - ) - - return os.path.join(folder, output) - - -class MayaSubmitDeadline(pyblish.api.InstancePlugin): - """Submit available render layers to Deadline +class NukeSubmitDeadline(pyblish.api.InstancePlugin): + # TODO: rewrite docstring to nuke + """Submit current Comp to Deadline Renders are submitted to a Deadline Web Service as supplied via the environment variable AVALON_DEADLINE @@ -98,42 +19,44 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): label = "Submit to Deadline" order = pyblish.api.IntegratorOrder - hosts = ["maya"] - families = ["renderlayer"] + hosts = ["nuke"] + families = ["write.deadline"] def process(self, instance): + context = instance.context + + key = "__hasRun{}".format(self.__class__.__name__) + if context.data.get(key, False): + return + else: + context.data[key] = True + AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", "http://localhost:8082") assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" - context = instance.context - workspace = context.data["workspaceDir"] + # Collect all saver instances in context that are to be rendered + write_instances = [] + for instance in context[:]: + if not self.families[0] in instance.data.get("families"): + # Allow only saver family instances + continue + + if not instance.data.get("publish", True): + # Skip inactive instances + continue + self.log.debug(instance.data["name"]) + write_instances.append(instance) + + if not write_instances: + raise RuntimeError("No instances found for Deadline submittion") + + hostVersion = int(context.data["hostVersion"]) filepath = context.data["currentFile"] filename = os.path.basename(filepath) comment = context.data.get("comment", "") - scene = os.path.splitext(filename)[0] - dirname = os.path.join(workspace, "renders") - renderlayer = 
instance.data['setMembers'] # rs_beauty - renderlayer_name = instance.data['subset'] # beauty - renderlayer_globals = instance.data["renderGlobals"] - legacy_layers = renderlayer_globals["UseLegacyRenderLayers"] deadline_user = context.data.get("deadlineUser", getpass.getuser()) - jobname = "%s - %s" % (filename, instance.name) - - # Get the variables depending on the renderer - render_variables = get_renderer_variables(renderlayer) - output_filename_0 = preview_fname(folder=dirname, - scene=scene, - layer=renderlayer_name, - padding=render_variables["padding"], - ext=render_variables["ext"]) - - try: - # Ensure render folder exists - os.makedirs(dirname) - except OSError: - pass # Documentation for keys available at: # https://docs.thinkboxsoftware.com @@ -145,81 +68,64 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "BatchName": filename, # Job name, as seen in Monitor - "Name": jobname, + "Name": filename, - # Arbitrary username, for visualisation in Monitor + # User, as seen in Monitor "UserName": deadline_user, - "Plugin": instance.data.get("mayaRenderPlugin", "MayaBatch"), - "Frames": "{start}-{end}x{step}".format( + # Use a default submission pool for Nuke + "Pool": "nuke", + + "Plugin": "Nuke", + "Frames": "{start}-{end}".format( start=int(instance.data["startFrame"]), - end=int(instance.data["endFrame"]), - step=int(instance.data["byFrameStep"]), + end=int(instance.data["endFrame"]) ), "Comment": comment, - - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - "OutputFilename0": output_filename_0.replace("\\", "/"), }, "PluginInfo": { # Input - "SceneFile": filepath, - - # Output directory and filename - "OutputFilePath": dirname.replace("\\", "/"), - "OutputFilePrefix": render_variables["filename_prefix"], + "FlowFile": filepath, # Mandatory for Deadline - "Version": cmds.about(version=True), + "Version": str(hostVersion), - # Only render layers are considered renderable in this pipeline - "UsingRenderLayers": True, + # Render in high quality + "HighQuality": True, - # Use legacy Render Layer system - "UseLegacyRenderLayers": legacy_layers, + # Whether saver output should be checked after rendering + # is complete + "CheckOutput": True, - # Render only this layer - "RenderLayer": renderlayer, - - # Determine which renderer to use from the file itself - "Renderer": instance.data["renderer"], - - # Resolve relative references - "ProjectPath": workspace, + # Proxy: higher numbers smaller images for faster test renders + # 1 = no proxy quality + "Proxy": 1, }, # Mandatory for Deadline, may be empty "AuxFiles": [] } - # Include critical environment variables with submission - keys = [ - # This will trigger `userSetup.py` on the slave - # such that proper initialisation happens the same - # way as it does on a local machine. - # TODO(marcus): This won't work if the slaves don't - # have accesss to these paths, such as if slaves are - # running Linux and the submitter is on Windows. 
- "PYTHONPATH", + # Enable going to rendered frames from Deadline Monitor + for index, instance in enumerate(write_instances): + path = instance.data["path"] + folder, filename = os.path.split(path) + payload["JobInfo"]["OutputDirectory%d" % index] = folder + payload["JobInfo"]["OutputFilename%d" % index] = filename - # todo: This is a temporary fix for yeti variables - "PEREGRINEL_LICENSE", - "REDSHIFT_MAYAEXTENSIONSPATH", - "REDSHIFT_DISABLEOUTPUTLOCKFILES" - "VRAY_FOR_MAYA2018_PLUGINS_X64", - "VRAY_PLUGINS_X64", - "VRAY_USE_THREAD_AFFINITY", - "MAYA_MODULE_PATH" + # Include critical variables with submission + keys = [ + # TODO: This won't work if the slaves don't have accesss to + # these paths, such as if slaves are running Linux and the + # submitter is on Windows. + "PYTHONPATH", + "NUKE_PATH" + # "OFX_PLUGIN_PATH", ] environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **api.Session) - PATHS = os.environ["PATH"].split(";") - environment["PATH"] = ";".join([p for p in PATHS - if p.startswith("P:")]) - payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( key=key, @@ -227,15 +133,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): ) for index, key in enumerate(environment) }) - # Include optional render globals - render_globals = instance.data.get("renderGlobals", {}) - payload["JobInfo"].update(render_globals) - - plugin = payload["JobInfo"]["Plugin"] - self.log.info("using render plugin : {}".format(plugin)) - - self.preflight_check(instance) - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) @@ -245,20 +142,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): if not response.ok: raise Exception(response.text) - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_filename_0) - instance.data["deadlineSubmissionJob"] = response.json() - - def preflight_check(self, instance): - """Ensure the startFrame, endFrame and byFrameStep are integers""" - - for key in ("startFrame", "endFrame", "byFrameStep"): - value = instance.data[key] - - if int(value) == value: - continue - - self.log.warning( - "%f=%d was rounded off to nearest integer" - % (value, int(value)) - ) + # Store the response for dependent job submission plug-ins + for instance in write_instances: + instance.data["deadlineSubmissionJob"] = response.json() diff --git a/pype/plugins/nuke/publish/validate_deadline_connection.py b/pype/plugins/nuke/publish/validate_deadline_connection.py deleted file mode 100644 index 53399bfb33..0000000000 --- a/pype/plugins/nuke/publish/validate_deadline_connection.py +++ /dev/null @@ -1,27 +0,0 @@ -import pyblish.api - -import avalon.api as api -from avalon.vendor import requests - - -class ValidateDeadlineConnection(pyblish.api.ContextPlugin): - """Validate Deadline Web Service is running""" - - label = "Validate Deadline Web Service" - order = pyblish.api.ValidatorOrder - hosts = ["maya"] - families = ["renderlayer"] - - def process(self, instance): - - AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", - "http://localhost:8082") - - assert AVALON_DEADLINE is not None, "Requires AVALON_DEADLINE" - - # Check response - response = requests.get(AVALON_DEADLINE) - assert response.ok, "Response must be ok" - assert response.text.startswith("Deadline Web Service "), ( - "Web service did not respond with 'Deadline Web Service'" - ) \ No newline at end of file diff --git a/pype/plugins/nuke/publish/validate_prerenders_output.py 
b/pype/plugins/nuke/publish/validate_prerenders_output.py deleted file mode 100644 index 412c55ac0a..0000000000 --- a/pype/plugins/nuke/publish/validate_prerenders_output.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -import pyblish.api - - -@pyblish.api.log -class ValidatePrerendersOutput(pyblish.api.Validator): - """Validates that the output directory for the write nodes exists""" - - families = ['write.prerender'] - hosts = ['nuke'] - label = 'Pre-renders output' - - def process(self, instance): - path = os.path.dirname(instance[0]['file'].value()) - - if 'output' not in path: - name = instance[0].name() - msg = 'Output directory for %s is not in an "output" folder.' % name - - raise ValueError(msg) From 8b0113b7be0727185582da687cd0da9337f5af44 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 18 Nov 2018 21:37:50 +0100 Subject: [PATCH 06/78] removing company names --- pype/plugins/global/publish/collect_comment.py | 2 +- pype/plugins/global/publish/collect_time.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/collect_comment.py b/pype/plugins/global/publish/collect_comment.py index a246b7eaba..5bbd1da2a1 100644 --- a/pype/plugins/global/publish/collect_comment.py +++ b/pype/plugins/global/publish/collect_comment.py @@ -1,7 +1,7 @@ import pyblish.api -class CollectColorbleedComment(pyblish.api.ContextPlugin): +class CollectComment(pyblish.api.ContextPlugin): """This plug-ins displays the comment dialog box per default""" label = "Collect Comment" diff --git a/pype/plugins/global/publish/collect_time.py b/pype/plugins/global/publish/collect_time.py index d4fa658425..e0adc7dfc3 100644 --- a/pype/plugins/global/publish/collect_time.py +++ b/pype/plugins/global/publish/collect_time.py @@ -2,7 +2,7 @@ import pyblish.api from avalon import api -class CollectMindbenderTime(pyblish.api.ContextPlugin): +class CollectTime(pyblish.api.ContextPlugin): """Store global time at the time of publish""" label = "Collect Current Time" From 70f42d4db7e535d28ea4e186af88c3497c3f3314 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 18 Nov 2018 21:38:32 +0100 Subject: [PATCH 07/78] fix 'avalon_tools_env' to 'tools_env' --- pype/ftrack/actions/ftrack_action_handler.py | 22 ++++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index 31afc79e7e..87440d0e13 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -62,7 +62,7 @@ class AppAction(object): self.session.event_hub.subscribe( 'topic=ftrack.action.discover and source.user.username={0}'.format( self.session.api_user - ), self._discover + ), self._discover ) self.session.event_hub.subscribe( @@ -196,7 +196,7 @@ class AppAction(object): response = self.launch( self.session, *args - ) + ) return self._handle_result( self.session, response, *args @@ -242,7 +242,8 @@ class AppAction(object): anatomy = t.anatomy io.install() - hierarchy = io.find_one({"type": 'asset', "name": entity['parent']['name']})['data']['parents'] + hierarchy = io.find_one({"type": 'asset', "name": entity['parent']['name']})[ + 'data']['parents'] io.uninstall() if hierarchy: # hierarchy = os.path.sep.join(hierarchy) @@ -260,9 +261,11 @@ class AppAction(object): # TODO Add paths to avalon setup from tomls if self.identifier == 'maya': - os.environ['PYTHONPATH'] += os.pathsep + os.path.join(os.getenv("AVALON_CORE"), 'setup', 'maya') + os.environ['PYTHONPATH'] 
From 70f42d4db7e535d28ea4e186af88c3497c3f3314 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sun, 18 Nov 2018 21:38:32 +0100
Subject: [PATCH 07/78] fix 'avalon_tools_env' to 'tools_env'

---
 pype/ftrack/actions/ftrack_action_handler.py | 22 ++++++++++++--------
 1 file changed, 13 insertions(+), 9 deletions(-)

diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py
index 31afc79e7e..87440d0e13 100644
--- a/pype/ftrack/actions/ftrack_action_handler.py
+++ b/pype/ftrack/actions/ftrack_action_handler.py
@@ -62,7 +62,7 @@ class AppAction(object):
         self.session.event_hub.subscribe(
             'topic=ftrack.action.discover and source.user.username={0}'.format(
                 self.session.api_user
-            ), self._discover
+            ), self._discover
         )
 
         self.session.event_hub.subscribe(
@@ -196,7 +196,7 @@ class AppAction(object):
         response = self.launch(
             self.session, *args
-        )
+        )
 
         return self._handle_result(
             self.session, response, *args
         )
@@ -242,7 +242,8 @@ class AppAction(object):
         anatomy = t.anatomy
 
         io.install()
-        hierarchy = io.find_one({"type": 'asset', "name": entity['parent']['name']})['data']['parents']
+        hierarchy = io.find_one({"type": 'asset', "name": entity['parent']['name']})[
+            'data']['parents']
         io.uninstall()
         if hierarchy:
             # hierarchy = os.path.sep.join(hierarchy)
@@ -260,9 +261,11 @@ class AppAction(object):
 
         # TODO Add paths to avalon setup from tomls
         if self.identifier == 'maya':
-            os.environ['PYTHONPATH'] += os.pathsep + os.path.join(os.getenv("AVALON_CORE"), 'setup', 'maya')
+            os.environ['PYTHONPATH'] += os.pathsep + \
+                os.path.join(os.getenv("AVALON_CORE"), 'setup', 'maya')
         elif self.identifier == 'nuke':
-            os.environ['NUKE_PATH'] = os.pathsep + os.path.join(os.getenv("AVALON_CORE"), 'setup', 'nuke')
+            os.environ['NUKE_PATH'] = os.pathsep + \
+                os.path.join(os.getenv("AVALON_CORE"), 'setup', 'nuke')
 
         # config = toml.load(lib.which_app(self.identifier + "_" + self.variant))
         env = os.environ
@@ -276,8 +279,8 @@ class AppAction(object):
         tools_attr = [os.environ["AVALON_APP_NAME"]]
         for parent in reversed(parents):
             # check if the attribute is empty, if not use it
-            if parent['custom_attributes']['avalon_tools_env']:
-                tools_attr.extend(parent['custom_attributes']['avalon_tools_env'])
+            if parent['custom_attributes']['tools_env']:
+                tools_attr.extend(parent['custom_attributes']['tools_env'])
                 break
 
         tools_env = acre.get_tools(tools_attr)
@@ -420,7 +423,7 @@ class BaseAction(object):
         self.session.event_hub.subscribe(
             'topic=ftrack.action.discover and source.user.username={0}'.format(
                 self.session.api_user
-            ), self._discover
+            ), self._discover
         )
 
         self.session.event_hub.subscribe(
@@ -442,7 +445,8 @@ class BaseAction(object):
         )
 
         if accepts:
-            self.logger.info(u'Discovering action with selection: {0}'.format(args[1]['data'].get('selection', [])))
+            self.logger.info(u'Discovering action with selection: {0}'.format(
+                args[1]['data'].get('selection', [])))
             return {
                 'items': [{
                     'label': self.label,
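Note: the 'tools_env' attribute read above feeds acre, which resolves tool names into environment dictionaries. A minimal sketch of the intended flow, assuming acre's get_tools/compute/merge API (the tool names here are placeholders, not values from the patch):

    import os

    import acre

    # tool names as they would be collected from ftrack custom attributes
    tools_attr = ["global", "maya_2018"]

    # resolve the tool definitions, expand their variables, then overlay
    # the result on top of the current process environment
    tools_env = acre.get_tools(tools_attr)
    env = acre.compute(tools_env)
    env = acre.merge(env, current_env=dict(os.environ))
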
From bc44306eeb781442758bc7adab3249a1dac41ed0 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sun, 18 Nov 2018 21:38:47 +0100
Subject: [PATCH 08/78] nuke plugins update

---
 .../nuke/publish/collect_nuke_writes.py       | 102 ++++++++----------
 .../nuke/publish/collect_render_target.py     |  11 +-
 pype/plugins/nuke/publish/render_local.py     |   2 +-
 pype/plugins/nuke/publish/submit_deadline.py  |   2 +-
 4 files changed, 51 insertions(+), 66 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py
index 2df9adf6fc..05def54582 100644
--- a/pype/plugins/nuke/publish/collect_nuke_writes.py
+++ b/pype/plugins/nuke/publish/collect_nuke_writes.py
@@ -22,79 +22,42 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
         for node in nuke.allNodes():
             if node.Class() != "Write":
                 continue
+            if node["disable"].value():
+                continue
+
+            # Determine defined file type
+            ext = node["file_type"].value()
 
             # Determine output type
             output_type = "img"
-            if node["file_type"].value() == "mov":
+            if ext == "mov":
                 output_type = "mov"
 
-            # Create instance
-            instance = pyblish.api.Instance(node.name())
-            instance.add(node)
-            instance.data["label"] = node.name()
-
-            instance.data["publish"] = False
-
             # Get frame range
             start_frame = int(nuke.root()["first_frame"].getValue())
             end_frame = int(nuke.root()["last_frame"].getValue())
+
             if node["use_limit"].getValue():
                 start_frame = int(node["first"].getValue())
                 end_frame = int(node["last"].getValue())
 
-            self.log.info("writeNode collected: {}".format(node.name()))
-
             # Add collection
             collection = None
             path = nuke.filename(node)
            path += " [{0}-{1}]".format(start_frame, end_frame)
             collection = clique.parse(path)
 
-            instance.data.update({
-                "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
-                "subset": subset,
-                "path": nuke.filename(node),
-                "outputDir": os.path.dirname(nuke.filename(node)),
-                "ext": output_type,  # todo: should be redundant
-                "label": node.name(),
-                "family": "write",
-                "publish": False,
-                "collection": collection,
-                "start_frame": start_frame,
-                "end_frame": end_frame
-            })
+            subset = node.name()
 
-            instances.append(instance)
+            # Include start and end render frame in label
+            label = "{subset} ({start}-{end})".format(subset=subset,
+                                                      start=int(start_frame),
+                                                      end=int(end_frame))
 
-        context.data["write_instances"] = instances
-
-        context.data["instances"] = (
-            context.data.get("instances", []) + instances)
-
-
-class CollectNukeWritesProcess(pyblish.api.ContextPlugin):
-    """Collect all local processing write instances."""
-
-    order = CollectNukeWrites.order + 0.01
-    label = "Writes Local"
-    hosts = ["nuke"]
-
-    # targets = ["process.local"]
-
-    def process(self, context):
-
-        for item in context.data["write_instances"]:
-            instance = context.create_instance(item.data["name"])
-            for key, value in item.data.iteritems():
-                instance.data[key] = value
-
-            instance.data["label"] += " - render - local"
-            instance.data["families"] = ["render", "local"]
-
-            for node in item:
-                instance.add(node)
+            # Create instance
+            instance = context.create_instance(subset)
+            instance.add(node)
 
-            # Adding/Checking publish attribute
+            # Adding/Checking publish and render target attribute
             if "render_local" not in node.knobs():
                 knob = nuke.Boolean_Knob("render_local", "Local rendering")
                 knob.setValue(False)
@@ -105,24 +68,45 @@ class CollectNukeWritesProcess(pyblish.api.ContextPlugin):
             # Compare against selection
             selection = instance.context.data.get("selection", [])
             if selection:
                 if list(set(instance) & set(selection)):
                     value = True
                 else:
                     value = False
 
-            instance.data["publish"] = value
-
             def instanceToggled(instance, value):
                 instance[0]["render_local"].setValue(value)
 
-            instance.data["instanceToggled"] = instanceToggled
+            instance.data.update({
+                "instanceToggled": instanceToggled,
+                "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
+                "path": nuke.filename(node),
+                "subset": subset,
+                "outputDir": os.path.dirname(nuke.filename(node)),
+                "ext": ext,  # todo: should be redundant
+                "label": label,
+                "families": ["render"],
+                "family": "write",
+                "publish": value,
+                "collection": collection,
+                "start_frame": start_frame,
+                "end_frame": end_frame,
+                "output_type": output_type
+            })
+            instances.append(instance)
+
+            self.log.info("writeNode collected: {}".format(subset))
+
+        context.data["write_instances"] = instances
+
+        context.data["instances"] = (
+            context.data.get("instances", []) + instances)
@@ -139,9 +123,9 @@
 
 class CollectNukeWritesPublish(pyblish.api.ContextPlugin):
     """Collect all write instances for publishing."""
 
     order = CollectNukeWrites.order + 0.01
-    label = "Writes"
+    label = "Writes Publish"
     hosts = ["nuke", "nukeassist"]
 
     # targets = ["default"]
@@ -153,7 +137,7 @@
             #     print key, value
             instance.data[key] = value
 
-            instance.data["families"] = ["output"]
+            instance.data["families"] = ["publish"]
             instance.data["label"] += (
                 " - " + os.path.basename(instance.data["collection"].format()))
diff --git a/pype/plugins/nuke/publish/collect_render_target.py b/pype/plugins/nuke/publish/collect_render_target.py
index 257f444720..c8059613a9 100644
--- a/pype/plugins/nuke/publish/collect_render_target.py
+++ b/pype/plugins/nuke/publish/collect_render_target.py
@@ -6,7 +6,7 @@ class CollectNukeRenderMode(pyblish.api.InstancePlugin):
     """Collect current comp's render Mode
 
     Options:
-        renderlocal
+        local
         deadline
 
     Note that this value is set for each comp separately. When you save the
@@ -24,7 +24,7 @@ class CollectNukeRenderMode(pyblish.api.InstancePlugin):
     order = pyblish.api.CollectorOrder + 0.4
     label = "Collect Render Mode"
     hosts = ["nuke"]
-    families = ["write"]
+    families = ["write", "render"]
 
     def process(self, instance):
         """Collect all image sequence tools"""
@@ -39,8 +39,9 @@ class CollectNukeRenderMode(pyblish.api.InstancePlugin):
 
         assert rendermode in options, "Must be supported render mode"
 
-        self.log.info("Render mode: {0}".format(rendermode))
-
-        # Append family
-        family = "write.{0}".format(rendermode)
+        instance.data["families"].remove("render")
+        family = "render.{0}".format(rendermode)
         instance.data["families"].append(family)
+
+        self.log.info("Render mode: {0}".format(rendermode))
diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py
index 63963c9b8c..eebdefc29f 100644
--- a/pype/plugins/nuke/publish/render_local.py
+++ b/pype/plugins/nuke/publish/render_local.py
@@ -15,7 +15,7 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
     order = pyblish.api.ExtractorOrder
     label = "Render Local"
     hosts = ["nuke"]
-    families = ["write.local"]
+    families = ["write", "render.local"]
 
     def process(self, instance):
diff --git a/pype/plugins/nuke/publish/submit_deadline.py b/pype/plugins/nuke/publish/submit_deadline.py
index a183a54ab7..ffb298f75d 100644
--- a/pype/plugins/nuke/publish/submit_deadline.py
+++ b/pype/plugins/nuke/publish/submit_deadline.py
@@ -20,7 +20,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
     label = "Submit to Deadline"
     order = pyblish.api.IntegratorOrder
     hosts = ["nuke"]
-    families = ["write.deadline"]
+    families = ["write", "render.deadline"]
 
     def process(self, instance):
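Note: the collector above hands clique a filename plus an explicit frame range in square brackets, exactly the notation clique.parse() understands. A small sketch of what the resulting collection exposes (the path is illustrative):

    import clique

    # a Write node path with the frame range appended, as built above
    path = "/renders/shot010/beauty.%04d.exr [1001-1100]"

    collection = clique.parse(path)
    print(collection.head)           # "/renders/shot010/beauty."
    print(collection.padding)        # 4
    print(len(collection.indexes))   # 100 frames
    print(collection.format())       # formatted back with its range
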
From 1820373b53a5b03bd9c880c3607e946821bb8547 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 20 Nov 2018 18:00:06 +0100
Subject: [PATCH 09/78] working on plugins for nuke

---
 pype/__init__.py                                        | 1 +
 pype/plugins/nuke/create/create_write_exr.py            | 4 +++-
 pype/plugins/nuke/publish/collect_current_file.py       | 8 ++++----
 pype/plugins/nuke/publish/collect_nuke_writes.py        | 5 -----
 pype/plugins/nuke/publish/collect_render_target.py      | 2 +-
 .../publish/{extract_script_save.py => script_save.py}  | 1 -
 6 files changed, 9 insertions(+), 12 deletions(-)
 rename pype/plugins/nuke/publish/{extract_script_save.py => script_save.py} (91%)

diff --git a/pype/__init__.py b/pype/__init__.py
index e5d1aee374..7f854b2073 100644
--- a/pype/__init__.py
+++ b/pype/__init__.py
@@ -24,3 +24,4 @@ def uninstall():
     print("Deregistering global plug-ins..")
     pyblish.deregister_plugin_path(PUBLISH_PATH)
     avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
+    print("Global plug-ins unregistered")
diff --git a/pype/plugins/nuke/create/create_write_exr.py b/pype/plugins/nuke/create/create_write_exr.py
index 41cd528b15..b77b5735e8 100644
--- a/pype/plugins/nuke/create/create_write_exr.py
+++ b/pype/plugins/nuke/create/create_write_exr.py
@@ -10,7 +10,9 @@ class CrateWriteExr(avalon.api.Creator):
     hosts = ["nuke"]
     family = "write"
     icon = "sign-out"
-
+    # TODO: create container of metadata into user knob
+    # TODO: if write node selected it will add metadata
+
     # def __init__(self, *args, **kwargs):
     #     super(CrateWriteExr, self).__init__(*args, **kwargs)
     #     self.data.setdefault("subset", "this")
diff --git a/pype/plugins/nuke/publish/collect_current_file.py b/pype/plugins/nuke/publish/collect_current_file.py
index 0d4867f08b..96ec44d9d6 100644
--- a/pype/plugins/nuke/publish/collect_current_file.py
+++ b/pype/plugins/nuke/publish/collect_current_file.py
@@ -1,18 +1,18 @@
 import pyblish.api
 
 
-class CollectCurrentFile(pyblish.api.ContextPlugin):
+class SelectCurrentFile(pyblish.api.ContextPlugin):
     """Inject the current working file into context"""
 
-    order = pyblish.api.CollectorOrder - 0.1
-    label = "Collect Current File"
+    order = pyblish.api.CollectorOrder
     hosts = ["nuke"]
-    families = ["workfile"]
 
     def process(self, context):
         import os
         import nuke
 
         current_file = nuke.root().name()
+
         normalised = os.path.normpath(current_file)
+
         context.data["current_file"] = normalised
         context.data["currentFile"] = normalised
diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py
index 05def54582..2774c2ed54 100644
--- a/pype/plugins/nuke/publish/collect_nuke_writes.py
+++ b/pype/plugins/nuke/publish/collect_nuke_writes.py
@@ -63,8 +63,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
                 knob.setValue(False)
                 node.addKnob(knob)
 
-            value = bool(node["render_local"].getValue())
-
             # Compare against selection
             selection = instance.context.data.get("selection", [])
             if selection:
@@ -73,11 +71,8 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
                 else:
                     value = False
 
-            def instanceToggled(instance, value):
-                instance[0]["render_local"].setValue(value)
 
             instance.data.update({
-                "instanceToggled": instanceToggled,
                 "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
                 "path": nuke.filename(node),
                 "subset": subset,
diff --git a/pype/plugins/nuke/publish/collect_render_target.py b/pype/plugins/nuke/publish/collect_render_target.py
index c8059613a9..86a38f26b6 100644
--- a/pype/plugins/nuke/publish/collect_render_target.py
+++ b/pype/plugins/nuke/publish/collect_render_target.py
@@ -24,7 +24,7 @@ class CollectNukeRenderMode(pyblish.api.InstancePlugin):
     order = pyblish.api.CollectorOrder + 0.4
     label = "Collect Render Mode"
     hosts = ["nuke"]
-    families = ["write", "render"]
+    families = ["write", "render.local"]
 
     def process(self, instance):
         """Collect all image sequence tools"""
diff --git a/pype/plugins/nuke/publish/extract_script_save.py b/pype/plugins/nuke/publish/script_save.py
similarity index 91%
rename from pype/plugins/nuke/publish/extract_script_save.py
rename to pype/plugins/nuke/publish/script_save.py
index 76054f72c1..472742f464 100644
--- a/pype/plugins/nuke/publish/extract_script_save.py
+++ b/pype/plugins/nuke/publish/script_save.py
@@ -8,7 +8,6 @@ class ExtractScriptSave(pyblish.api.Extractor):
     label = 'Script Save'
     order = pyblish.api.Extractor.order - 0.45
     hosts = ['nuke']
-    families = ['script']
 
     def process(self, instance):
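Note: the family strings shuffled around in this patch are what route instances to plug-ins — an InstancePlugin only processes instances whose family/families intersect its own list. A minimal sketch of that filtering (plug-in name and log line are illustrative):

    import pyblish.api


    class ExtractOnlyLocalRenders(pyblish.api.InstancePlugin):
        """Runs only for instances carrying the 'render.local' family."""

        order = pyblish.api.ExtractorOrder
        label = "Extract Only Local Renders"
        hosts = ["nuke"]
        families = ["render.local"]

        def process(self, instance):
            self.log.info("Would render %s locally" % instance.data["name"])
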
From d53548fb475141ad2611ac05391c185a7baf2f71 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 23 Nov 2018 16:21:30 +0100
Subject: [PATCH 10/78] First commit

---
 pype/ftrack/actions/action_syncToAvalon.py    |   3 +-
 pype/ftrack/events/event_sync_to_avalon.py    | 224 ++++++++++++++++++
 pype/ftrack/events/ftrack_event_handler.py    | 157 ++++++++++++
 .../events/{ => old}/file_version_statuses.py |   0
 .../events/{ => old}/new_task_update.py       |   0
 pype/ftrack/events/{ => old}/radio_buttons.py |   0
 .../events/{ => old}/thumbnail_updates.py     |   0
 .../{ => old}/version_to_task_status.py       |   0
 pype/ftrack/events/test_event.py              |   6 +-
 9 files changed, 385 insertions(+), 5 deletions(-)
 create mode 100644 pype/ftrack/events/event_sync_to_avalon.py
 create mode 100644 pype/ftrack/events/ftrack_event_handler.py
 rename pype/ftrack/events/{ => old}/file_version_statuses.py (100%)
 rename pype/ftrack/events/{ => old}/new_task_update.py (100%)
 rename pype/ftrack/events/{ => old}/radio_buttons.py (100%)
 rename pype/ftrack/events/{ => old}/thumbnail_updates.py (100%)
 rename pype/ftrack/events/{ => old}/version_to_task_status.py (100%)

diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py
index a5ee5ee49b..bdf8649714 100644
--- a/pype/ftrack/actions/action_syncToAvalon.py
+++ b/pype/ftrack/actions/action_syncToAvalon.py
@@ -292,10 +292,11 @@ class SyncToAvalon(BaseAction):
                 # TODO check if is asset in same folder!!! ???? FEATURE FOR FUTURE
                 print("Asset {} - updated".format(name))
 
+        entityId = io.find_one({'type': 'asset', 'name': name})['_id']
         ## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK
         # Set custom attribute to avalon/mongo id of entity (parentID is last)
         if ca_mongoid in entity['custom_attributes']:
-            entity['custom_attributes'][ca_mongoid] = str(parentId)
+            entity['custom_attributes'][ca_mongoid] = str(entityId)
         else:
             print("Custom attribute for <{}> is not created.".format(entity['name']))
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
new file mode 100644
index 0000000000..be5aef1086
--- /dev/null
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -0,0 +1,224 @@
+import os
+import sys
+import ftrack_api
+from ftrack_event_handler import BaseEvent
+from avalon import io, inventory, lib
+from avalon.vendor import toml
+import re
+
+class Sync_to_Avalon(BaseEvent):
+
+    def launch(self, session, entities, event):
+        self.ca_mongoid = 'avalon_mongo_id'
+
+        self.proj = None
+        for entity in entities:
+            try:
+                base_proj = entity['link'][0]
+            except:
+                continue
+            self.proj = session.get(base_proj['type'], base_proj['id'])
+            break
+
+        if self.proj is None:
+            return
+
+        os.environ["AVALON_PROJECT"] = self.proj['full_name']
+
+        proj_id = self.proj['custom_attributes'][self.ca_mongoid]
+
+        io.install()
+        self.avalon_project = io.find({"_id": proj_id})
+        self.projectId = proj_id
+        if self.avalon_project is None:
+            self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]})
+            self.projectId = self.avalon_project['_id']
+        io.uninstall()
+
+        self.importEntities = []
+        exceptions = ['assetversion', 'job', 'user']
+
+        for entity in entities:
+            if entity.entity_type.lower() in exceptions:
+                continue
+            elif entity.entity_type.lower() in ['task']:
+                entity = entity['parent']
+            try:
+                mongo_id = entity['custom_attributes'][self.ca_mongoid]
+            except:
+                return {
+                    'success': False,
+                    'message': "Please run 'Create Attributes' action or create custom attribute 'avalon_mongo_id' manually for {}".format(entity.entity_type)
+                }
+
+            if entity not in self.importEntities:
+                self.importEntities.append(entity)
+
+        if len(self.importEntities) < 1:
+            return
+
+        self.setAvalonAttributes()
+
+        io.install()
+
+        for entity in self.importEntities:
+            self.importToAvalon(entity)
+
+        io.uninstall()
+
+        return True
+
+    def importToAvalon(self, entity):
+        data = {}
+
+        type = 'asset'
+        name = entity['name']
+        print(1000*"*")
+        print(name)
+        silo = 'Film'
+        if entity.entity_type == 'Project':
+            type = 'project'
+            name = entity['full_name']
+            data['code'] = entity['name']
+        elif entity.entity_type in ['AssetBuild', 'Library']:
+            silo = 'Assets'
+
+        os.environ["AVALON_ASSET"] = name
+        os.environ["AVALON_SILO"] = silo
+
+        entity_type = entity.entity_type
+
+        data['ftrackId'] = entity['id']
+        data['entityType'] = entity_type
+
+        for cust_attr in self.custom_attributes:
+            if cust_attr['entity_type'].lower() in ['asset']:
+                data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
+
+            elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project':
+                data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
+
+            elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project':
+                # Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build')
+                entity_type = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
+                # Get object id of entity type
+                ent_obj_type_id = self.session.query('ObjectType where name is "{}"'.format(entity_type)).one()['id']
+
+                if cust_attr['object_type_id'] == ent_obj_type_id:
+                    data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
+
+        mongo_id = entity['custom_attributes'][self.ca_mongoid]
+        avalon_asset = io.find({'_id': mongo_id})
+        if avalon_asset is None:
+            avalon_asset = io.find_one({'type': type, 'name': name})
+
+        if entity_type in ['project']:
+            config = self.getConfig(entity)
+            template = {"schema": "avalon-core:inventory-1.0"}
+
+            if self.avalon_project is None:
+                mongo_id = inventory.save(self.proj['full_name'], config, template)
+
+                self.avalon_project = io.find({"_id": mongo_id})
+                self.projectId = mongo_id
+                if self.avalon_project is None:
+                    self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]})
+                    self.projectId = self.avalon_project['_id']
+
+            io.update_many(
+                {"_id": mongo_id},
+                {'$set':{
+                    'name':name,
+                    'config':config,
+                    'data':data,
+                }})
+            return
+
+        eLinks = []
+        for e in entity['link']:
+            tmp = self.session.get(e['type'], e['id'])
+            eLinks.append(tmp)
+
+        tasks = []
+        for child in entity['children']:
+            if child.entity_type in ['Task']:
+                tasks.append(child['name'])
+
+        folderStruct = []
+        parents = []
+        for i in range(1, len(eLinks)-1):
+            parents.append(eLinks[i])
+
+        for parent in parents:
+            name = self.checkName(parent['name'])
+            folderStruct.append(name)
+            parentId = io.find_one({'type': 'asset', 'name': name})['_id']
+            if parent['parent'].entity_type != 'project' and parentId is None:
+                self.importToAvalon(parent)
+                parentId = io.find_one({'type': 'asset', 'name': name})['_id']
+
+        hierarchy = os.path.sep.join(folderStruct)
+
+        data['tasks'] = tasks
+        data['parents'] = folderStruct
+        data['visualParent'] = parentId
+        data['hierarchy'] = hierarchy
+
+        if self.avalon_project is None:
+            self.importToAvalon(self.proj)
+
+        if avalon_asset is None:
+            mongo_id = inventory.create_asset(name, silo, data, self.projectId)
+
+        io.update_many(
+            {"_id": mongo_id},
+            {'$set':{
+                'name':name,
+                'silo':silo,
+                'data':data,
+                'Parent': self.projectId}})
+
+    def checkName(self, input_name):
+        if input_name.find(" ") == -1:
+            name = input_name
+        else:
+            name = input_name.replace(" ", "-")
+            print("Name of {} was changed to {}".format(input_name, name))
+        return name
+
+    def getConfig(self, entity):
+        apps = []
+        for app in entity['custom_attributes']['applications']:
+            try:
+                label = toml.load(lib.which_app(app))['label']
+                apps.append({'name':app, 'label':label})
+            except Exception as e:
+                print('Error with application {0} - {1}'.format(app, e))
+
+        config = {
+            'schema': 'avalon-core:config-1.0',
+            'tasks': [{'name': ''}],
+            'apps': apps,
+            # TODO redo work!!!
+            'template': {
+                'workfile': '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>',
+                'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}',
+                'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'}
+        }
+        return config
+
+    def setAvalonAttributes(self):
+        self.custom_attributes = []
+        all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one()
+        for cust_attr in all_avalon_attr['custom_attribute_configurations']:
+            if 'avalon_' not in cust_attr['key']:
+                self.custom_attributes.append(cust_attr)
+
+def register(session, **kw):
+    '''Register plugin. Called when used as an plugin.'''
+
+    if not isinstance(session, ftrack_api.session.Session):
+        return
+
+    event = Sync_to_Avalon(session)
+    event.register()
diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py
new file mode 100644
index 0000000000..a8a4ffe420
--- /dev/null
+++ b/pype/ftrack/events/ftrack_event_handler.py
@@ -0,0 +1,157 @@
+# :coding: utf-8
+# :copyright: Copyright (c) 2017 ftrack
+import os
+import logging
+import getpass
+import platform
+import ftrack_api
+import toml
+from avalon import io, lib, pipeline
+from avalon import session as sess
+import acre
+
+from app.api import (
+    Templates,
+    Logger
+)
+
+
+class BaseEvent(object):
+    '''Custom Action base class
+
+    `label` a descriptive string identifying your action.
+
+    `variant` To group actions together, give them the same
+    label and specify a unique variant per action.
+
+    `identifier` a unique identifier for your action.
+
+    `description` a verbose descriptive text for your action
+
+    '''
+
+    def __init__(self, session):
+        '''Expects a ftrack_api.Session instance'''
+
+        self.logger = logging.getLogger(
+            '{0}.{1}'.format(__name__, self.__class__.__name__)
+        )
+
+        self._session = session
+
+    @property
+    def session(self):
+        '''Return current session.'''
+        return self._session
+
+    def register(self):
+        '''Registers the event, subscribing to the ftrack.update topic.'''
+        self.session.event_hub.subscribe('topic=ftrack.update', self._launch)
+
+    def _translate_event(self, session, event):
+        '''Return *event* translated structure to be used with the API.'''
+        print(100*"-")
+        print(event)
+        _selection = event['data'].get('entities',[])
+
+        _entities = list()
+        for entity in _selection:
+            if entity['entityType'] in ['socialfeed']:
+                continue
+            _entities.append(
+                (
+                    session.get(self._get_entity_type(entity), entity.get('entityId'))
+                    # self._get_entity_type(entity), entity.get('entityId')
+                )
+            )
+
+        return [
+            _entities,
+            event
+        ]
+
+    def _get_entity_type(self, entity):
+        '''Return translated entity type that can be used with API.'''
+        # Get entity type and make sure it is lower cased. Most places except
+        # the component tab in the Sidebar will use lower case notation.
+        entity_type = entity.get('entityType').replace('_', '').lower()
+
+        for schema in self.session.schemas:
+            alias_for = schema.get('alias_for')
+
+            if (
+                alias_for and isinstance(alias_for, str) and
+                alias_for.lower() == entity_type
+            ):
+                return schema['id']
+
+        for schema in self.session.schemas:
+            if schema['id'].lower() == entity_type:
+                return schema['id']
+
+        raise ValueError(
+            'Unable to translate entity type: {0}.'.format(entity_type)
+        )
+
+    def _launch(self, event):
+        args = self._translate_event(
+            self.session, event
+        )
+
+        response = self.launch(
+            self.session, *args
+        )
+
+        return self._handle_result(
+            self.session, response, *args
+        )
+
+    def launch(self, session, entities, event):
+        '''Callback method for the custom action.
+
+        return either a bool ( True if successful or False if the action failed )
+        or a dictionary with the keys `message` and `success`, the message should be a
+        string and will be displayed as feedback to the user, success should be a bool,
+        True if successful or False if the action failed.
+
+        *session* is a `ftrack_api.Session` instance
+
+        *entities* is a list of tuples each containing the entity type and the entity id.
+        If the entity is hierarchical you will always get the entity
+        type TypedContext, once retrieved through a get operation you
+        will have the "real" entity type ie. example Shot, Sequence
+        or Asset Build.
+
+        *event* the unmodified original event
+
+        '''
+        raise NotImplementedError()
+
+
+    def _handle_result(self, session, result, entities, event):
+        '''Validate the returned result from the action callback'''
+        if isinstance(result, bool):
+            result = {
+                'success': result,
+                'message': (
+                    '{0} launched successfully.'.format(
+                        self.__class__.__name__
+                    )
+                )
+            }
+
+        elif isinstance(result, dict):
+            for key in ('success', 'message'):
+                if key in result:
+                    continue
+
+                raise KeyError(
+                    'Missing required key: {0}.'.format(key)
+                )
+
+        else:
+            self.logger.error(
                'Invalid result type must be bool or dictionary!'
+            )
+
+        return result
diff --git a/pype/ftrack/events/file_version_statuses.py b/pype/ftrack/events/old/file_version_statuses.py
similarity index 100%
rename from pype/ftrack/events/file_version_statuses.py
rename to pype/ftrack/events/old/file_version_statuses.py
diff --git a/pype/ftrack/events/new_task_update.py b/pype/ftrack/events/old/new_task_update.py
similarity index 100%
rename from pype/ftrack/events/new_task_update.py
rename to pype/ftrack/events/old/new_task_update.py
diff --git a/pype/ftrack/events/radio_buttons.py b/pype/ftrack/events/old/radio_buttons.py
similarity index 100%
rename from pype/ftrack/events/radio_buttons.py
rename to pype/ftrack/events/old/radio_buttons.py
diff --git a/pype/ftrack/events/thumbnail_updates.py b/pype/ftrack/events/old/thumbnail_updates.py
similarity index 100%
rename from pype/ftrack/events/thumbnail_updates.py
rename to pype/ftrack/events/old/thumbnail_updates.py
diff --git a/pype/ftrack/events/version_to_task_status.py b/pype/ftrack/events/old/version_to_task_status.py
similarity index 100%
rename from pype/ftrack/events/version_to_task_status.py
rename to pype/ftrack/events/old/version_to_task_status.py
diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/test_event.py
index 3be53492d4..7e5fe6d903 100644
--- a/pype/ftrack/events/test_event.py
+++ b/pype/ftrack/events/test_event.py
@@ -1,6 +1,5 @@
 # import ftrack_api as local session
 import ftrack_api
-from utils import print_entity_head
 
 # session = ftrack_api.Session()
@@ -14,8 +13,7 @@ def test_event(event):
 
     for entity in event['data'].get('entities', []):
         if entity['entityType'] == 'task' and entity['action'] == 'update':
-            print "\n\nevent script: {}".format(__file__)
-            print_entity_head.print_entity_head(entity, session)
+            print("\n\nevent script: {}".format(__file__))
 
             # for k in task.keys():
             #     print k, task[k]
@@ -23,6 +21,6 @@ def test_event(event):
             #     print task['assignments']
 
             for e in entity.keys():
-                print '{0}: {1}'.format(e, entity[e])
+                print('{0}: {1}'.format(e, entity[e]))
 
 # end of event procedure ----------------------------------
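Note: BaseEvent above wires its _launch callback to every 'ftrack.update' event. A standalone sketch of the same subscription loop with plain ftrack_api (the handler body is illustrative):

    import ftrack_api


    def on_update(event):
        # each update event carries the list of changed entities
        for entity in event["data"].get("entities", []):
            print(entity.get("entityType"), entity.get("action"))


    session = ftrack_api.Session()
    session.event_hub.subscribe("topic=ftrack.update", on_update)
    session.event_hub.wait()  # blocks and dispatches incoming events
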
From b8d803c2e70468887a880644dbfe9e188d16528f Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 23 Nov 2018 17:08:16 +0100
Subject: [PATCH 11/78] Event seems to work, need more testing and refactoring with action

---
 pype/ftrack/events/event_sync_to_avalon.py | 27 +++++++++++-----------
 pype/ftrack/events/ftrack_event_handler.py |  2 --
 2 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index be5aef1086..d8ede03503 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -5,6 +5,7 @@ from ftrack_event_handler import BaseEvent
 from avalon import io, inventory, lib
 from avalon.vendor import toml
 import re
+from bson.objectid import ObjectId
 
 class Sync_to_Avalon(BaseEvent):
 
@@ -28,7 +29,7 @@ class Sync_to_Avalon(BaseEvent):
         proj_id = self.proj['custom_attributes'][self.ca_mongoid]
 
         io.install()
-        self.avalon_project = io.find({"_id": proj_id})
+        self.avalon_project = io.find({"_id": ObjectId(proj_id)})
         self.projectId = proj_id
         if self.avalon_project is None:
             self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]})
@@ -73,8 +74,6 @@ class Sync_to_Avalon(BaseEvent):
 
         type = 'asset'
         name = entity['name']
-        print(1000*"*")
-        print(name)
         silo = 'Film'
         if entity.entity_type == 'Project':
@@ -108,9 +107,7 @@ class Sync_to_Avalon(BaseEvent):
                     data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
 
         mongo_id = entity['custom_attributes'][self.ca_mongoid]
-        avalon_asset = io.find({'_id': mongo_id})
-        if avalon_asset is None:
-            avalon_asset = io.find_one({'type': type, 'name': name})
+
 
         if entity_type in ['project']:
             config = self.getConfig(entity)
@@ -119,14 +116,14 @@ class Sync_to_Avalon(BaseEvent):
 
             if self.avalon_project is None:
                 mongo_id = inventory.save(self.proj['full_name'], config, template)
 
-                self.avalon_project = io.find({"_id": mongo_id})
+                self.avalon_project = io.find({"_id": ObjectId(mongo_id)})
                 self.projectId = mongo_id
                 if self.avalon_project is None:
                     self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]})
                     self.projectId = self.avalon_project['_id']
 
             io.update_many(
-                {"_id": mongo_id},
+                {"_id": ObjectId(mongo_id)},
                 {'$set':{
                     'name':name,
                     'config':config,
@@ -131,6 +132,10 @@ class Sync_to_Avalon(BaseEvent):
                 }})
             return
 
+
+        if self.avalon_project is None:
+            self.importToAvalon(self.proj)
+
         eLinks = []
         for e in entity['link']:
             tmp = self.session.get(e['type'], e['id'])
             eLinks.append(tmp)
@@ -150,12 +151,12 @@ class Sync_to_Avalon(BaseEvent):
             parents.append(eLinks[i])
 
         for parent in parents:
-            name = self.checkName(parent['name'])
-            folderStruct.append(name)
-            parentId = io.find_one({'type': 'asset', 'name': name})['_id']
+            parname = self.checkName(parent['name'])
+            folderStruct.append(parname)
+            parentId = io.find_one({'type': 'asset', 'name': parname})['_id']
             if parent['parent'].entity_type != 'project' and parentId is None:
                 self.importToAvalon(parent)
-                parentId = io.find_one({'type': 'asset', 'name': name})['_id']
+                parentId = io.find_one({'type': 'asset', 'name': parname})['_id']
 
         hierarchy = os.path.sep.join(folderStruct)
 
@@ -161,17 +162,21 @@ class Sync_to_Avalon(BaseEvent):
         data['visualParent'] = parentId
         data['hierarchy'] = hierarchy
 
-        if self.avalon_project is None:
-            self.importToAvalon(self.proj)
-
+        avalon_asset = io.find_one({'_id': ObjectId(mongo_id)})
         if avalon_asset is None:
+            avalon_asset = io.find_one({'type': type, 'name': name})
+            if avalon_asset is None:
+                mongo_id = inventory.create_asset(name, silo, data, self.projectId)
+        elif avalon_asset['name'] != name:
             mongo_id = inventory.create_asset(name, silo, data, self.projectId)
 
         io.update_many(
-            {"_id": mongo_id},
+            {"_id": ObjectId(mongo_id)},
             {'$set':{
                 'name':name,
                 'silo':silo,
                 'data':data,
-                'Parent': self.projectId}})
+                'parent': self.projectId}})
+
 
     def checkName(self, input_name):
diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py
index a8a4ffe420..775d6c07c2 100644
--- a/pype/ftrack/events/ftrack_event_handler.py
+++ b/pype/ftrack/events/ftrack_event_handler.py
@@ -50,8 +50,6 @@ class BaseEvent(object):
 
     def _translate_event(self, session, event):
         '''Return *event* translated structure to be used with the API.'''
-        print(100*"-")
-        print(event)
         _selection = event['data'].get('entities',[])
 
         _entities = list()
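Note: the ObjectId fix in this patch matters because MongoDB stores _id as an ObjectId, not a string — querying with the raw string kept on the ftrack custom attribute simply matches nothing. A minimal illustration (the id string is hypothetical):

    from bson.objectid import ObjectId

    # hypothetical id string as stored in an ftrack custom attribute
    mongo_id = "5bfca0a4b1e3a127d8f3a1c2"

    # A raw string never equals an ObjectId value, so a query such as
    # io.find_one({"_id": mongo_id}) silently returns None.
    # Converting first makes the comparison type-correct:
    query = {"_id": ObjectId(mongo_id)}
    print(query)
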
plug-ins..") pyblish.register_plugin_path(PUBLISH_PATH) avalon.register_plugin_path(avalon.Loader, LOAD_PATH) def uninstall(): - print("Deregistering global plug-ins..") + log.info("Deregistering global plug-ins..") pyblish.deregister_plugin_path(PUBLISH_PATH) avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) - print("Global plug-ins unregistred") + log.info("Global plug-ins unregistred") diff --git a/pype/api.py b/pype/api.py index e665d40535..478cb1cc95 100644 --- a/pype/api.py +++ b/pype/api.py @@ -15,6 +15,8 @@ from .action import ( RepairContextAction ) +from app.api import Logger + __all__ = [ # plugin classes "Extractor", @@ -25,5 +27,7 @@ __all__ = [ "ValidateMeshOrder", # action "get_errored_instances_from_context", - "RepairAction" + "RepairAction", + + "Logger" ] diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index db2b1f4982..8e3c33f9c3 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -1,7 +1,20 @@ import os - +import sys from avalon import api as avalon from pyblish import api as pyblish +from pype.api import Logger +# import logging +import nuke + +# removing logger handler created in avalon_core +loggers = [handler + for handler in Logger.logging.root.handlers[:]] + +if len(loggers) > 2: + Logger.logging.root.removeHandler(loggers[0]) + + +log = Logger.getLogger(__name__, "nuke") PARENT_DIR = os.path.dirname(__file__) PACKAGE_DIR = os.path.dirname(PARENT_DIR) @@ -12,9 +25,49 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "nuke", "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "nuke", "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nuke", "inventory") +self = sys.modules[__name__] +self.nLogger = None + + +class NukeHandler(Logger.logging.Handler): + ''' + Nuke Handler - emits logs into nuke's script editor. + warning will emit nuke.warning() + critical and fatal would popup msg dialog to alert of the error. 
From 41865540f8c68e2dec8b2c75eba1aea59aa2b64a Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 26 Nov 2018 10:37:56 +0100
Subject: [PATCH 13/78] adding app api Logger into pype api

---
 pype/api.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/pype/api.py b/pype/api.py
index 478cb1cc95..c26d29d707 100644
--- a/pype/api.py
+++ b/pype/api.py
@@ -17,6 +17,7 @@ from .action import (
 
 from app.api import Logger
 
+
 __all__ = [
     # plugin classes
     "Extractor",
@@ -29,5 +30,6 @@ __all__ = [
     "get_errored_instances_from_context",
     "RepairAction",
 
-    "Logger"
+    "Logger",
+
 ]
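Note: with this re-export in place, the import used across the repo becomes a one-liner, mirroring how pype/__init__.py already consumes it:

    from pype.api import Logger

    log = Logger.getLogger(__name__)
    log.info("hello from a pype module")
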
From 925ece4a5c9269d67ce73446f60ea87a5fdf7418 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 26 Nov 2018 10:38:27 +0100
Subject: [PATCH 14/78] adding project_setting gui into widgets

---
 pype/widgets/project_settings.py | 498 +++++++++++++++++++++++++++++++
 1 file changed, 498 insertions(+)
 create mode 100644 pype/widgets/project_settings.py

diff --git a/pype/widgets/project_settings.py b/pype/widgets/project_settings.py
new file mode 100644
index 0000000000..98c97b4885
--- /dev/null
+++ b/pype/widgets/project_settings.py
@@ -0,0 +1,498 @@
+
+
+from app import style
+from avalon.vendor.Qt import QtCore, QtGui, QtWidgets
+import os
+import getpass
+import platform
+
+import ftrack_api
+
+
+# object symbol
+
+
+class Project_name_getUI(QtWidgets.QWidget):
+    '''
+    Project setting ui: here all the necessary ui widgets are created;
+    they are going to be used in a later process for dynamic linking of
+    the project list to each project's attributes
+    '''
+
+    def __init__(self, parent=None):
+        super(Project_name_getUI, self).__init__(parent)
+
+        self.platform = platform.system()
+        self.new_index = 0
+        # get projects from ftrack
+        self.session = ftrack_api.Session()
+        self.projects_from_ft = self.session.query(
+            'Project where status is active')
+        self.disks_from_ft = self.session.query('Disk')
+        self.schemas_from_ft = self.session.query('ProjectSchema')
+        self.projects = self._get_projects_ftrack()
+
+        # define window geometry
+        self.setWindowTitle('Set project attributes')
+        self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
+        self.resize(550, 340)
+        self.setStyleSheet(style.load_stylesheet())
+
+        # define disk combobox widget
+        self.disks = self._get_all_disks()
+        self.disk_combobox_label = QtWidgets.QLabel('Destination storage:')
+        self.disk_combobox = QtWidgets.QComboBox()
+
+        # define schema combobox widget
+        self.schemas = self._get_all_schemas()
+        self.schema_combobox_label = QtWidgets.QLabel('Project schema:')
+        self.schema_combobox = QtWidgets.QComboBox()
+
+        # define fps widget
+        self.fps_label = QtWidgets.QLabel('Fps:')
+        self.fps_label.setAlignment(
+            QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
+        self.fps = QtWidgets.QLineEdit()
+
+        # define project dir widget
+        self.project_dir_label = QtWidgets.QLabel('Project dir:')
+        self.project_dir_label.setAlignment(
+            QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
+        self.project_dir = QtWidgets.QLineEdit()
+
+        self.project_path_label = QtWidgets.QLabel(
+            'Project_path (if not then created):')
+        self.project_path_label.setAlignment(
+            QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
+        project_path_font = QtGui.QFont(
+            "Helvetica [Cronyx]", 12, QtGui.QFont.Bold)
+        self.project_path = QtWidgets.QLabel()
+        self.project_path.setObjectName('nom_plan_label')
+        self.project_path.setStyleSheet(
+            'QtWidgets.QLabel#nom_plan_label {color: red}')
+        self.project_path.setAlignment(
+            QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
+        self.project_path.setFont(project_path_font)
+
+        # define handles widget
+        self.handles_label = QtWidgets.QLabel('Handles:')
+        self.handles_label.setAlignment(
+            QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
+        self.handles = QtWidgets.QLineEdit()
+
+        # define resolution widget
+        self.resolution_w_label = QtWidgets.QLabel('W:')
+        self.resolution_w = QtWidgets.QLineEdit()
+        self.resolution_h_label = QtWidgets.QLabel('H:')
+        self.resolution_h = QtWidgets.QLineEdit()
+
+        devider = QtWidgets.QFrame()
+        # devider.Shape(QFrame.HLine)
+        devider.setFrameShape(QtWidgets.QFrame.HLine)
+        devider.setFrameShadow(QtWidgets.QFrame.Sunken)
+
+        self.generate_lines()
+
+        # define push buttons
+        self.set_pushbutton = QtWidgets.QPushButton('Set project')
+        self.cancel_pushbutton = QtWidgets.QPushButton('Cancel')
+
+        # definition of layouts
+        ############################################
+        action_layout = QtWidgets.QHBoxLayout()
+        action_layout.addWidget(self.set_pushbutton)
+        action_layout.addWidget(self.cancel_pushbutton)
+
+        # schema property
+        schema_layout = QtWidgets.QGridLayout()
+        schema_layout.addWidget(self.schema_combobox, 0, 1)
+        schema_layout.addWidget(self.schema_combobox_label, 0, 0)
+
+        # storage property
+        storage_layout = QtWidgets.QGridLayout()
+        storage_layout.addWidget(self.disk_combobox, 0, 1)
+        storage_layout.addWidget(self.disk_combobox_label, 0, 0)
+
+        # fps property
+        fps_layout = QtWidgets.QGridLayout()
+        fps_layout.addWidget(self.fps, 1, 1)
+        fps_layout.addWidget(self.fps_label, 1, 0)
+
+        # project dir property
+        project_dir_layout = QtWidgets.QGridLayout()
+        project_dir_layout.addWidget(self.project_dir, 1, 1)
+        project_dir_layout.addWidget(self.project_dir_label, 1, 0)
+
+        # project path property
+        project_path_layout = QtWidgets.QGridLayout()
+        spacer_1_item = QtWidgets.QSpacerItem(10, 10)
+        project_path_layout.addItem(spacer_1_item, 0, 1)
+        project_path_layout.addWidget(self.project_path_label, 1, 1)
+        project_path_layout.addWidget(self.project_path, 2, 1)
+        spacer_2_item = QtWidgets.QSpacerItem(20, 20)
+        project_path_layout.addItem(spacer_2_item, 3, 1)
+
+        # handles property
+        handles_layout = QtWidgets.QGridLayout()
+        handles_layout.addWidget(self.handles, 1, 1)
+        handles_layout.addWidget(self.handles_label, 1, 0)
+
+        # resolution property
+        resolution_layout = QtWidgets.QGridLayout()
+        resolution_layout.addWidget(self.resolution_w_label, 1, 1)
+        resolution_layout.addWidget(self.resolution_w, 2, 1)
+        resolution_layout.addWidget(self.resolution_h_label, 1, 2)
+        resolution_layout.addWidget(self.resolution_h, 2, 2)
+
+        # form project property layout
+        p_layout = QtWidgets.QGridLayout()
+        p_layout.addLayout(storage_layout, 1, 0)
+        p_layout.addLayout(schema_layout, 2, 0)
+        p_layout.addLayout(project_dir_layout, 3, 0)
+        p_layout.addLayout(fps_layout, 4, 0)
+        p_layout.addLayout(handles_layout, 5, 0)
+        p_layout.addLayout(resolution_layout, 6, 0)
+        p_layout.addWidget(devider, 7, 0)
+        spacer_item = QtWidgets.QSpacerItem(
+            150,
+            40,
+            QtWidgets.QSizePolicy.Minimum,
+            QtWidgets.QSizePolicy.Expanding
+        )
+        p_layout.addItem(spacer_item, 8, 0)
+
+        # form with list to one layout with project property
+        list_layout = QtWidgets.QGridLayout()
+        list_layout.addLayout(p_layout, 1, 0)
+        list_layout.addWidget(self.listWidget, 1, 1)
+
+        root_layout = QtWidgets.QVBoxLayout()
+        root_layout.addLayout(project_path_layout)
+        root_layout.addWidget(devider)
+        root_layout.addLayout(list_layout)
+        root_layout.addLayout(action_layout)
+
+        self.setLayout(root_layout)
+
+    def generate_lines(self):
+        '''
+        Generate the lines of the project list
+        '''
+
+        self.listWidget = QtWidgets.QListWidget()
+        for self.index, p in enumerate(self.projects):
+            item = QtWidgets.QListWidgetItem("{full_name}".format(**p))
+            # item.setSelected(False)
+            self.listWidget.addItem(item)
+            print(self.listWidget.indexFromItem(item))
+        # self.listWidget.setCurrentItem(self.listWidget.itemFromIndex(1))
+
+        # add options to schemas widget
+        self.schema_combobox.addItems(self.schemas)
+
+        # add options to disk widget
+        self.disk_combobox.addItems(self.disks)
+
+        # populate content of project info widgets
+        self.projects[1] = self._fill_project_attributes_widgets(p, None)
+
+    def _fill_project_attributes_widgets(self, p=None, index=None):
+        '''
+        Fill the widgets with the actual information saved on ftrack
+        '''
+
+        if index is None:
+            self.new_index = 1
+
+        if not p:
+            pass
+        # change schema selection
+        for i, schema in enumerate(self.schemas):
+            if p['project_schema']['name'] in schema:
+                break
+        self.schema_combobox.setCurrentIndex(i)
+
+        disk_name, disk_path = self._build_disk_path()
+        for i, disk in enumerate(self.disks):
+            if disk_name in disk:
+                break
+        # change disk selection
+        self.disk_combobox.setCurrentIndex(i)
+
+        # change project_dir selection
+        if "{root}".format(**p):
+            self.project_dir.setPlaceholderText("{root}".format(**p))
+        else:
+            print("not root so it was replaced with name")
+            self.project_dir.setPlaceholderText("{name}".format(**p))
+            p['root'] = p['name']
+
+        # set project path to show where it will be created
+        self.project_path.setText(
+            os.path.join(self.disks[i].split(' ')[-1],
+                         self.project_dir.text()))
+
+        # change fps selection
+        self.fps.setPlaceholderText("{custom_attributes[fps]}".format(**p))
+
+        # change handles selection
+        self.handles.setPlaceholderText(
+            "{custom_attributes[handles]}".format(**p))
+
+        # change resolution selection
+        self.resolution_w.setPlaceholderText(
+            "{custom_attributes[resolution_width]}".format(**p))
+        self.resolution_h.setPlaceholderText(
+            "{custom_attributes[resolution_height]}".format(**p))
+
+        self.update_disk()
+
+        return p
+
+    def fix_project_path_literals(self, dir):
+        return dir.replace(' ', '_').lower()
+
+    def update_disk(self):
+        disk = self.disk_combobox.currentText().split(' ')[-1]
+
+        dir = self.project_dir.text()
+        if not dir:
+            dir = "{root}".format(**self.projects[self.new_index])
+            self.projects[self.new_index]['project_path'] = os.path.normpath(
+                self.fix_project_path_literals(os.path.join(disk, dir)))
+        else:
+            self.projects[self.new_index]['project_path'] = os.path.normpath(
+                self.fix_project_path_literals(os.path.join(disk, dir)))
+
+        self.projects[self.new_index]['disk'] = self.disks_from_ft[
+            self.disk_combobox.currentIndex()]
+        self.projects[self.new_index]['disk_id'] = self.projects[
+            self.new_index]['disk']['id']
+
+        # set project path to show where it will be created
+        self.project_path.setText(
+            self.projects[self.new_index]['project_path'])
+
+    def update_resolution(self):
+        # update all values in resolution
+        if self.resolution_w.text():
+            self.projects[self.new_index]['custom_attributes'][
+                'resolution_width'] = int(self.resolution_w.text())
+        if self.resolution_h.text():
+            self.projects[self.new_index]['custom_attributes'][
+                'resolution_height'] = int(self.resolution_h.text())
+
+    def _update_attributes_by_list_selection(self):
+        # generate actual selection index
+        self.new_index = self.listWidget.currentRow()
+        self.project_dir.setText('')
+        self.fps.setText('')
+        self.handles.setText('')
+        self.resolution_w.setText('')
+        self.resolution_h.setText('')
+
+        # update project properties widgets and write changes
+        # into project dictionaries
+        self.projects[self.new_index] = self._fill_project_attributes_widgets(
+            self.projects[self.new_index], self.new_index)
+
+        self.update_disk()
+
+    def _build_disk_path(self):
+        if self.platform == "Windows":
+            print(self.projects[self.index].keys())
+            print(self.projects[self.new_index]['disk'])
+            return self.projects[self.new_index]['disk'][
+                'name'], self.projects[self.new_index]['disk']['windows']
+        else:
+            return self.projects[self.new_index]['disk'][
+                'name'], self.projects[self.new_index]['disk']['unix']
+
+    def _get_all_schemas(self):
+        schemas_list = []
+
+        for s in self.schemas_from_ft:
+            # print d.keys()
+            # if 'Pokus' in s['name']:
+            #     continue
+            schemas_list.append('{}'.format(s['name']))
+        print("\nschemas in ftrack: {}\n".format(schemas_list))
+        return schemas_list
+
+    def _get_all_disks(self):
+        disks_list = []
+        for d in self.disks_from_ft:
+            # print d.keys()
+            if self.platform == "Windows":
+                if 'Local drive' in d['name']:
+                    d['windows'] = os.path.join(d['windows'],
+                                                os.getenv('USERNAME')
+                                                or os.getenv('USER')
+                                                or os.getenv('LOGNAME'))
+                disks_list.append('"{}" at {}'.format(d['name'], d['windows']))
+            else:
+                if 'Local drive' in d['name']:
+                    d['unix'] = os.path.join(d['unix'], getpass.getuser())
+                disks_list.append('"{}" at {}'.format(d['name'], d['unix']))
+        return disks_list
+
+    def _get_projects_ftrack(self):
+
+        projects_lst = []
+        for project in self.projects_from_ft:
+            # print project.keys()
+            projects_dict = {}
+
+            for k in project.keys():
+                ''' # TODO: delete this in production version '''
+
+                # if 'test' not in project['name']:
+                #     continue
+
+                # print '{}: {}\n'.format(k, project[k])
+
+                if '_link' == k:
+                    # print project[k]
+                    content = project[k]
+                    for kc in content[0].keys():
+                        if content[0]['name']:
+                            content[0][kc] = content[0][kc].encode(
+                                'ascii', 'ignore').decode('ascii')
+                        print('{}: {}\n'.format(kc, content[0][kc]))
+                    projects_dict[k] = content
+                    print(project[k])
+                    print(projects_dict[k])
+                elif 'root' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k]
+                elif 'disk' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k]
+                elif 'name' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k].encode(
+                        'ascii', 'ignore').decode('ascii')
+                elif 'disk_id' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k]
+                elif 'id' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k]
+                elif 'full_name' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k].encode(
+                        'ascii', 'ignore').decode('ascii')
+                elif 'project_schema_id' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k]
+                elif 'project_schema' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k]
+                elif 'custom_attributes' == k:
+                    print('{}: {}\n'.format(k, project[k]))
+                    projects_dict[k] = project[k]
+                else:
+                    pass
+
+            if projects_dict:
+                projects_lst.append(projects_dict)
+
+        return projects_lst
+
+
+class Project_name_get(Project_name_getUI):
+    def __init__(self, parent=None):
+        super(Project_name_get, self).__init__(parent)
+        # self.input_project_name.textChanged.connect(self.input_project_name.placeholderText)
+
+        self.set_pushbutton.clicked.connect(lambda: self.execute())
+        self.cancel_pushbutton.clicked.connect(self.close)
+
+        self.listWidget.itemSelectionChanged.connect(
+            self._update_attributes_by_list_selection)
+        self.disk_combobox.currentIndexChanged.connect(self.update_disk)
+        self.schema_combobox.currentIndexChanged.connect(self.update_schema)
+        self.project_dir.textChanged.connect(self.update_disk)
+        self.fps.textChanged.connect(self.update_fps)
+        self.handles.textChanged.connect(self.update_handles)
+        self.resolution_w.textChanged.connect(self.update_resolution)
+        self.resolution_h.textChanged.connect(self.update_resolution)
+
+    def update_handles(self):
+        self.projects[self.new_index]['custom_attributes']['handles'] = int(
+            self.handles.text())
+
+    def update_fps(self):
+        self.projects[self.new_index]['custom_attributes']['fps'] = int(
+            self.fps.text())
+
+    def update_schema(self):
+        self.projects[self.new_index]['project_schema'] = self.schemas_from_ft[
+            self.schema_combobox.currentIndex()]
+        self.projects[self.new_index]['project_schema_id'] = self.projects[
+            self.new_index]['project_schema']['id']
+
+    def execute(self):
+        # import ft_utils
+        # import hiero
+        # get the project which has been selected
+        print("well and what")
+        # set the project as context and create entity
+        # entity is a task created with the name of the user who is creating it
+
+        # get the project_path and create dir if there is not any
+        print(self.projects[self.new_index]['project_path'].replace(
+            self.disk_combobox.currentText().split(' ')[-1].lower(), ''))
+
+        # get the schema and recreate a starting project regarding the selection
+        # set_hiero_template(project_schema=self.projects[self.new_index][
+        #     'project_schema']['name'])
+
+        # set all project properties
+        # project = hiero.core.Project()
+        # project.setFramerate(
+        #     int(self.projects[self.new_index]['custom_attributes']['fps']))
+        # project.projectRoot()
+        # print 'handles: {}'.format(self.projects[self.new_index]['custom_attributes']['handles'])
+        # print 'resolution_width: {}'.format(self.projects[self.new_index]['custom_attributes']['resolution_width'])
+        # print 'resolution_width: {}'.format(self.projects[self.new_index]['custom_attributes']['resolution_height'])
+        # print "<< {}".format(self.projects[self.new_index])
+
+        # get path for the hrox file
+        # root = context.data('ftrackData')['Project']['root']
+        # hrox_script_path = ft_utils.getPathsYaml(taskid, templateList=templates, root=root)
+
+        # save the hrox into the correct path
+        self.session.commit()
+        self.close()
+
+#
+# def set_hiero_template(project_schema=None):
+#     import hiero
+#     hiero.core.closeAllProjects()
+#     hiero_plugin_path = [
+#         p for p in os.environ['HIERO_PLUGIN_PATH'].split(';')
+#         if 'hiero_plugin_path' in p
+#     ][0]
+#     path = os.path.normpath(
+#         os.path.join(hiero_plugin_path, 'Templates', project_schema + '.hrox'))
+#     print('---> path to template: {}'.format(path))
+#     return hiero.core.openProject(path)
+
+
+# def set_out_ft_session():
+#     session = ftrack_api.Session()
+#     projects_to_ft = session.query('Project where status is active')
+
+
+def main():
+    import sys
+    app = QtWidgets.QApplication(sys.argv)
+    panel = Project_name_get()
+    panel.show()
+
+    sys.exit(app.exec_())
+
+
+if __name__ == "__main__":
+    main()
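Note: the dialog above mirrors every widget edit back into plain Python dictionaries through Qt signals (textChanged, itemSelectionChanged, and so on). The same pattern in isolation, using the Qt shim the widget already imports (widget and key names are illustrative):

    from avalon.vendor.Qt import QtWidgets

    app = QtWidgets.QApplication([])

    line = QtWidgets.QLineEdit()
    state = {"fps": None}

    def on_fps_changed(text):
        # mirror the widget value into plain data, as update_fps() does
        state["fps"] = int(text) if text.isdigit() else None

    line.textChanged.connect(on_fps_changed)
    line.setText("25")
    print(state["fps"])  # 25
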
setAvalonAttributes(self, session): + + def setAvalonAttributes(self): self.custom_attributes = [] - all_avalon_attr = session.query('CustomAttributeGroup where name is "avalon"').one() + all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one() for cust_attr in all_avalon_attr['custom_attribute_configurations']: if 'avalon_' not in cust_attr['key']: self.custom_attributes.append(cust_attr) @@ -132,28 +135,6 @@ class SyncToAvalon(BaseAction): print("Name of {} was changed to {}".format(input_name, name)) return name - def getConfig(self, entity): - apps = [] - for app in entity['custom_attributes']['applications']: - try: - label = toml.load(lib.which_app(app))['label'] - apps.append({'name':app, 'label':label}) - except Exception as e: - print('Error with application {0} - {1}'.format(app, e)) - - config = { - 'schema': 'avalon-core:config-1.0', - 'tasks': [{'name': ''}], - 'apps': apps, - # TODO redo work!!! - 'template': { - 'workfile': '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>', - 'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}', - 'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'} - } - return config - - def importToAvalon(self, session, entity): eLinks = [] @@ -170,9 +151,6 @@ class SyncToAvalon(BaseAction): os.environ["AVALON_PROJECT"] = entityProj["full_name"] os.environ["AVALON_ASSET"] = entityProj['full_name'] - # Set project template - template = {"schema": "avalon-core:inventory-1.0"} - # --- Begin: PUSH TO Avalon --- io.install() ## ----- PROJECT ------ @@ -185,25 +163,28 @@ class SyncToAvalon(BaseAction): data['entityType'] = entity_type for cust_attr in self.custom_attributes: + key = cust_attr['key'] if cust_attr['entity_type'].lower() in ['asset']: - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] + data[key] = entity['custom_attributes'][key] elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] + data[key] = entity['custom_attributes'][key] elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': # Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build') - entity_type = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) + entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) # Get object id of entity type - ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type)).one()['id'] + ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id'] if cust_attr['object_type_id'] == ent_obj_type_id: - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] + data[key] = entity['custom_attributes'][key] - if entity.entity_type.lower() in ['project']: + if entity_type.lower() in ['project']: # Set project Config - config = self.getConfig(entity) + config = ftrack_utils.get_config(entity) + # Set project template + template = lib.get_avalon_project_template_schema() if avalon_project is None: inventory.save(entityProj['full_name'], config, template) @@ -233,7 +214,7 @@ class SyncToAvalon(BaseAction): ## ----- ASSETS ------ # Presets: # TODO how to check if entity is Asset Library or AssetBuild? 
- if entity.entity_type in ['AssetBuild', 'Library']: + if entity_type in ['AssetBuild', 'Library']: silo = 'Assets' else: silo = 'Film' diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index d8ede03503..4c09251b53 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1,18 +1,20 @@ import os import sys +import re import ftrack_api from ftrack_event_handler import BaseEvent -from avalon import io, inventory, lib +from pype import lib +from avalon import io, inventory from avalon.vendor import toml -import re from bson.objectid import ObjectId +from pype.ftrack import ftrack_utils class Sync_to_Avalon(BaseEvent): def launch(self, session, entities, event): self.ca_mongoid = 'avalon_mongo_id' - self.proj = None + for entity in entities: try: base_proj = entity['link'][0] @@ -37,12 +39,9 @@ class Sync_to_Avalon(BaseEvent): io.uninstall() self.importEntities = [] - exceptions = ['assetversion', 'job', 'user'] for entity in entities: - if entity.entity_type.lower() in exceptions: - continue - elif entity.entity_type.lower() in ['task']: + if entity.entity_type.lower() in ['task']: entity = entity['parent'] try: mongo_id = entity['custom_attributes'][self.ca_mongoid] @@ -72,46 +71,48 @@ class Sync_to_Avalon(BaseEvent): def importToAvalon(self, entity): data = {} + entity_type = entity.entity_type + type = 'asset' name = entity['name'] silo = 'Film' - if entity.entity_type == 'Project': + if entity_type in ['Project']: type = 'project' name = entity['full_name'] data['code'] = entity['name'] - elif entity.entity_type in ['AssetBuild', 'Library']: + elif entity_type in ['AssetBuild', 'Library']: silo = 'Assets' os.environ["AVALON_ASSET"] = name os.environ["AVALON_SILO"] = silo - entity_type = entity.entity_type - data['ftrackId'] = entity['id'] data['entityType'] = entity_type for cust_attr in self.custom_attributes: + key = cust_attr['key'] if cust_attr['entity_type'].lower() in ['asset']: - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] + data[key] = entity['custom_attributes'][key] elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] + data[key] = entity['custom_attributes'][key] elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': # Put space between capitals (e.g. 
'AssetBuild' -> 'Asset Build') - entity_type = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) + entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) # Get object id of entity type - ent_obj_type_id = self.session.query('ObjectType where name is "{}"'.format(entity_type)).one()['id'] + ent_obj_type_id = self.session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id'] if cust_attr['object_type_id'] == ent_obj_type_id: - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] + data[key] = entity['custom_attributes'][key] mongo_id = entity['custom_attributes'][self.ca_mongoid] - if entity_type in ['project']: - config = self.getConfig() - template = {"schema": "avalon-core:inventory-1.0"} + if entity_type.lower() in ['project']: + + config = ftrack_utils.get_config(entity) + template = lib.get_avalon_project_template_schema() if self.avalon_project is None: mongo_id = inventory.save(self.proj['full_name'], config, template) @@ -131,6 +132,10 @@ class Sync_to_Avalon(BaseEvent): }}) return + + if self.avalon_project is None: + self.importToAvalon(self.proj) + eLinks = [] for e in entity['link']: tmp = self.session.get(e['type'], e['id']) @@ -161,13 +166,12 @@ class Sync_to_Avalon(BaseEvent): data['visualParent'] = parentId data['hierarchy'] = hierarchy - if self.avalon_project is None: - self.importToAvalon(self.proj) - avalon_asset = io.find_one({'_id': ObjectId(mongo_id)}) if avalon_asset is None: avalon_asset = io.find_one({'type': type, 'name': name}) - if avalon_asset is None: + if avalon_asset is None: + mongo_id = inventory.create_asset(name, silo, data, self.projectId) + elif avalon_asset['name'] != name: mongo_id = inventory.create_asset(name, silo, data, self.projectId) io.update_many( @@ -176,7 +180,7 @@ class Sync_to_Avalon(BaseEvent): 'name':name, 'silo':silo, 'data':data, - 'Parent': self.projectId}}) + 'parent': self.projectId}}) def checkName(self, input_name): @@ -187,27 +191,6 @@ class Sync_to_Avalon(BaseEvent): print("Name of {} was changed to {}".format(input_name, name)) return name - def getConfig(self, entity): - apps = [] - for app in entity['custom_attributes']['applications']: - try: - label = toml.load(lib.which_app(app))['label'] - apps.append({'name':app, 'label':label}) - except Exception as e: - print('Error with application {0} - {1}'.format(app, e)) - - config = { - 'schema': 'avalon-core:config-1.0', - 'tasks': [{'name': ''}], - 'apps': apps, - # TODO redo work!!! 
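+        # NOTE: these hardcoded patterns are superseded by the anatomy
+        # templates (see get_avalon_project_template() in pype/lib.py in this
+        # patch), roughly:
+        #   template = Templates(type=["anatomy"])
+        #   workfile = template.anatomy.avalon.workfile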
- 'template': { - 'workfile': '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>', - 'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}', - 'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'} - } - return config - def setAvalonAttributes(self): self.custom_attributes = [] all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one() @@ -215,6 +198,22 @@ class Sync_to_Avalon(BaseEvent): if 'avalon_' not in cust_attr['key']: self.custom_attributes.append(cust_attr) + def _translate_event(self, session, event): + exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog'] + _selection = event['data'].get('entities',[]) + + _entities = list() + for entity in _selection: + if entity['entityType'] in exceptions: + continue + _entities.append( + ( + session.get(self._get_entity_type(entity), entity.get('entityId')) + ) + ) + + return [_entities, event] + def register(session, **kw): '''Register plugin. Called when used as an plugin.''' diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py index 775d6c07c2..a555a5324a 100644 --- a/pype/ftrack/events/ftrack_event_handler.py +++ b/pype/ftrack/events/ftrack_event_handler.py @@ -59,7 +59,6 @@ class BaseEvent(object): _entities.append( ( session.get(self._get_entity_type(entity), entity.get('entityId')) - # self._get_entity_type(entity), entity.get('entityId') ) ) diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/test_event.py index 7e5fe6d903..bf15928f98 100644 --- a/pype/ftrack/events/test_event.py +++ b/pype/ftrack/events/test_event.py @@ -1,26 +1,26 @@ -# import ftrack_api as local session +import os +import sys import ftrack_api -# -session = ftrack_api.Session() - -# ---------------------------------- +from ftrack_event_handler import BaseEvent -def test_event(event): - '''just a testing event''' +class Test_Event(BaseEvent): - # start of event procedure ---------------------------------- - for entity in event['data'].get('entities', []): - if entity['entityType'] == 'task' and entity['action'] == 'update': + def launch(self, session, entities, event): + '''just a testing event''' + exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog'] + selection = event['data'].get('entities',[]) + for entity in selection: + if entity['entityType'] in exceptions: + print(100*"*") + print(entity) - print("\n\nevent script: {}".format(__file__)) - # for k in task.keys(): - # print k, task[k] - # print '\n' - # print task['assignments'] +def register(session, **kw): + '''Register plugin. 
Called when used as an plugin.''' - for e in entity.keys(): - print('{0}: {1}'.format(e, entity[e])) + if not isinstance(session, ftrack_api.session.Session): + return - # end of event procedure ---------------------------------- + event = Test_Event(session) + event.register() diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index 23531a9fdd..68e83a9e6e 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -2,13 +2,36 @@ import ftrack_api import os +import traceback from pprint import * +from pype import lib +def get_apps(entity): + """ Get apps from project + Requirements: + 'Entity' MUST be object of ftrack entity with entity_type 'Project' + Checking if app from ftrack is available in Templates/bin/{app_name}.toml -def checkLogin(): - # check Environments FTRACK_API_USER, FTRACK_API_KEY - pass + Returns: + Array with dictionaries with app Name and Label + """ + apps = [] + for app in entity['custom_attributes']['applications']: + try: + label = toml.load(lib.which_app(app))['label'] + apps.append({'name':app, 'label':label}) + except Exception as e: + print('Error with application {0} - {1}'.format(app, e)) + return apps +def get_config(self, entity): + config = {} + config['schema'] = lib.get_avalon_project_config_schema() + config['tasks'] = [{'name': ''}] + config['apps'] = get_apps(entity) + config['template'] = lib.get_avalon_project_template() + + return config def checkRegex(): # _handle_result -> would be solution? diff --git a/pype/lib.py b/pype/lib.py index 3ce1441e3d..34bdda2a17 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -335,3 +335,29 @@ def get_asset_data(asset=None): data = document.get("data", {}) return data + +def get_avalon_project_config_schema(): + schema = 'avalon-core:config-1.0' + return schema + +def get_avalon_project_template_schema(): + schema = {"schema": "avalon-core:inventory-1.0"} + return schema + +def get_avalon_project_template(): + from app.api import Templates + + """Get avalon template + + Returns: + dictionary with templates + """ + template = Templates(type=["anatomy"]) + proj_template = {} + # proj_template['workfile'] = '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>' + # proj_template['work'] = '{root}/{project}/{hierarchy}/{asset}/work/{task}' + # proj_template['publish'] = '{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}' + proj_template['workfile'] = template.anatomy.avalon.workfile + proj_template['work'] = template.anatomy.avalon.work + proj_template['publish'] = template.anatomy.avalon.publish + return proj_template diff --git a/pype/utils/__init__.py b/pype/utils/__init__.py index 318d875f60..e69de29bb2 100644 --- a/pype/utils/__init__.py +++ b/pype/utils/__init__.py @@ -1,98 +0,0 @@ -from .lib import * - - -def load_capture_preset(path): - import capture_gui - import capture - - path = path - preset = capture_gui.lib.load_json(path) - print preset - - options = dict() - - # CODEC - id = 'Codec' - for key in preset[id]: - options[str(key)] = preset[id][key] - - # GENERIC - id = 'Generic' - for key in preset[id]: - if key.startswith('isolate'): - pass - # options['isolate'] = preset[id][key] - else: - options[str(key)] = preset[id][key] - - # RESOLUTION - id = 'Resolution' - options['height'] = preset[id]['height'] - options['width'] = preset[id]['width'] - - # DISPLAY OPTIONS - id = 'Display Options' - disp_options = {} - for key in preset['Display Options']: - if key.startswith('background'): - 
disp_options[key] = preset['Display Options'][key] - else: - disp_options['displayGradient'] = True - - options['display_options'] = disp_options - - # VIEWPORT OPTIONS - temp_options = {} - id = 'Renderer' - for key in preset[id]: - temp_options[str(key)] = preset[id][key] - - temp_options2 = {} - id = 'Viewport Options' - light_options = {0: "default", - 1: 'all', - 2: 'selected', - 3: 'flat', - 4: 'nolights'} - for key in preset[id]: - if key == 'high_quality': - temp_options2['multiSampleEnable'] = True - temp_options2['multiSampleCount'] = 4 - temp_options2['textureMaxResolution'] = 512 - temp_options2['enableTextureMaxRes'] = True - - if key == 'alphaCut': - temp_options2['transparencyAlgorithm'] = 5 - temp_options2['transparencyQuality'] = 1 - - if key == 'headsUpDisplay': - temp_options['headsUpDisplay'] = True - - if key == 'displayLights': - temp_options[str(key)] = light_options[preset[id][key]] - else: - temp_options[str(key)] = preset[id][key] - - for key in ['override_viewport_options', 'high_quality', 'alphaCut']: - temp_options.pop(key, None) - - options['viewport_options'] = temp_options - options['viewport2_options'] = temp_options2 - - # use active sound track - scene = capture.parse_active_scene() - options['sound'] = scene['sound'] - cam_options = dict() - cam_options['overscan'] = 1.0 - cam_options['displayFieldChart'] = False - cam_options['displayFilmGate'] = False - cam_options['displayFilmOrigin'] = False - cam_options['displayFilmPivot'] = False - cam_options['displayGateMask'] = False - cam_options['displayResolution'] = False - cam_options['displaySafeAction'] = False - cam_options['displaySafeTitle'] = False - - # options['display_options'] = temp_options - - return options diff --git a/pype/utils/lib.py b/pype/utils/lib.py index 8fdc1a4455..8b7be1a3fe 100644 --- a/pype/utils/lib.py +++ b/pype/utils/lib.py @@ -105,3 +105,99 @@ def filter_instances(context, plugin): instances = pyblish.api.instances_by_plugin(allInstances, plugin) return instances + +def load_capture_preset(path): + import capture_gui + import capture + + path = path + preset = capture_gui.lib.load_json(path) + print preset + + options = dict() + + # CODEC + id = 'Codec' + for key in preset[id]: + options[str(key)] = preset[id][key] + + # GENERIC + id = 'Generic' + for key in preset[id]: + if key.startswith('isolate'): + pass + # options['isolate'] = preset[id][key] + else: + options[str(key)] = preset[id][key] + + # RESOLUTION + id = 'Resolution' + options['height'] = preset[id]['height'] + options['width'] = preset[id]['width'] + + # DISPLAY OPTIONS + id = 'Display Options' + disp_options = {} + for key in preset['Display Options']: + if key.startswith('background'): + disp_options[key] = preset['Display Options'][key] + else: + disp_options['displayGradient'] = True + + options['display_options'] = disp_options + + # VIEWPORT OPTIONS + temp_options = {} + id = 'Renderer' + for key in preset[id]: + temp_options[str(key)] = preset[id][key] + + temp_options2 = {} + id = 'Viewport Options' + light_options = {0: "default", + 1: 'all', + 2: 'selected', + 3: 'flat', + 4: 'nolights'} + for key in preset[id]: + if key == 'high_quality': + temp_options2['multiSampleEnable'] = True + temp_options2['multiSampleCount'] = 4 + temp_options2['textureMaxResolution'] = 512 + temp_options2['enableTextureMaxRes'] = True + + if key == 'alphaCut': + temp_options2['transparencyAlgorithm'] = 5 + temp_options2['transparencyQuality'] = 1 + + if key == 'headsUpDisplay': + temp_options['headsUpDisplay'] = True + + if 
key == 'displayLights': + temp_options[str(key)] = light_options[preset[id][key]] + else: + temp_options[str(key)] = preset[id][key] + + for key in ['override_viewport_options', 'high_quality', 'alphaCut']: + temp_options.pop(key, None) + + options['viewport_options'] = temp_options + options['viewport2_options'] = temp_options2 + + # use active sound track + scene = capture.parse_active_scene() + options['sound'] = scene['sound'] + cam_options = dict() + cam_options['overscan'] = 1.0 + cam_options['displayFieldChart'] = False + cam_options['displayFilmGate'] = False + cam_options['displayFilmOrigin'] = False + cam_options['displayFilmPivot'] = False + cam_options['displayGateMask'] = False + cam_options['displayResolution'] = False + cam_options['displaySafeAction'] = False + cam_options['displaySafeTitle'] = False + + # options['display_options'] = temp_options + + return options From 484d674b423490264681fd01ab060f98f38b653b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 27 Nov 2018 08:27:49 +0100 Subject: [PATCH 16/78] adding menu.py and colorspace setting --- pype/nuke/__init__.py | 74 +++++++++++++------- pype/nuke/lib.py | 61 +++++++++++++++- pype/nuke/menu.py | 12 ++++ pype/plugins/nuke/create/create_write_exr.py | 7 +- 4 files changed, 128 insertions(+), 26 deletions(-) create mode 100644 pype/nuke/menu.py diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 8e3c33f9c3..97c9d714fc 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -2,20 +2,22 @@ import os import sys from avalon import api as avalon from pyblish import api as pyblish -from pype.api import Logger -# import logging +from ..api import Logger +from pype.nuke import menu + import nuke # removing logger handler created in avalon_core -loggers = [handler - for handler in Logger.logging.root.handlers[:]] - -if len(loggers) > 2: - Logger.logging.root.removeHandler(loggers[0]) +for name, handler in [(handler.get_name(), handler) + for handler in Logger.logging.root.handlers[:]]: + if "pype" not in str(name).lower(): + Logger.logging.root.removeHandler(handler) log = Logger.getLogger(__name__, "nuke") +AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") + PARENT_DIR = os.path.dirname(__file__) PACKAGE_DIR = os.path.dirname(PARENT_DIR) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") @@ -38,35 +40,57 @@ class NukeHandler(Logger.logging.Handler): def __init__(self): Logger.logging.Handler.__init__(self) + self.set_name("Pype_Nuke_Handler") def emit(self, record): # Formated message: msg = self.format(record) - # if record.levelname.lower() is "warning": - # nuke.warning(msg) - - elif record.levelname.lower() in ["critical", "fatal", "error"]: - nuke.message(record.message) - - # elif record.levelname.lower() is "info": - # log.info(msg) - # - # elif record.levelname.lower() is "debug": - # log.debug(msg) - - # else: - # sys.stdout.write(msg) + if record.levelname.lower() in [ + "warning", + "critical", + "fatal", + "error" + ]: + nuke.message(msg) +'''Adding Nuke Logging Handler''' nuke_handler = NukeHandler() -log.addHandler(nuke_handler) +if nuke_handler.get_name() \ + not in [handler.get_name() + for handler in Logger.logging.root.handlers[:]]: + Logger.logging.getLogger().addHandler(nuke_handler) + if not self.nLogger: - self.nLogger = log + self.nLogger = Logger + + +def reload_config(): + """Attempt to reload pipeline at run-time. + + CAUTION: This is primarily for development and debugging purposes. 
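+
+    Example (illustrative; run e.g. from Nuke's Script Editor):
+
+        from pype.nuke import reload_config
+        reload_config()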
+ + """ + + import importlib + + for module in ( + "{}".format(AVALON_CONFIG), + "{}.nuke".format(AVALON_CONFIG), + "{}.nuke.lib".format(AVALON_CONFIG), + "{}.nuke.menu".format(AVALON_CONFIG) + ): + log.info("Reloading module: {}...".format(module)) + module = importlib.import_module(module) + try: + reload(module) + except Exception: + importlib.reload(module) def install(): - + reload_config() log.info("Registering Nuke plug-ins..") pyblish.register_plugin_path(PUBLISH_PATH) avalon.register_plugin_path(avalon.Loader, LOAD_PATH) @@ -86,6 +110,8 @@ def install(): avalon.data["familiesStateDefault"] = False avalon.data["familiesStateToggled"] = family_states + menu.install() + def uninstall(): log.info("Deregistering Nuke plug-ins..") diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 3971b7c977..7a30abad94 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1,10 +1,13 @@ import sys - +from pprint import pprint from avalon.vendor.Qt import QtGui import avalon.nuke +from app.api import Logger + import nuke +log = Logger.getLogger(__name__, "nuke") self = sys.modules[__name__] self._project = None @@ -58,3 +61,59 @@ def get_additional_data(container): ] return {"color": QtGui.QColor().fromRgbF(rgba[0], rgba[1], rgba[2])} + + +def check_viewers(viewer): + filter_knobs = [ + "viewerProcess", + "wipe_position" + ] + viewers = [n for n in nuke.allNodes() if n.Class() == 'Viewer'] + erased_viewers = [] + + for v in viewers: + v['viewerProcess'].setValue(str(viewer.viewerProcess)) + if str(viewer.viewerProcess) not in v['viewerProcess'].value(): + copy_inputs = v.dependencies() + copy_knobs = {k: v[k].value() for k in v.knobs() + if k not in filter_knobs} + pprint(copy_knobs) + # delete viewer with wrong settings + erased_viewers.append(v['name'].value()) + nuke.delete(v) + + # create new viewer + nv = nuke.createNode("Viewer") + + # connect to original inputs + for i, n in enumerate(copy_inputs): + nv.setInput(i, n) + + # set coppied knobs + for k, v in copy_knobs.items(): + print(k, v) + nv[k].setValue(v) + + # set viewerProcess + nv['viewerProcess'].setValue(str(viewer.viewerProcess)) + + if erased_viewers: + log.warning( + "Attention! Viewer nodes {} were erased." + "It had wrong color profile".format(erased_viewers)) + + +def set_colorspace(): + from app.api import Templates + + t = Templates(type=["colorspace"]) + colorspace = t.colorspace + + nuke_colorspace = getattr(colorspace, "nuke", None) + check_viewers(nuke_colorspace.viewer) + try: + for key in nuke_colorspace: + log.info("{}".format(key)) + except TypeError: + log.error("Nuke is not in templates! 
\n\n\n" + "contact your supervisor!") diff --git a/pype/nuke/menu.py b/pype/nuke/menu.py new file mode 100644 index 0000000000..97e2432e16 --- /dev/null +++ b/pype/nuke/menu.py @@ -0,0 +1,12 @@ +import nuke +from avalon.api import Session + +from pype.nuke import lib + + +def install(): + menubar = nuke.menu("Nuke") + menu = menubar.findItem(Session["AVALON_LABEL"]) + + menu.addSeparator() + menu.addCommand("Set colorspace...", lib.set_colorspace) diff --git a/pype/plugins/nuke/create/create_write_exr.py b/pype/plugins/nuke/create/create_write_exr.py index b77b5735e8..8aa9ba5310 100644 --- a/pype/plugins/nuke/create/create_write_exr.py +++ b/pype/plugins/nuke/create/create_write_exr.py @@ -1,7 +1,9 @@ import os +from pprint import pprint import avalon.api import avalon.nuke import nuke +from app import api class CrateWriteExr(avalon.api.Creator): @@ -12,12 +14,15 @@ class CrateWriteExr(avalon.api.Creator): icon = "sign-out" # TODO: create container of metadata into user knob # TODO: if write node selected it will add metadata - + # def __init__(self, *args, **kwargs): # super(CrateWriteExr, self).__init__(*args, **kwargs) # self.data.setdefault("subset", "this") def process(self): + templates = api.Templates(type=["dataflow", "metadata", "colorspace"]) + templates = templates.format() + pprint(templates) # nuke = getattr(sys.modules["__main__"], "nuke", None) data = {} ext = "exr" From 01cf4230ed131f7842f8ea62c2e1a3a13359a937 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 27 Nov 2018 09:43:47 +0100 Subject: [PATCH 17/78] Creating Backup --- pype/ftrack/events/ftrack_event_handler.py | 9 ++++++--- pype/ftrack/events/test_event.py | 3 +-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py index a555a5324a..10a2eff0d1 100644 --- a/pype/ftrack/events/ftrack_event_handler.py +++ b/pype/ftrack/events/ftrack_event_handler.py @@ -3,7 +3,7 @@ import os import logging import getpass -import platform +# import platform import ftrack_api import toml from avalon import io, lib, pipeline @@ -33,7 +33,7 @@ class BaseEvent(object): def __init__(self, session): '''Expects a ftrack_api.Session instance''' - self.logger = logging.getLogger( + self.logger = Logger.getLogger( '{0}.{1}'.format(__name__, self.__class__.__name__) ) @@ -61,7 +61,10 @@ class BaseEvent(object): session.get(self._get_entity_type(entity), entity.get('entityId')) ) ) - + try: + if _entities[0]['project'].entity_type in ['project']: + _entities = None + _entities = list() return [ _entities, event diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/test_event.py index bf15928f98..7839168970 100644 --- a/pype/ftrack/events/test_event.py +++ b/pype/ftrack/events/test_event.py @@ -2,7 +2,7 @@ import os import sys import ftrack_api from ftrack_event_handler import BaseEvent - +from app import api class Test_Event(BaseEvent): @@ -18,7 +18,6 @@ class Test_Event(BaseEvent): def register(session, **kw): '''Register plugin. 
Called when used as a plugin.'''
-
     if not isinstance(session, ftrack_api.session.Session):
         return

     event = Test_Event(session)
     event.register()

From e58948f5ba09e79239377e24e96c16e171c15ef3 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 27 Nov 2018 14:24:08 +0100
Subject: [PATCH 18/78] update nuke.lib.set_colorspace()

---
 pype/nuke/lib.py | 38 ++++++++++++++++++++++++++++++++++--
 1 file changed, 36 insertions(+), 2 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 7a30abad94..71534b92de 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -63,7 +63,10 @@ def get_additional_data(container):
     return {"color": QtGui.QColor().fromRgbF(rgba[0], rgba[1], rgba[2])}


-def check_viewers(viewer):
+def set_viewers_colorspace(viewer):
+    assert isinstance(viewer, dict), log.error(
+        "set_viewers_colorspace(): argument should be dictionary")
+
     filter_knobs = [
         "viewerProcess",
         "wipe_position"
@@ -103,6 +106,21 @@ def set_viewers_colorspace(viewer):
             "It had wrong color profile".format(erased_viewers))


+def set_root_colorspace(root_dict):
+    assert isinstance(root_dict, dict), log.error(
+        "set_root_colorspace(): argument should be dictionary")
+    for knob, value in root_dict.items():
+        if nuke.root()[knob].value() not in value:
+            nuke.root()[knob].setValue(str(value))
+            log.info("nuke.root()['{}'] changed to: {}".format(knob, value))
+
+
+def set_writes_colorspace(write_dict):
+    assert isinstance(write_dict, dict), log.error(
+        "set_writes_colorspace(): argument should be dictionary")
+    log.info("set_writes_colorspace(): {}".format(write_dict))
+
+
 def set_colorspace():
     from app.api import Templates

@@ -110,7 +128,23 @@ def set_colorspace():
     colorspace = t.colorspace

     nuke_colorspace = getattr(colorspace, "nuke", None)
-    check_viewers(nuke_colorspace.viewer)
+
+    try:
+        set_root_colorspace(nuke_colorspace.root)
+    except AttributeError:
+        log.error(
+            "set_colorspace(): missing `root` settings in template")
+    try:
+        set_viewers_colorspace(nuke_colorspace.viewer)
+    except AttributeError:
+        log.error(
+            "set_colorspace(): missing `viewer` settings in template")
+    try:
+        set_writes_colorspace(nuke_colorspace.write)
+    except AttributeError:
+        log.error(
+            "set_colorspace(): missing `write` settings in template")
+
     try:
         for key in nuke_colorspace:
             log.info("{}".format(key))

From 5aa0edffd499a544ea0372dbf53aa276f3fbb411 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Tue, 27 Nov 2018 19:19:17 +0100
Subject: [PATCH 19/78] Events work but don't show message to user

---
 pype/ftrack/actions/action_syncToAvalon.py |  24 -----
 pype/ftrack/events/event_sync_to_avalon.py | 113 ++++++++++++++-------
 pype/ftrack/events/ftrack_event_handler.py |  19 ++--
 pype/ftrack/events/test_event.py           |  10 +-
 pype/ftrack/ftrack_utils.py                |   2 +-
 5 files changed, 96 insertions(+), 72 deletions(-)

diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py
index 1c3cc25264..e2cf4e07f8 100644
--- a/pype/ftrack/actions/action_syncToAvalon.py
+++ b/pype/ftrack/actions/action_syncToAvalon.py
@@ -134,31 +134,7 @@ class SyncToAvalon(BaseAction):
         self.log.info("Name of {} was changed to {}".format(input_name, name))
         return name

-<<<<<<< HEAD
-=======
-    def getConfig(self, entity):
-        apps = []
-        for app in entity['custom_attributes']['applications']:
-            try:
-                label = toml.load(lib.which_app(app))['label']
-                apps.append({'name':app, 'label':label})
-            except Exception as e:
-                self.log.error('Error with application {0} - {1}'.format(app, e))
-
-        config = {
-            'schema': 'avalon-core:config-1.0',
-            'tasks': [{'name': ''}],
-            'apps': apps,
-            # TODO
redo work!!! - 'template': { - 'workfile': '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>', - 'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}', - 'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'} - } - return config - - ->>>>>>> develop def importToAvalon(self, session, entity): eLinks = [] diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 4c09251b53..c4c0db0d55 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -13,7 +13,12 @@ class Sync_to_Avalon(BaseEvent): def launch(self, session, entities, event): self.ca_mongoid = 'avalon_mongo_id' + for ent in event['data']['entities']: + if self.ca_mongoid in ent['keys']: + return False self.proj = None + self.nameShotAsset = [] + self.nameChanged = [] for entity in entities: try: @@ -24,22 +29,28 @@ class Sync_to_Avalon(BaseEvent): break if self.proj is None: - return + return False os.environ["AVALON_PROJECT"] = self.proj['full_name'] - proj_id = self.proj['custom_attributes'][self.ca_mongoid] + self.projectId = self.proj['custom_attributes'][self.ca_mongoid] io.install() - self.avalon_project = io.find({"_id": ObjectId(proj_id)}) - self.projectId = proj_id + try: + self.avalon_project = io.find_one({"_id": ObjectId(self.projectId)}) + except: + self.avalon_project = None + + importEntities = [] + if self.avalon_project is None: self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]}) - self.projectId = self.avalon_project['_id'] + if self.avalon_project is None: + importEntities.append(self.proj) + else: + self.projectId = self.avalon_project['_id'] io.uninstall() - self.importEntities = [] - for entity in entities: if entity.entity_type.lower() in ['task']: entity = entity['parent'] @@ -51,21 +62,36 @@ class Sync_to_Avalon(BaseEvent): 'message': "Please run 'Create Attributes' action or create custom attribute 'avalon_mongo_id' manually for {}".format(entity.entity_type) } - if entity not in self.importEntities: - self.importEntities.append(entity) + if entity not in importEntities: + importEntities.append(entity) - if len(self.importEntities) < 1: - return + if len(importEntities) < 1: + return False self.setAvalonAttributes() io.install() - - for entity in self.importEntities: + for entity in importEntities: self.importToAvalon(entity) io.uninstall() + message = "" + if len(self.nameChanged) > 0: + names = ", ".join(self.nameChanged) + message += "These entities name can't be changed in avalon, please reset DB or use restore action: {} \n".format(names) + if len(self.nameShotAsset) > 0: + names = ", ".join(self.nameChanged) + message += "These entities are already used in avalon, duplicates with new name were created: {}".format(names) + + session.commit() + + if message != "": + return { + 'success': False, + 'message': message + } + return True def importToAvalon(self, entity): @@ -108,28 +134,28 @@ class Sync_to_Avalon(BaseEvent): mongo_id = entity['custom_attributes'][self.ca_mongoid] - - if entity_type.lower() in ['project']: - + if entity_type in ['Project']: config = ftrack_utils.get_config(entity) template = lib.get_avalon_project_template_schema() if self.avalon_project is None: - mongo_id = inventory.save(self.proj['full_name'], config, template) + inventory.save(name, config, template) + self.avalon_project = io.find_one({'type': 'project', 'name': name}) - self.avalon_project = 
io.find({"_id": ObjectId(mongo_id)}) - self.projectId = mongo_id - if self.avalon_project is None: - self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]}) - self.projectId = self.avalon_project['_id'] + self.projectId = self.avalon_project['_id'] + data['code'] = entity['name'] io.update_many( - {"_id": ObjectId(mongo_id)}, + {"_id": ObjectId(self.projectId)}, {'$set':{ 'name':name, 'config':config, 'data':data, }}) + try: + entity['custom_attributes'][self.ca_mongoid] = str(self.projectId) + except Exception as e: + self.log.error(e) return @@ -148,31 +174,45 @@ class Sync_to_Avalon(BaseEvent): folderStruct = [] parents = [] + parentId = None + for i in range(1, len(eLinks)-1): parents.append(eLinks[i]) for parent in parents: parname = self.checkName(parent['name']) folderStruct.append(parname) - parentId = io.find_one({'type': 'asset', 'name': parname})['_id'] - if parent['parent'].entity_type != 'project' and parentId is None: + avalonAarent = io.find_one({'type': 'asset', 'name': parname}) + if parent['parent'].entity_type != 'project' and avalonAarent is None: self.importToAvalon(parent) - parentId = io.find_one({'type': 'asset', 'name': parname})['_id'] + parentId = io.find_one({'type': 'asset', 'name': parname})['_id'] hierarchy = os.path.sep.join(folderStruct) data['tasks'] = tasks - data['parents'] = folderStruct - data['visualParent'] = parentId - data['hierarchy'] = hierarchy + if parentId is not None: + data['parents'] = folderStruct + data['visualParent'] = parentId + data['hierarchy'] = hierarchy + + avalon_asset = None + + if mongo_id is not "": + avalon_asset = io.find_one({'_id': ObjectId(mongo_id)}) - avalon_asset = io.find_one({'_id': ObjectId(mongo_id)}) if avalon_asset is None: - avalon_asset = io.find_one({'type': type, 'name': name}) + avalon_asset = io.find_one({'type': 'asset', 'name': name}) if avalon_asset is None: - mongo_id = inventory.create_asset(name, silo, data, self.projectId) - elif avalon_asset['name'] != name: - mongo_id = inventory.create_asset(name, silo, data, self.projectId) + mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId)) + else: + if name != avalon_asset['name']: + string = "'{}->{}'".format(name, avalon_asset['name']) + if entity_type in ['Shot','AssetBuild']: + self.nameShotAsset.append(string) + mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId)) + else: + self.nameChanged.append(string) + return io.update_many( {"_id": ObjectId(mongo_id)}, @@ -182,6 +222,11 @@ class Sync_to_Avalon(BaseEvent): 'data':data, 'parent': self.projectId}}) + try: + entity['custom_attributes'][self.ca_mongoid] = str(mongo_id) + except Exception as e: + self.log.error(e) + def checkName(self, input_name): if input_name.find(" ") == -1: diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py index 10a2eff0d1..009c2b2d57 100644 --- a/pype/ftrack/events/ftrack_event_handler.py +++ b/pype/ftrack/events/ftrack_event_handler.py @@ -33,9 +33,7 @@ class BaseEvent(object): def __init__(self, session): '''Expects a ftrack_api.Session instance''' - self.logger = Logger.getLogger( - '{0}.{1}'.format(__name__, self.__class__.__name__) - ) + self.log = Logger.getLogger(self.__class__.__name__) self._session = session @@ -61,10 +59,7 @@ class BaseEvent(object): session.get(self._get_entity_type(entity), entity.get('entityId')) ) ) - try: - if _entities[0]['project'].entity_type in ['project']: - _entities = None - _entities = list() + return [ 
_entities, event @@ -98,6 +93,14 @@ class BaseEvent(object): self.session, event ) + # TODO REMOVE THIS - ONLY FOR TEST PROJECT + for a in args[0]: + try: + if (a['project']['name'] != 'eventproj'): + return True + except: + continue + response = self.launch( self.session, *args ) @@ -150,7 +153,7 @@ class BaseEvent(object): ) else: - self.logger.error( + self.log.error( 'Invalid result type must be bool or dictionary!' ) diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/test_event.py index 7839168970..aa3bbb6e0d 100644 --- a/pype/ftrack/events/test_event.py +++ b/pype/ftrack/events/test_event.py @@ -1,5 +1,6 @@ import os import sys +import re import ftrack_api from ftrack_event_handler import BaseEvent from app import api @@ -7,13 +8,12 @@ from app import api class Test_Event(BaseEvent): def launch(self, session, entities, event): + '''just a testing event''' exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog'] - selection = event['data'].get('entities',[]) - for entity in selection: - if entity['entityType'] in exceptions: - print(100*"*") - print(entity) + + + return True def register(session, **kw): diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index 68e83a9e6e..7891aae0b9 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -24,7 +24,7 @@ def get_apps(entity): print('Error with application {0} - {1}'.format(app, e)) return apps -def get_config(self, entity): +def get_config(entity): config = {} config['schema'] = lib.get_avalon_project_config_schema() config['tasks'] = [{'name': ''}] From ddcb6f88deaf379dfb9852967ea1103c6715f05d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 27 Nov 2018 23:41:00 +0100 Subject: [PATCH 20/78] adding pype.templates for improving loading contextual templates / implement templates installing into pype.nuke.__init__ --- pype/__init__.py | 10 ++++++++-- pype/api.py | 19 +++++++++++++++++++ pype/nuke/__init__.py | 30 ++++++++++++++++++++---------- pype/nuke/lib.py | 7 ++----- pype/templates.py | 34 ++++++++++++++++++++++++++++++++++ 5 files changed, 83 insertions(+), 17 deletions(-) create mode 100644 pype/templates.py diff --git a/pype/__init__.py b/pype/__init__.py index d45ba59e74..8bd31c060d 100644 --- a/pype/__init__.py +++ b/pype/__init__.py @@ -6,8 +6,14 @@ from avalon import api as avalon from .launcher_actions import register_launcher_actions from .lib import collect_container_metadata -from pype.api import Logger -log = Logger.getLogger(__name__) +import logging +log = logging.getLogger(__name__) + +# do not delete these are mandatory +Anatomy = None +Dataflow = None +Metadata = None +Colorspace = None PACKAGE_DIR = os.path.dirname(__file__) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") diff --git a/pype/api.py b/pype/api.py index c26d29d707..0100d913a6 100644 --- a/pype/api.py +++ b/pype/api.py @@ -17,6 +17,16 @@ from .action import ( from app.api import Logger +from . 
import ( + Anatomy, + Colorspace, + Metadata, + Dataflow +) +from .templates import ( + load_data_from_templates, + reset_data_from_templates +) __all__ = [ # plugin classes @@ -32,4 +42,13 @@ __all__ = [ "Logger", + # contectual templates + "load_data_from_templates", + "reset_data_from_templates", + + "Anatomy", + "Colorspace", + "Metadata", + "Dataflow" + ] diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 97c9d714fc..455e2f8088 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -2,19 +2,21 @@ import os import sys from avalon import api as avalon from pyblish import api as pyblish -from ..api import Logger + +from .. import api as pype + from pype.nuke import menu import nuke # removing logger handler created in avalon_core for name, handler in [(handler.get_name(), handler) - for handler in Logger.logging.root.handlers[:]]: + for handler in pype.Logger.logging.root.handlers[:]]: if "pype" not in str(name).lower(): - Logger.logging.root.removeHandler(handler) + pype.Logger.logging.root.removeHandler(handler) -log = Logger.getLogger(__name__, "nuke") +log = pype.Logger.getLogger(__name__, "nuke") AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") @@ -31,7 +33,7 @@ self = sys.modules[__name__] self.nLogger = None -class NukeHandler(Logger.logging.Handler): +class NukeHandler(pype.Logger.logging.Handler): ''' Nuke Handler - emits logs into nuke's script editor. warning will emit nuke.warning() @@ -39,7 +41,7 @@ class NukeHandler(Logger.logging.Handler): ''' def __init__(self): - Logger.logging.Handler.__init__(self) + pype.Logger.logging.Handler.__init__(self) self.set_name("Pype_Nuke_Handler") def emit(self, record): @@ -59,11 +61,11 @@ class NukeHandler(Logger.logging.Handler): nuke_handler = NukeHandler() if nuke_handler.get_name() \ not in [handler.get_name() - for handler in Logger.logging.root.handlers[:]]: - Logger.logging.getLogger().addHandler(nuke_handler) + for handler in pype.Logger.logging.root.handlers[:]]: + pype.Logger.logging.getLogger().addHandler(nuke_handler) if not self.nLogger: - self.nLogger = Logger + self.nLogger = pype.Logger def reload_config(): @@ -76,7 +78,7 @@ def reload_config(): import importlib for module in ( - "{}".format(AVALON_CONFIG), + "{}.templates".format(AVALON_CONFIG), "{}.nuke".format(AVALON_CONFIG), "{}.nuke.lib".format(AVALON_CONFIG), "{}.nuke.menu".format(AVALON_CONFIG) @@ -90,7 +92,9 @@ def reload_config(): def install(): + reload_config() + log.info("Registering Nuke plug-ins..") pyblish.register_plugin_path(PUBLISH_PATH) avalon.register_plugin_path(avalon.Loader, LOAD_PATH) @@ -112,6 +116,9 @@ def install(): menu.install() + # load data from templates + pype.load_data_from_templates() + def uninstall(): log.info("Deregistering Nuke plug-ins..") @@ -121,6 +128,9 @@ def uninstall(): pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) + # reset data from templates + pype.reset_data_from_templates() + def on_pyblish_instance_toggled(instance, new_value, old_value): """Toggle saver tool passthrough states on instance toggles.""" diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 71534b92de..c68d296450 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -122,12 +122,9 @@ def set_writes_colorspace(write_dict): def set_colorspace(): - from app.api import Templates + from pype import api as pype - t = Templates(type=["colorspace"]) - colorspace = t.colorspace - - nuke_colorspace = getattr(colorspace, "nuke", None) + nuke_colorspace = getattr(pype.Colorspace, "nuke", None) try: 
set_root_colorspace(nuke_colorspace.root) diff --git a/pype/templates.py b/pype/templates.py new file mode 100644 index 0000000000..f62488caee --- /dev/null +++ b/pype/templates.py @@ -0,0 +1,34 @@ +import os + + +from app.api import (Templates, Logger) + +log = Logger.getLogger(__name__, + os.getenv("AVALON_APP", "pype-config")) + + +def load_data_from_templates(): + from . import api + if not any([ + api.Dataflow, + api.Anatomy, + api.Colorspace, + api.Metadata] + ): + # base = Templates() + t = Templates(type=["anatomy", "metadata", "dataflow", "colorspace"]) + api.Anatomy = t.anatomy + api.Metadata = t.metadata.format() + data = {"metadata": api.Metadata} + api.Dataflow = t.dataflow.format(data) + api.Colorspace = t.colorspace + log.info("Data from templates were Loaded...") + + +def reset_data_from_templates(): + from . import api + api.Dataflow = None + api.Anatomy = None + api.Colorspace = None + api.Metadata = None + log.info("Data from templates were Unloaded...") From 3943b0c3dcd9f098ce0fbc115763c8babdfdfa21 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 28 Nov 2018 22:51:45 +0100 Subject: [PATCH 21/78] adding CreateWrite, updating pype.nuke modules --- pype/nuke/__init__.py | 7 +++ pype/nuke/lib.py | 34 ++++++++++ pype/nuke/templates.py | 35 +++++++++++ pype/plugins/nuke/create/create_write.py | 65 ++++++++++++++++++++ pype/plugins/nuke/create/create_write_exr.py | 55 ----------------- 5 files changed, 141 insertions(+), 55 deletions(-) create mode 100644 pype/nuke/templates.py create mode 100644 pype/plugins/nuke/create/create_write.py delete mode 100644 pype/plugins/nuke/create/create_write_exr.py diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 455e2f8088..2554ed60af 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -7,6 +7,8 @@ from .. 
import api as pype from pype.nuke import menu +from .lib import create_write_node + import nuke # removing logger handler created in avalon_core @@ -81,6 +83,7 @@ def reload_config(): "{}.templates".format(AVALON_CONFIG), "{}.nuke".format(AVALON_CONFIG), "{}.nuke.lib".format(AVALON_CONFIG), + "{}.nuke.templates".format(AVALON_CONFIG), "{}.nuke.menu".format(AVALON_CONFIG) ): log.info("Reloading module: {}...".format(module)) @@ -105,6 +108,10 @@ def install(): # Disable all families except for the ones we explicitly want to see family_states = [ + "render", + "still" + "lifeGroup", + "backdrop", "imagesequence", "mov" "camera", diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index c68d296450..57af77ba5d 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1,4 +1,5 @@ import sys +from collections import OrderedDict from pprint import pprint from avalon.vendor.Qt import QtGui import avalon.nuke @@ -12,6 +13,39 @@ self = sys.modules[__name__] self._project = None +def create_write_node(name, avalon_data, data_templates): + from .templates import ( + get_dataflow, + get_colorspace + ) + nuke_dataflow_writes = get_dataflow(**data_templates) + nuke_colorspace_writes = get_colorspace(**data_templates) + + data = OrderedDict({ + "file": "pathToFile/file.exr" + }) + + # adding dataflow template + {data.update({k: v}) + for k, v in nuke_dataflow_writes.items() + if k not in ["id", "previous"]} + + # adding dataflow template + {data.update({k: v}) + for k, v in nuke_colorspace_writes.items()} + + data = avalon.nuke.lib.fix_data_for_node_create(data) + + log.info(data) + + instance = avalon.nuke.lib.add_write_node( + name, + **data + ) + instance = avalon.nuke.lib.imprint(instance, avalon_data) + return instance + + def update_frame_range(start, end, root=None): """Set Nuke script start and end frame range diff --git a/pype/nuke/templates.py b/pype/nuke/templates.py new file mode 100644 index 0000000000..7a6bf9a229 --- /dev/null +++ b/pype/nuke/templates.py @@ -0,0 +1,35 @@ +from pype import api as pype + +log = pype.Logger.getLogger(__name__, "nuke") + + +def get_dataflow(**kwarg): + host = kwarg.get("host", "nuke") + cls = kwarg.get("cls", None) + family = kwarg.get("family", None) + assert any([host, cls]), log.error("nuke.templates.get_dataflow():" + "Missing mandatory kwargs `host`, `cls`") + + nuke_dataflow = getattr(pype.Dataflow, str(host), None) + nuke_dataflow_node = getattr(nuke_dataflow.nodes, str(cls), None) + if family: + nuke_dataflow_node = getattr(nuke_dataflow_node, str(family), None) + + log.info("Dataflow: {}".format(nuke_dataflow_node)) + return nuke_dataflow_node + + +def get_colorspace(**kwarg): + host = kwarg.get("host", "nuke") + cls = kwarg.get("cls", None) + family = kwarg.get("family", None) + assert any([host, cls]), log.error("nuke.templates.get_colorspace():" + "Missing mandatory kwargs `host`, `cls`") + + nuke_colorspace = getattr(pype.Colorspace, str(host), None) + nuke_colorspace_node = getattr(nuke_colorspace, str(cls), None) + if family: + nuke_colorspace_node = getattr(nuke_colorspace_node, str(family), None) + + log.info("Colorspace: {}".format(nuke_colorspace_node)) + return nuke_colorspace_node diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py new file mode 100644 index 0000000000..b0a6f3de9b --- /dev/null +++ b/pype/plugins/nuke/create/create_write.py @@ -0,0 +1,65 @@ +import avalon.api +import avalon.nuke +from pype.nuke import ( + create_write_node +) +from pype import api as pype + +log = 
pype.Logger.getLogger(__name__, "nuke") + + +class CrateWriteRender(avalon.nuke.Creator): + name = "WriteRender" + label = "Create Write Render" + hosts = ["nuke"] + family = "render" + icon = "sign-out" + + def process(self): + instance = super(CrateWriteRender, self).process() + + if not instance: + data_templates = { + "cls": "write", + "family": self.family + } + create_write_node(self.name, self.data, data_templates) + return + + +class CrateWritePrerender(avalon.nuke.Creator): + name = "WritePrerender" + label = "Create Write Prerender" + hosts = ["nuke"] + family = "prerender" + icon = "sign-out" + + def process(self): + instance = super(CrateWritePrerender, self).process() + + if not instance: + data_templates = { + "cls": "write", + "family": self.family + } + create_write_node(self.name, self.data, data_templates) + return None + + +class CrateWriteStill(avalon.nuke.Creator): + name = "WriteStill" + label = "Create Write Still" + hosts = ["nuke"] + family = "still" + icon = "image" + + def process(self): + instance = super(CrateWriteStill, self).process() + + if not instance: + data_templates = { + "cls": "write", + "family": self.family + } + create_write_node(self.name, self.data, data_templates) + return diff --git a/pype/plugins/nuke/create/create_write_exr.py b/pype/plugins/nuke/create/create_write_exr.py deleted file mode 100644 index 8aa9ba5310..0000000000 --- a/pype/plugins/nuke/create/create_write_exr.py +++ /dev/null @@ -1,55 +0,0 @@ -import os -from pprint import pprint -import avalon.api -import avalon.nuke -import nuke -from app import api - - -class CrateWriteExr(avalon.api.Creator): - name = "Write_exr" - label = "Create Write: exr" - hosts = ["nuke"] - family = "write" - icon = "sign-out" - # TODO: create container of metadata into user knob - # TODO: if write node selected it will add metadata - - # def __init__(self, *args, **kwargs): - # super(CrateWriteExr, self).__init__(*args, **kwargs) - # self.data.setdefault("subset", "this") - - def process(self): - templates = api.Templates(type=["dataflow", "metadata", "colorspace"]) - templates = templates.format() - pprint(templates) - # nuke = getattr(sys.modules["__main__"], "nuke", None) - data = {} - ext = "exr" - - # todo: improve method of getting current environment - # todo: pref avalon.Session over os.environ - - workdir = os.path.normpath(os.environ["AVALON_WORKDIR"]) - - filename = "{}.####.exr".format(self.name) - filepath = os.path.join( - workdir, - "render", - ext, - filename - ).replace("\\", "/") - - with avalon.nuke.viewer_update_and_undo_stop(): - w = nuke.createNode( - "Write", - "name {}".format(self.name)) - # w.knob('colorspace').setValue() - w.knob('file').setValue(filepath) - w.knob('file_type').setValue(ext) - w.knob('datatype').setValue("16 bit half") - w.knob('compression').setValue("Zip (1 scanline)") - w.knob('create_directories').setValue(True) - w.knob('autocrop').setValue(True) - - return data From fe2162cbd2688c22b71fdbafc5771944c8b290de Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 29 Nov 2018 10:54:52 +0100 Subject: [PATCH 22/78] ftrack connecting to pype.templates --- pype/ftrack/actions/ftrack_action_handler.py | 28 ++++++++++---------- pype/ftrack/ftrackRun.py | 20 +++++++++----- pype/templates.py | 3 ++- 3 files changed, 30 insertions(+), 21 deletions(-) diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index fa108ec78b..d6bf0a2863 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ 
b/pype/ftrack/actions/ftrack_action_handler.py @@ -10,14 +10,12 @@ from avalon import io, lib, pipeline from avalon import session as sess import acre -from app.api import ( - Templates, - Logger -) -t = Templates( - type=["anatomy"] -) +from pype import api as pype + +log = pype.Logger.getLogger(__name__, "ftrack") + +log.debug("pype.Anatomy: {}".format(pype.Anatomy)) class AppAction(object): @@ -34,7 +32,7 @@ class AppAction(object): def __init__(self, session, label, name, executable, variant=None, icon=None, description=None): '''Expects a ftrack_api.Session instance''' - self.log = Logger.getLogger(self.__class__.__name__) + self.log = pype.Logger.getLogger(self.__class__.__name__) # self.logger = Logger.getLogger(__name__) @@ -241,7 +239,7 @@ class AppAction(object): os.environ["AVALON_APP"] = self.identifier os.environ["AVALON_APP_NAME"] = self.identifier + "_" + self.variant - anatomy = t.anatomy + anatomy = pype.Anatomy io.install() hierarchy = io.find_one({"type": 'asset', "name": entity['parent']['name']})[ 'data']['parents'] @@ -255,9 +253,10 @@ class AppAction(object): "task": entity['name'], "asset": entity['parent']['name'], "hierarchy": hierarchy} - - anatomy = anatomy.format(data) - + try: + anatomy = anatomy.format(data) + except Exception as e: + log.error("{0} Error in anatomy.format: {1}".format(__name__, e)) os.environ["AVALON_WORKDIR"] = os.path.join(anatomy.work.root, anatomy.work.folder) # TODO Add paths to avalon setup from tomls @@ -398,7 +397,7 @@ class BaseAction(object): def __init__(self, session): '''Expects a ftrack_api.Session instance''' - self.log = Logger.getLogger(self.__class__.__name__) + self.log = pype.Logger.getLogger(self.__class__.__name__) if self.label is None: raise ValueError( @@ -435,7 +434,8 @@ class BaseAction(object): ), self._launch ) - self.log.info("----- action - <" + self.__class__.__name__ + "> - Has been registered -----") + self.log.info("----- action - <" + self.__class__.__name__ + + "> - Has been registered -----") def _discover(self, event): args = self._translate_event( diff --git a/pype/ftrack/ftrackRun.py b/pype/ftrack/ftrackRun.py index e90530b3b2..7fddf171da 100644 --- a/pype/ftrack/ftrackRun.py +++ b/pype/ftrack/ftrackRun.py @@ -7,11 +7,19 @@ import time from app import style from app.vendor.Qt import QtCore, QtGui, QtWidgets from pype.ftrack import credentials, login_dialog as login_dialog -from app.api import Logger + from FtrackServer import FtrackServer -log = Logger.getLogger(__name__) +from pype import api as pype + + +# load data from templates +pype.load_data_from_templates() + +log = pype.Logger.getLogger(__name__, "ftrack") # Validation if alredy logged into Ftrack + + class FtrackRunner: def __init__(self, main_parent=None, parent=None): @@ -76,7 +84,7 @@ class FtrackRunner: def runActionServer(self): if self.actionThread is None: self.actionThread = threading.Thread(target=self.setActionServer) - self.actionThread.daemon=True + self.actionThread.daemon = True self.actionThread.start() log.info("Ftrack action server launched") @@ -107,7 +115,7 @@ class FtrackRunner: def runEventServer(self): if self.eventThread is None: self.eventThread = threading.Thread(target=self.setEventServer) - self.eventThread.daemon=True + self.eventThread.daemon = True self.eventThread.start() log.info("Ftrack event server launched") @@ -168,9 +176,9 @@ class FtrackRunner: self.smEventS.addAction(self.aStopEventS) # Actions - basic - self.aLogin = QtWidgets.QAction("Login",self.menu) + self.aLogin = QtWidgets.QAction("Login", 
self.menu) self.aLogin.triggered.connect(self.validate) - self.aLogout = QtWidgets.QAction("Logout",self.menu) + self.aLogout = QtWidgets.QAction("Logout", self.menu) self.aLogout.triggered.connect(self.logout) self.menu.addAction(self.aLogin) diff --git a/pype/templates.py b/pype/templates.py index f62488caee..38e120bec3 100644 --- a/pype/templates.py +++ b/pype/templates.py @@ -13,7 +13,8 @@ def load_data_from_templates(): api.Dataflow, api.Anatomy, api.Colorspace, - api.Metadata] + api.Metadata + ] ): # base = Templates() t = Templates(type=["anatomy", "metadata", "dataflow", "colorspace"]) From d6a15e259a10fdf07e3c81a2b24539f0985cbd4f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 29 Nov 2018 11:28:50 +0100 Subject: [PATCH 23/78] Creating backup --- pype/ftrack/events/event_show_message.py | 32 ++++++++++++++++++++++ pype/ftrack/events/ftrack_event_handler.py | 20 ++++++++++++++ pype/ftrack/events/test_event.py | 4 +-- 3 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 pype/ftrack/events/event_show_message.py diff --git a/pype/ftrack/events/event_show_message.py b/pype/ftrack/events/event_show_message.py new file mode 100644 index 0000000000..b0309713d9 --- /dev/null +++ b/pype/ftrack/events/event_show_message.py @@ -0,0 +1,32 @@ +import os +import sys +import re +import ftrack_api +from ftrack_event_handler import BaseEvent +from app import api + +class Show_Message(BaseEvent): + + def launch(self, event): + + self.session.event_hub.publish_reply(event, event['data']) + return event['data'] + + def register(self): + # self.session.event_hub.subscribe('topic=show_message_topic', self.launch) + + self.session.event_hub.subscribe( + 'topic=ftrack.action.launch and data.actionIdentifier={0} and source.user.username={1}'.format( + self.identifier, + self.session.api_user + ), + self._launch + ) + +def register(session, **kw): + '''Register plugin. 
Called when used as a plugin.'''
+    if not isinstance(session, ftrack_api.session.Session):
+        return
+
+    event = Show_Message(session)
+    event.register()
diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py
index 009c2b2d57..f3ad931985 100644
--- a/pype/ftrack/events/ftrack_event_handler.py
+++ b/pype/ftrack/events/ftrack_event_handler.py
@@ -46,6 +46,8 @@ class BaseEvent(object):
         '''Registers the event, subscribing to the discover and launch topics.'''
         self.session.event_hub.subscribe('topic=ftrack.update', self._launch)

+        self.log.info("----- event - <" + self.__class__.__name__ + "> - Has been registered -----")
+
     def _translate_event(self, session, event):
         '''Return *event* translated structure to be used with the API.'''
         _selection = event['data'].get('entities',[])
@@ -130,6 +132,24 @@ class BaseEvent(object):
         '''
         raise NotImplementedError()

+    def show_message(self, event, input_message, result=False):
+        if not isinstance(result, bool):
+            result = False
+
+        try:
+            message = str(input_message)
+        except Exception:
+            return
+        source = {}
+        source['id'] = event['source']['applicationId']
+        source['user'] = event['source']['user']
+        self.session.event_hub.publish_reply(event, event['data'], source)
+        # event = ftrack_api.event.base.Event(
+        #     topic='show_message_topic',
+        #     data={'success': result, 'message': message}
+        # )
+        #
+        # self.session.event_hub.publish(event)

     def _handle_result(self, session, result, entities, event):
         '''Validate the returned result from the action callback'''
diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/test_event.py
index aa3bbb6e0d..19022e3360 100644
--- a/pype/ftrack/events/test_event.py
+++ b/pype/ftrack/events/test_event.py
@@ -11,8 +11,8 @@ class Test_Event(BaseEvent):
         '''just a testing event'''
         exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog']
-
-
+        self.show_message(event, "Test", True)
+        self.log.info(event['source'])
         return True

From 6a94143cc8b6b1f1b4c961e583cc65edb9cc9171 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 29 Nov 2018 11:34:18 +0100
Subject: [PATCH 24/78] Action raises error and shows a message to the user about what it did
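
Sketch of the duplicate-name guard this commit adds to launch(), paraphrased
from the diff below (`lib` is `pype.lib`, which this commit extends with the
`avalon_check_name` helper):

    all_names = []
    duplicates = []
    for e in self.importable:
        lib.avalon_check_name(e)  # name validation now lives in pype.lib
        if e['name'] in all_names:
            duplicates.append("'{}'".format(e['name']))
        else:
            all_names.append(e['name'])
    if len(duplicates) > 0:
        raise ValueError(
            "Entity name duplication: {}".format(", ".join(duplicates)))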
---
 pype/ftrack/actions/action_syncToAvalon.py | 297 +++++++++++----------
 pype/lib.py                                |  62 +++++
 2 files changed, 220 insertions(+), 139 deletions(-)

diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py
index 65b84fbc7b..851820cf1d 100644
--- a/pype/ftrack/actions/action_syncToAvalon.py
+++ b/pype/ftrack/actions/action_syncToAvalon.py
@@ -7,10 +7,12 @@ import os
 import ftrack_api
 import json
 import re
+from pype import lib
 from ftrack_action_handler import BaseAction
-
-from avalon import io, inventory, lib
+from bson.objectid import ObjectId
+from avalon import io, inventory
 from avalon.vendor import toml
+from pype.ftrack import ftrack_utils
 
 class SyncToAvalon(BaseAction):
     '''Synchronize the ftrack hierarchy to Avalon.'''
@@ -27,6 +29,7 @@ class SyncToAvalon(BaseAction):
 
     def discover(self, session, entities, event):
         ''' Validation '''
+        discover = False
         for entity in entities:
             if entity.entity_type.lower() not in ['task', 'assetversion']:
@@ -53,9 +56,9 @@ class SyncToAvalon(BaseAction):
         try:
             self.log.info("action <" + self.__class__.__name__ + "> is running")
-
+            self.ca_mongoid = 'avalon_mongo_id'
             # TODO: AVALON_PROJECTS, AVALON_ASSET and AVALON_SILO should be set, otherwise the console log shows avalon debug output
-            self.setAvalonAttributes(session)
+            self.setAvalonAttributes()
 
             self.importable = []
 
             # get from top entity in hierarchy all parent entities
@@ -70,33 +73,57 @@ class SyncToAvalon(BaseAction):
                     self.getShotAsset(entity)
 
             # Check duplicate name - raise error if found
-            all_names = {}
+            all_names = []
             duplicates = []
 
             for e in self.importable:
-                name = self.checkName(e['name'])
-                if name in all_names:
-                    duplicates.append("'{}'-'{}'".format(all_names[name], e['name']))
+                lib.avalon_check_name(e)
+                if e['name'] in all_names:
+                    duplicates.append("'{}'".format(e['name']))
                 else:
-                    all_names[name] = e['name']
+                    all_names.append(e['name'])
 
             if len(duplicates) > 0:
-                raise ValueError("Unable to sync: Entity name duplication: {}".format(", ".join(duplicates)))
+                raise ValueError("Entity name duplication: {}".format(", ".join(duplicates)))
+
+            ## ----- PROJECT ------
+            # store Ftrack project - self.importable[0] must be a project entity
+            self.entityProj = self.importable[0]
+            # set AVALON_ env
+            os.environ["AVALON_PROJECT"] = self.entityProj["full_name"]
+            os.environ["AVALON_ASSET"] = self.entityProj["full_name"]
+
+            self.avalon_project = None
+
+            io.install()
 
             # Import all entities to Avalon DB
             for e in self.importable:
                 self.importToAvalon(session, e)
 
+            io.uninstall()
+
             job['status'] = 'done'
             session.commit()
 
             self.log.info('Synchronization to Avalon was successful!')
 
+        except ValueError as ve:
+            job['status'] = 'failed'
+            session.commit()
+            message = str(ve)
+            self.log.error('Error during syncToAvalon: {}'.format(message))
+
         except Exception as e:
             job['status'] = 'failed'
-            message = str(e)
-            self.log.error('During synchronization to Avalon went something wrong! ({})'.format(message))
+            session.commit()
+            exc_type, exc_obj, exc_tb = sys.exc_info()
+            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
+            log_message = "{}/{}/Line: {}".format(exc_type, fname, exc_tb.tb_lineno)
+            self.log.error('Error during syncToAvalon: {}'.format(log_message))
+            message = 'Unexpected Error!!!
(Please check Log for more information)'
 
         if len(message) > 0:
+            message = "Unable to sync: {}".format(message)
             return {
                 'success': False,
                 'message': message
             }
@@ -106,9 +133,10 @@ class SyncToAvalon(BaseAction):
             'success': True,
             'message': "Synchronization was successful"
         }
 
-    def setAvalonAttributes(self, session):
+
+    def setAvalonAttributes(self):
         self.custom_attributes = []
-        all_avalon_attr = session.query('CustomAttributeGroup where name is "avalon"').one()
+        all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one()
         for cust_attr in all_avalon_attr['custom_attribute_configurations']:
             if 'avalon_' not in cust_attr['key']:
                 self.custom_attributes.append(cust_attr)
@@ -123,60 +151,7 @@ class SyncToAvalon(BaseAction):
         for child in childrens:
             self.getShotAsset(child)
 
-    def checkName(self, input_name):
-        if input_name.find(" ") == -1:
-            name = input_name
-        else:
-            name = input_name.replace(" ", "-")
-            self.log.info("Name of {} was changed to {}".format(input_name, name))
-        return name
-
-    def getConfig(self, entity):
-        apps = []
-        for app in entity['custom_attributes']['applications']:
-            try:
-                label = toml.load(lib.which_app(app))['label']
-                apps.append({'name':app, 'label':label})
-            except Exception as e:
-                self.log.error('Error with application {0} - {1}'.format(app, e))
-
-        config = {
-            'schema': 'avalon-core:config-1.0',
-            'tasks': [{'name': ''}],
-            'apps': apps,
-            # TODO redo work!!!
-            'template': {
-                'workfile': '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>',
-                'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}',
-                'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'}
-        }
-        return config
-
-
-    def importToAvalon(self, session, entity):
-        eLinks = []
-
-        ca_mongoid = 'avalon_mongo_id'
-
-        # get needed info of entity and all parents
-        for e in entity['link']:
-            tmp = session.get(e['type'], e['id'])
-            eLinks.append(tmp)
-
-        entityProj = eLinks[0]
-
-        # set AVALON_PROJECT env
-        os.environ["AVALON_PROJECT"] = entityProj["full_name"]
-        os.environ["AVALON_ASSET"] = entityProj['full_name']
-
-        # Set project template
-        template = {"schema": "avalon-core:inventory-1.0"}
-
-        # --- Begin: PUSH TO Avalon ---
-        io.install()
-        ## ----- PROJECT ------
-        # If project don't exists -> ELSE
-        avalon_project = io.find_one({"type": "project", "name": entityProj["full_name"]})
+    def getData(self, entity, session):
         entity_type = entity.entity_type
 
         data = {}
         data['ftrackId'] = entity['id']
         data['entityType'] = entity_type
 
         for cust_attr in self.custom_attributes:
+            key = cust_attr['key']
             if cust_attr['entity_type'].lower() in ['asset']:
-                data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
+                data[key] = entity['custom_attributes'][key]
 
             elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project':
-                data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
+                data[key] = entity['custom_attributes'][key]
 
             elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project':
                 # Put space between capitals (e.g.
'AssetBuild' -> 'Asset Build')
-                entity_type = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
+                entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
                 # Get object id of entity type
-                ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type)).one()['id']
+                ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id']
 
                 if cust_attr['object_type_id'] == ent_obj_type_id:
-                    data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
-
-        if entity.entity_type.lower() in ['project']:
-            # Set project Config
-            config = self.getConfig(entity)
-
-            if avalon_project is None:
-                inventory.save(entityProj['full_name'], config, template)
-            else:
-                io.update_many({'type': 'project','name': entityProj['full_name']},
-                    {'$set':{'config':config}})
+                    data[key] = entity['custom_attributes'][key]
 
+        if entity_type in ['Project']:
             data['code'] = entity['name']
-
-            # Store info about project (FtrackId)
-            io.update_many({
-                'type': 'project',
-                'name': entity['full_name']},
-                {'$set':{'data':data}})
-
-            projectId = io.find_one({"type": "project", "name": entityProj["full_name"]})["_id"]
-            if ca_mongoid in entity['custom_attributes']:
-                entity['custom_attributes'][ca_mongoid] = str(projectId)
-            else:
-                self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))
-            io.uninstall()
-            return
-
-        # Store project Id
-        projectId = avalon_project["_id"]
-
-        ## ----- ASSETS ------
-        # Presets:
-        # TODO how to check if entity is Asset Library or AssetBuild?
-        if entity.entity_type in ['AssetBuild', 'Library']:
-            silo = 'Assets'
-        else:
-            silo = 'Film'
-
-        os.environ['AVALON_SILO'] = silo
-
-        # Get list of parents without project
-        parents = []
-        for i in range(1, len(eLinks)-1):
-            parents.append(eLinks[i])
+            return data
 
         # Get info for 'Data' in Avalon DB
         tasks = []
         for child in entity['children']:
             if child.entity_type in ['Task']:
                 tasks.append(child['name'])
 
+        # Get list of parents without project
+        parents = []
+        for i in range(1, len(entity['link'])-1):
+            tmp = session.get(entity['link'][i]['type'], entity['link'][i]['id'])
+            parents.append(tmp)
+
         folderStruct = []
         parentId = None
 
         for parent in parents:
-            name = self.checkName(parent['name'])
-            folderStruct.append(name)
-            parentId = io.find_one({'type': 'asset', 'name': name})['_id']
+            parName = parent['name']
+            folderStruct.append(parName)
+            parentId = io.find_one({'type': 'asset', 'name': parName})['_id']
             if parent['parent'].entity_type != 'project' and parentId is None:
                 self.importToAvalon(session, parent)
-                parentId = io.find_one({'type': 'asset', 'name': name})['_id']
+                parentId = io.find_one({'type': 'asset', 'name': parName})['_id']
 
         hierarchy = os.path.sep.join(folderStruct)
 
@@ -268,37 +209,115 @@ class SyncToAvalon(BaseAction):
         data['tasks'] = tasks
         data['hierarchy'] = hierarchy
+        return data
 
-        name = self.checkName(entity['name'])
+    def importToAvalon(self, session, entity):
+        # --- Begin: PUSH TO Avalon ---
+
+        entity_type = entity.entity_type
+
+        if entity_type.lower() in ['project']:
+            # Set project Config
+            config = ftrack_utils.get_config(entity)
+            # Set project template
+            template = lib.get_avalon_project_template_schema()
 
+            if self.ca_mongoid in entity['custom_attributes']:
+                projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid])
+                self.avalon_project = io.find_one({"_id": projectId})
+
+            if
self.avalon_project is None:
+                self.avalon_project = io.find_one({
+                    "type": "project",
+                    "name": entity["full_name"]
+                })
+                if self.avalon_project is None:
+                    inventory.save(entity['full_name'], config, template)
+                    self.avalon_project = io.find_one({
+                        "type": "project",
+                        "name": entity["full_name"]
+                    })
+
+            elif self.avalon_project['name'] != entity['full_name']:
+                raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], entity['full_name']))
+
+            data = self.getData(entity, session)
+
+            # Store info about project (FtrackId)
+            io.update_many({
+                'type': 'project',
+                'name': entity['full_name']
+            }, {
+                '$set':{'data':data, 'config':config}
+            })
+
+            self.projectId = self.avalon_project["_id"]
+            if self.ca_mongoid in entity['custom_attributes']:
+                entity['custom_attributes'][self.ca_mongoid] = str(self.projectId)
+            else:
+                self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))
+            return
+
+        ## ----- ASSETS ------
+        # Presets:
+        # TODO how to check if entity is Asset Library or AssetBuild?
+        silo = 'Film'
+        if entity_type in ['AssetBuild', 'Library']:
+            silo = 'Assets'
+
+        os.environ['AVALON_SILO'] = silo
+
+        name = entity['name']
         os.environ['AVALON_ASSET'] = name
 
+        data = self.getData(entity, session)
+
         # Try to find asset in current database
-        avalon_asset = io.find_one({'type': 'asset', 'name': name})
-        # Create if don't exists
+        avalon_asset = None
+        if self.ca_mongoid in entity['custom_attributes']:
+            try:
+                entityId = ObjectId(entity['custom_attributes'][self.ca_mongoid])
+                avalon_asset = io.find_one({"_id": entityId})
+            except:
+                self.log.debug("Entity {} doesn't have a stored entity id in ftrack".format(entity['name']))
+
         if avalon_asset is None:
-            inventory.create_asset(name, silo, data, projectId)
-            self.log.debug("Asset {} - created".format(name))
-        # Raise error if it seems to be different ent. with same name
+            avalon_asset = io.find_one({'type': 'asset', 'name': name})
+            # Create if it doesn't exist
+            if avalon_asset is None:
+                inventory.create_asset(name, silo, data, self.projectId)
+                self.log.debug("Asset {} - created".format(name))
 
-        elif (avalon_asset['data']['ftrackId'] != data['ftrackId'] or
-            avalon_asset['data']['visualParent'] != data['visualParent'] or
-            avalon_asset['data']['parents'] != data['parents']):
-            raise ValueError('Entity <{}> is not same'.format(name))
-        # Else update info
-        else:
-            io.update_many({'type': 'asset','name': name},
-                {'$set':{'data':data, 'silo': silo}})
-            # TODO check if is asset in same folder!!! ???? FEATURE FOR FUTURE
-            self.log.debug("Asset {} - updated".format(name))
+            # Raise error if it seems to be a different entity
with the same name
+            else:
+                update = False
+                aD = avalon_asset['data']
+                attr = ['ftrackId', 'visualParent', 'parents']
+                for a in attr:
+                    if a not in aD: update = True
+
+                if update is False:
+                    if (avalon_asset['data']['ftrackId'] != data['ftrackId'] or
+                        avalon_asset['data']['visualParent'] != data['visualParent'] or
+                        avalon_asset['data']['parents'] != data['parents']):
+                        raise ValueError('Entity <{}> is not the same as in Avalon DB'.format(name))
+
+        elif avalon_asset['name'] != entity['name']:
+            raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please create a new asset'.format(avalon_asset['name'], name))
+
+        # Update info
+        io.update_many({'type': 'asset','name': name},
+            {'$set':{'data':data, 'silo': silo}})
+
+        self.log.debug("Asset {} - updated".format(name))
+
+        entityId = io.find_one({'type': 'asset', 'name': name})['_id']
 
         ## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK
         # Set custom attribute to avalon/mongo id of entity (parentID is last)
-        if ca_mongoid in entity['custom_attributes']:
-            entity['custom_attributes'][ca_mongoid] = str(parentId)
+        if self.ca_mongoid in entity['custom_attributes']:
+            entity['custom_attributes'][self.ca_mongoid] = str(entityId)
         else:
             self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))
 
-        io.uninstall()
         session.commit()
 
diff --git a/pype/lib.py b/pype/lib.py
index 3ce1441e3d..43f41b336c 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -9,6 +9,7 @@ from .vendor.pather.error import ParseError
 
 import avalon.io as io
 import avalon.api
+import avalon
 
 log = logging.getLogger(__name__)
 
@@ -335,3 +336,64 @@ def get_asset_data(asset=None):
     data = document.get("data", {})
 
     return data
+
+def get_avalon_project_config_schema():
+    schema = 'avalon-core:config-1.0'
+    return schema
+
+def get_avalon_project_template_schema():
+    schema = {"schema": "avalon-core:inventory-1.0"}
+    return schema
+
+def get_avalon_project_template():
+    """Get avalon template
+
+    Returns:
+        dictionary with templates
+    """
+    from app.api import Templates
+
+    template = Templates(type=["anatomy"])
+    proj_template = {}
+    proj_template['workfile'] = '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>'
+    proj_template['work'] = '{root}/{project}/{hierarchy}/{asset}/work/{task}'
+    proj_template['publish'] = '{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'
+    # TODO: the lines below should work, but they can't be in default.toml:
+    #   - Raises error when App (e.g. Nuke) is started
+    # proj_template['workfile'] = template.anatomy.avalon.workfile
+    # proj_template['work'] = template.anatomy.avalon.work
+    # proj_template['publish'] = template.anatomy.avalon.publish
+    return proj_template
+
+def avalon_check_name(self, entity, inSchema = None):
+    alright = True
+    name = entity['name']
+    if " " in name:
+        alright = False
+
+    data = {}
+    data['data'] = {}
+    data['type'] = 'asset'
+    schema = "avalon-core:asset-2.0"
+    # TODO have project any REGEX check?
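+    # e.g. a name like "sh 010" trips the space check above, while "sh_010"
+    # passes straight through to the schema validation below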
+ if entity.entity_type in ['Project']: + # data['type'] = 'project' + name = entity['full_name'] + # schema = get_avalon_project_template_schema()['schema'] + # elif entity.entity_type in ['AssetBuild','Library']: + # data['silo'] = 'Assets' + # else: + # data['silo'] = 'Film' + data['silo'] = 'Film' + + if inSchema is not None: + schema = inSchema + data['schema'] = schema + data['name'] = name + try: + avalon.schema.validate(data) + except ValidationError: + alright = False + + if alright is False: + raise ValueError("{} includes unsupported symbols like 'dash' or 'space'".format(name)) From 5db5d7d086c0d5fe4a233461b9786ac53ec8122c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 29 Nov 2018 15:12:20 +0100 Subject: [PATCH 25/78] Creating backup --- pype/ftrack/events/event_show_message.py | 32 ------------ pype/ftrack/events/test_event.py | 63 ++++++++++++++++++++++-- 2 files changed, 60 insertions(+), 35 deletions(-) delete mode 100644 pype/ftrack/events/event_show_message.py diff --git a/pype/ftrack/events/event_show_message.py b/pype/ftrack/events/event_show_message.py deleted file mode 100644 index b0309713d9..0000000000 --- a/pype/ftrack/events/event_show_message.py +++ /dev/null @@ -1,32 +0,0 @@ -import os -import sys -import re -import ftrack_api -from ftrack_event_handler import BaseEvent -from app import api - -class Show_Message(BaseEvent): - - def launch(self, event): - - self.session.event_hub.publish_reply(event, event['data']) - return event['data'] - - def register(self): - # self.session.event_hub.subscribe('topic=show_message_topic', self.launch) - - self.session.event_hub.subscribe( - 'topic=ftrack.action.launch and data.actionIdentifier={0} and source.user.username={1}'.format( - self.identifier, - self.session.api_user - ), - self._launch - ) - -def register(session, **kw): - '''Register plugin. 
Called when used as a plugin.'''
-    if not isinstance(session, ftrack_api.session.Session):
-        return
-
-    event = Show_Message(session)
-    event.register()
diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/test_event.py
index 19022e3360..a4e42fc8ae 100644
--- a/pype/ftrack/events/test_event.py
+++ b/pype/ftrack/events/test_event.py
@@ -10,9 +10,37 @@ class Test_Event(BaseEvent):
 
     def launch(self, session, entities, event):
         '''just a testing event'''
-        exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog']
-        self.show_message(event,"Test",True)
-        self.log.info(event['source'])
+        result = True
+        message = "test message"
+        data = {
+            'success':result,
+            'message': message,
+        }
+
+        self.log.info(event['data']['entities'])
+        # event['source']['id']
+
+
+        # self.session.event_hub.publish_reply(event, data, subscriber.metadata)
+
+        # subscriber = None
+        # self.log.info("before Message")
+        # for s in self.session.event_hub._subscribers:
+        #     if 'topic=custom_message_show' == str(s.subscription):
+        #         subscriber = s
+        #         break
+        #
+        # if subscriber is not None:
+        #     id = subs.metadata['id']
+        #
+        # event = ftrack_api.event.base.Event(
+        #     topic='topic=custom_message_show',
+        #     data=data
+        # )
+        # self.session.event_hub.publish(event)
+        # self.log.info("after Message")
+        # self.show_message(event,"Test",True)
+        # self.log.info(event['source'])
         return True
 
@@ -23,3 +51,32 @@ def register(session, **kw):
 
     event = Test_Event(session)
     event.register()
+
+# ]

From e71b3aef8dfe2dfb3dc1e7fdf6a067f956890d42 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 29 Nov 2018 15:17:32 +0100
Subject: [PATCH 26/78] Added a few features from different unmerged branches
---
 pype/ftrack/actions/action_syncToAvalon.py   |  2 +-
 pype/ftrack/actions/ftrack_action_handler.py |  5 ++-
 pype/ftrack/ftrack_utils.py                  | 34 ++++++++++++++++++--
 pype/lib.py                                  | 33 -------------------
 4 files changed, 36 insertions(+), 38 deletions(-)

diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py
index 851820cf1d..f7fd8e9641 100644
--- a/pype/ftrack/actions/action_syncToAvalon.py
+++ b/pype/ftrack/actions/action_syncToAvalon.py
@@ -77,7 +77,7 @@ class SyncToAvalon(BaseAction):
             duplicates = []
 
             for e in self.importable:
-                lib.avalon_check_name(e)
+                ftrack_utils.avalon_check_name(e)
                 if e['name'] in all_names:
                     duplicates.append("'{}'".format(e['name']))
                 else:
diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py
index fa108ec78b..f16647be76 100644
--- a/pype/ftrack/actions/ftrack_action_handler.py
+++ b/pype/ftrack/actions/ftrack_action_handler.py
@@ -74,6 +74,8 @@ class AppAction(object):
             self._launch
         )
 
+        self.log.info("Application '{}' - Registered successfully".format(self.label))
+
     def _discover(self, event):
         args = self._translate_event(
             self.session, event
@@ -435,7 +437,8 @@ class BaseAction(object):
             ),
             self._launch
         )
-        self.log.info("----- action - <" + self.__class__.__name__ + "> - Has been registered -----")
+
+        self.log.info("Action '{}' - Registered successfully".format(self.__class__.__name__))
 
     def _discover(self, event):
         args = self._translate_event(
diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py
index 23531a9fdd..829733efef 100644
--- a/pype/ftrack/ftrack_utils.py
+++ b/pype/ftrack/ftrack_utils.py
@@ -4,10 +4,38 @@ import ftrack_api
 import os
 from pprint import *
 
+def avalon_check_name(self, entity, inSchema = None):
+    alright = True
+    name =
entity['name'] + if " " in name: + alright = False -def checkLogin(): - # check Environments FTRACK_API_USER, FTRACK_API_KEY - pass + data = {} + data['data'] = {} + data['type'] = 'asset' + schema = "avalon-core:asset-2.0" + # TODO have project any REGEX check? + if entity.entity_type in ['Project']: + # data['type'] = 'project' + name = entity['full_name'] + # schema = get_avalon_project_template_schema()['schema'] + # elif entity.entity_type in ['AssetBuild','Library']: + # data['silo'] = 'Assets' + # else: + # data['silo'] = 'Film' + data['silo'] = 'Film' + + if inSchema is not None: + schema = inSchema + data['schema'] = schema + data['name'] = name + try: + avalon.schema.validate(data) + except ValidationError: + alright = False + + if alright is False: + raise ValueError("{} includes unsupported symbols like 'dash' or 'space'".format(name)) def checkRegex(): diff --git a/pype/lib.py b/pype/lib.py index 43f41b336c..0b9e066703 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -364,36 +364,3 @@ def get_avalon_project_template(): # proj_template['work'] = template.anatomy.avalon.work # proj_template['publish'] = template.anatomy.avalon.publish return proj_template - -def avalon_check_name(self, entity, inSchema = None): - alright = True - name = entity['name'] - if " " in name: - alright = False - - data = {} - data['data'] = {} - data['type'] = 'asset' - schema = "avalon-core:asset-2.0" - # TODO have project any REGEX check? - if entity.entity_type in ['Project']: - # data['type'] = 'project' - name = entity['full_name'] - # schema = get_avalon_project_template_schema()['schema'] - # elif entity.entity_type in ['AssetBuild','Library']: - # data['silo'] = 'Assets' - # else: - # data['silo'] = 'Film' - data['silo'] = 'Film' - - if inSchema is not None: - schema = inSchema - data['schema'] = schema - data['name'] = name - try: - avalon.schema.validate(data) - except ValidationError: - alright = False - - if alright is False: - raise ValueError("{} includes unsupported symbols like 'dash' or 'space'".format(name)) From c684a84df2c8636fa63e0cf9fcffae3ac0648d55 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 29 Nov 2018 15:47:14 +0100 Subject: [PATCH 27/78] Avalon check name is working now --- pype/ftrack/ftrack_utils.py | 44 +++++++++++++++++++++++++++++++++---- 1 file changed, 40 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index 829733efef..27bf49d7d1 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -1,10 +1,18 @@ -# fttrack help functions - -import ftrack_api import os +import sys from pprint import * -def avalon_check_name(self, entity, inSchema = None): +import ftrack_api +from pype import lib + +import avalon.io as io +import avalon.api +import avalon +from app.api import Logger + +log = Logger.getLogger(__name__) + +def avalon_check_name(entity, inSchema = None): alright = True name = entity['name'] if " " in name: @@ -38,6 +46,34 @@ def avalon_check_name(self, entity, inSchema = None): raise ValueError("{} includes unsupported symbols like 'dash' or 'space'".format(name)) + +def get_apps(entity): + """ Get apps from project + Requirements: + 'Entity' MUST be object of ftrack entity with entity_type 'Project' + Checking if app from ftrack is available in Templates/bin/{app_name}.toml + + Returns: + Array with dictionaries with app Name and Label + """ + apps = [] + for app in entity['custom_attributes']['applications']: + try: + label = toml.load(lib.which_app(app))['label'] + 
apps.append({'name':app, 'label':label})
+        except Exception as e:
+            print('Error with application {0} - {1}'.format(app, e))
+    return apps
+
+def get_config(entity):
+    config = {}
+    config['schema'] = lib.get_avalon_project_config_schema()
+    config['tasks'] = [{'name': ''}]
+    config['apps'] = get_apps(entity)
+    config['template'] = lib.get_avalon_project_template()
+
+    return config
+
 def checkRegex():
     # _handle_result -> would be solution?
     # """ TODO Check if name of entities match REGEX"""

From 30c4066d2d93b9a330ed71916056d889353868cb Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 29 Nov 2018 16:30:07 +0100
Subject: [PATCH 28/78] events can now show messages to the user
---
 pype/ftrack/events/event_sync_to_avalon.py | 40 +++++----------
 pype/ftrack/events/ftrack_event_handler.py | 24 +++++----
 pype/ftrack/events/test_event.py           | 59 +---------------------
 3 files changed, 28 insertions(+), 95 deletions(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index c4c0db0d55..4854a36f8c 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -17,8 +17,6 @@ class Sync_to_Avalon(BaseEvent):
             if self.ca_mongoid in ent['keys']:
                 return False
         self.proj = None
-        self.nameShotAsset = []
-        self.nameChanged = []
 
         for entity in entities:
             try:
@@ -54,43 +52,33 @@ class Sync_to_Avalon(BaseEvent):
         for entity in entities:
             if entity.entity_type.lower() in ['task']:
                 entity = entity['parent']
+
             try:
                 mongo_id = entity['custom_attributes'][self.ca_mongoid]
             except:
-                return {
-                    'success': False,
-                    'message': "Please run 'Create Attributes' action or create custom attribute 'avalon_mongo_id' manually for {}".format(entity.entity_type)
-                }
+                message = "Please run 'Create Attributes' action or create custom attribute 'avalon_mongo_id' manually for {}".format(entity.entity_type)
+                self.show_message(event, message, False)
+                return
 
             if entity not in importEntities:
                 importEntities.append(entity)
 
         if len(importEntities) < 1:
-            return False
+            return
 
         self.setAvalonAttributes()
 
        io.install()
+
         for entity in importEntities:
             self.importToAvalon(entity)
 
         io.uninstall()
 
-        message = ""
-        if len(self.nameChanged) > 0:
-            names = ", ".join(self.nameChanged)
-            message += "These entities name can't be changed in avalon, please reset DB or use restore action: {} \n".format(names)
-        if len(self.nameShotAsset) > 0:
-            names = ", ".join(self.nameChanged)
-            message += "These entities are already used in avalon, duplicates with new name were created: {}".format(names)
-
         session.commit()
 
         if message != "":
-            return {
-                'success': False,
-                'message': message
-            }
+            self.show_message(event, message, False)
 
         return True
 
@@ -162,22 +150,20 @@ class Sync_to_Avalon(BaseEvent):
         if self.avalon_project is None:
             self.importToAvalon(self.proj)
 
-        eLinks = []
-        for e in entity['link']:
-            tmp = self.session.get(e['type'], e['id'])
-            eLinks.append(tmp)
-
         tasks = []
         for child in entity['children']:
             if child.entity_type in ['Task']:
                 tasks.append(child['name'])
 
         folderStruct = []
-        parents = []
         parentId = None
 
-        for i in range(1, len(eLinks)-1):
-            parents.append(eLinks[i])
+        parents = []
+        for i in range(1, len(entity['link'])-1):
+            tmp_type = entity['link'][i]['type']
+            tmp_id = entity['link'][i]['id']
+            tmp = self.session.get(tmp_type, tmp_id)
+            parents.append(tmp)
 
         for parent in parents:
             parname = self.checkName(parent['name'])
diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py
index f3ad931985..d0ecd53192 100644
--- a/pype/ftrack/events/ftrack_event_handler.py
+++ b/pype/ftrack/events/ftrack_event_handler.py
@@ -140,16 +140,20 @@ class BaseEvent(object):
             message = str(input_message)
         except:
             return
-        source = {}
-        source['id'] = event['source']['applicationId']
-        source['user'] = event['source']['user']
-        self.session.event_hub.publish_reply(event, event['data'], source)
-        # event = ftrack_api.event.base.Event(
-        #     topic='show_message_topic',
-        #     data={'success':result, 'message': message}
-        # )
-        #
-        # self.session.event_hub.publish(event)
+
+        user_id = event['source']['user']['id']
+        self.session.event_hub.publish(
+            ftrack_api.event.base.Event(
+                topic='ftrack.action.trigger-user-interface',
+                data=dict(
+                    type='message',
+                    success=result,
+                    message=message
+                ),
+                target='applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)
+            ),
+            on_error='ignore'
+        )
 
     def _handle_result(self, session, result, entities, event):
         '''Validate the returned result from the action callback'''
diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/test_event.py
index a4e42fc8ae..c2586aa666 100644
--- a/pype/ftrack/events/test_event.py
+++ b/pype/ftrack/events/test_event.py
@@ -10,37 +10,9 @@ class Test_Event(BaseEvent):
 
     def launch(self, session, entities, event):
         '''just a testing event'''
-        result = True
-        message = "test message"
-        data = {
-            'success':result,
-            'message': message,
-        }
-
         self.log.info(event['data']['entities'])
+        # self.show_message(event,"test",True)
-        # event['source']['id']
-
-
-        # self.session.event_hub.publish_reply(event, data, subscriber.metadata)
-
-        # subscriber = None
-        # self.log.info("before Message")
-        # for s in self.session.event_hub._subscribers:
-        #     if 'topic=custom_message_show' == str(s.subscription):
-        #         subscriber = s
-        #         break
-        #
-        # if subscriber is not None:
-        #     id = subs.metadata['id']
-        #
-        # event = ftrack_api.event.base.Event(
-        #     topic='topic=custom_message_show',
-        #     data=data
-        # )
-        # self.session.event_hub.publish(event)
-        # self.log.info("after Message")
-        # self.show_message(event,"Test",True)
-        # self.log.info(event['source'])
         return True
 
@@ -51,32 +23,3 @@ def register(session, **kw):
 
     event = Test_Event(session)
     event.register()
-
-# ]

From f415883a667d3e4c0287d7e561be81b980910757 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 29 Nov 2018 17:02:49 +0100
Subject: [PATCH 29/78] ftrack_utils: Imported toml
---
 pype/ftrack/actions/action_syncToAvalon.py |  2 +-
 pype/ftrack/ftrack_utils.py                | 15 +++++++++++----
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py
index f7fd8e9641..f612e03d43 100644
--- a/pype/ftrack/actions/action_syncToAvalon.py
+++ b/pype/ftrack/actions/action_syncToAvalon.py
@@ -11,7 +11,7 @@ import os
 from pype import lib
 from ftrack_action_handler import BaseAction
 from bson.objectid import ObjectId
 from avalon import io, inventory
-from avalon.vendor import toml
+
 from pype.ftrack import ftrack_utils
 
 class SyncToAvalon(BaseAction):
diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py
index 27bf49d7d1..f2e2dd6d84 100644
--- a/pype/ftrack/ftrack_utils.py
+++ b/pype/ftrack/ftrack_utils.py
@@ -4,10 +4,10 @@ from pprint import *
 
 import ftrack_api
 from pype import lib
-
 import avalon.io as io
 import avalon.api
 import avalon
+from avalon.vendor import toml
 from app.api import Logger
 
 log = Logger.getLogger(__name__)
 
@@ -59,10 +59,17 @@ def get_apps(entity):
     apps = []
     for app in entity['custom_attributes']['applications']:
try:
-            label = toml.load(lib.which_app(app))['label']
-            apps.append({'name':app, 'label':label})
+            app_config = {}
+            app_file = toml.load(avalon.lib.which_app(app))
+            app_config['name'] = app
+            app_config['label'] = app_file['label']
+            if 'ftrack_label' in app_file:
+                app_config['ftrack_label'] = app_file['ftrack_label']
+
+            apps.append(app_config)
+
         except Exception as e:
-            print('Error with application {0} - {1}'.format(app, e))
+            log.warning('Error with application {0} - {1}'.format(app, e))
     return apps

From 476208b1e23c0587e0a90e106aff9d57edde72f1 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 29 Nov 2018 17:11:02 +0100
Subject: [PATCH 30/78] Removed features from different branch
---
 pype/ftrack/ftrack_utils.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py
index f2e2dd6d84..d1d4176153 100644
--- a/pype/ftrack/ftrack_utils.py
+++ b/pype/ftrack/ftrack_utils.py
@@ -60,11 +60,8 @@ def get_apps(entity):
     for app in entity['custom_attributes']['applications']:
         try:
             app_config = {}
-            app_file = toml.load(avalon.lib.which_app(app))
             app_config['name'] = app
-            app_config['label'] = app_file['label']
-            if 'ftrack_label' in app_file:
-                app_config['ftrack_label'] = app_file['ftrack_label']
+            app_config['label'] = toml.load(avalon.lib.which_app(app))['label']
 
             apps.append(app_config)
 

From 40a5a4a065525c1463abfde18b8bf014c6014957 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 29 Nov 2018 18:38:18 +0100
Subject: [PATCH 31/78] updating creation of write nodes
---
 pype/api.py                              | 20 ++++++-
 pype/nuke/__init__.py                    |  9 ++-
 pype/nuke/lib.py                         | 48 ++++++++++++++--
 pype/nuke/templates.py                   |  4 ++
 pype/plugins/nuke/create/create_write.py |  1 +
 pype/templates.py                        | 73 ++++++++++++++++++++++--
 6 files changed, 142 insertions(+), 13 deletions(-)

diff --git a/pype/api.py b/pype/api.py
index 0100d913a6..36094feb7f 100644
--- a/pype/api.py
+++ b/pype/api.py
@@ -25,7 +25,14 @@ from . import (
 )
 from .templates import (
     load_data_from_templates,
-    reset_data_from_templates
+    reset_data_from_templates,
+    get_project_name,
+    get_project_code,
+    get_hiearchy,
+    get_asset,
+    get_task,
+    fill_avalon_workdir,
+    get_version_from_workfile
 )
 
 __all__ = [
@@ -43,9 +50,20 @@ __all__ = [
     "Logger",
 
     # contextual templates
+    # get data to preloaded templates
     "load_data_from_templates",
     "reset_data_from_templates",
 
+    # get contextual data
+    "get_project_name",
+    "get_project_code",
+    "get_hiearchy",
+    "get_asset",
+    "get_task",
+    "fill_avalon_workdir",
+    "get_version_from_workfile",
+
+    # preloaded templates
     "Anatomy",
     "Colorspace",
     "Metadata",
diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py
index 2554ed60af..ba76b0b1e3 100644
--- a/pype/nuke/__init__.py
+++ b/pype/nuke/__init__.py
@@ -7,7 +7,9 @@ from ..
. import api as pype
 
 from pype.nuke import menu
-from .lib import create_write_node
+from .lib import (
+    create_write_node
+)
 
 import nuke
 
@@ -80,6 +82,9 @@ def reload_config():
     import importlib
 
     for module in (
+        "app",
+        "app.api",
+        "{}.api".format(AVALON_CONFIG),
         "{}.templates".format(AVALON_CONFIG),
         "{}.nuke".format(AVALON_CONFIG),
         "{}.nuke.lib".format(AVALON_CONFIG),
@@ -95,7 +100,7 @@ def reload_config():
 
 
 def install():
-
+    pype.fill_avalon_workdir()
     reload_config()
 
     log.info("Registering Nuke plug-ins..")
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 57af77ba5d..60c460e5a0 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -1,18 +1,40 @@
 import sys
+import os
 from collections import OrderedDict
 from pprint import pprint
 from avalon.vendor.Qt import QtGui
 
 import avalon.nuke
-
-from app.api import Logger
-
+import pype.api as pype
 
 import nuke
 
-log = Logger.getLogger(__name__, "nuke")
+log = pype.Logger.getLogger(__name__, "nuke")
 
 self = sys.modules[__name__]
 self._project = None
 
 
+def format_anatomy(data):
+    from .templates import (
+        get_anatomy
+    )
+    file = script_name()
+
+    anatomy = get_anatomy()
+    padding = anatomy.render.padding
+
+    data.update({
+        "hierarchy": pype.get_hiearchy(),
+        "frame": "#"*padding,
+        "VERSION": pype.get_version_from_workfile(file)
+    })
+
+    log.info("format_anatomy:anatomy: {}".format(anatomy))
+    return anatomy.format(data)
+
+
+def script_name():
+    return nuke.root().knob('name').value()
+
+
 def create_write_node(name, avalon_data, data_templates):
     from .templates import (
         get_dataflow,
         get_colorspace
     )
     nuke_dataflow_writes = get_dataflow(**data_templates)
     nuke_colorspace_writes = get_colorspace(**data_templates)
+    try:
+        anatomy_filled = format_anatomy({
+            "subset": avalon_data["subset"],
+            "asset": avalon_data["asset"],
+            "task": pype.get_task(),
+            "family": avalon_data["family"],
+            "project": {"name": pype.get_project_name(),
+                        "code": pype.get_project_code()},
+            "representation": nuke_dataflow_writes.file_type,
+        })
+    except Exception as e:
+        log.error("problem with resolving anatomy template: {}".format(e))
+
+    log.debug("anatomy_filled.render: {}".format(anatomy_filled.render))
 
     data = OrderedDict({
-        "file": "pathToFile/file.exr"
+        "file": str(anatomy_filled.render.path).replace("\\", "/")
     })
 
     # adding dataflow template
@@ -36,7 +72,7 @@ def create_write_node(name, avalon_data, data_templates):
 
     data = avalon.nuke.lib.fix_data_for_node_create(data)
 
-    log.info(data)
+    log.debug(data)
 
     instance = avalon.nuke.lib.add_write_node(
         name,
diff --git a/pype/nuke/templates.py b/pype/nuke/templates.py
index 7a6bf9a229..5b0b45ac76 100644
--- a/pype/nuke/templates.py
+++ b/pype/nuke/templates.py
@@ -3,6 +3,10 @@ from pype import api as pype
 log = pype.Logger.getLogger(__name__, "nuke")
 
 
+def get_anatomy(**kwarg):
+    return pype.Anatomy
+
+
 def get_dataflow(**kwarg):
     host = kwarg.get("host", "nuke")
     cls = kwarg.get("cls", None)
diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py
index b0a6f3de9b..3fffb696ea 100644
--- a/pype/plugins/nuke/create/create_write.py
+++ b/pype/plugins/nuke/create/create_write.py
@@ -24,6 +24,7 @@ class CrateWriteRender(avalon.nuke.Creator):
             "family": self.family
         }
         create_write_node(self.name, self.data, data_templates)
+
         return
 
 
diff --git a/pype/templates.py b/pype/templates.py
index 38e120bec3..7e4b962d52 100644
--- a/pype/templates.py
+++ b/pype/templates.py
@@ -1,8 +1,7 @@
 import os
-
-
-from app.api import (Templates,
Logger)
-
+import re
+from avalon import io
+from app.api import (Templates, Logger, format)
 
 log = Logger.getLogger(__name__, os.getenv("AVALON_APP", "pype-config"))
 
@@ -33,3 +32,69 @@ def reset_data_from_templates():
     api.Colorspace = None
     api.Metadata = None
     log.info("Data from templates were Unloaded...")
+
+
+def get_version_from_workfile(file):
+    pattern = re.compile(r"_v([0-9]*)")
+    try:
+        v_string = pattern.findall(file)[0]
+        return v_string
+    except IndexError:
+        log.error("templates:get_version_from_workfile: "
+                  "`{}` is missing a version string. "
+                  "Example: `v004`".format(file))
+
+
+def get_project_code():
+    return io.find_one({"type": "project"})["data"]["code"]
+
+
+def get_project_name():
+    project_name = os.getenv("AVALON_PROJECT", None)
+    assert project_name, log.error("missing `AVALON_PROJECT` "
+                                   "in environment variables")
+    return project_name
+
+
+def get_asset():
+    asset = os.getenv("AVALON_ASSET", None)
+    assert asset, log.error("missing `AVALON_ASSET` "
+                            "in environment variables")
+    return asset
+
+
+def get_task():
+    task = os.getenv("AVALON_TASK", None)
+    assert task, log.error("missing `AVALON_TASK` "
+                           "in environment variables")
+    return task
+
+
+def get_hiearchy():
+    hierarchy = io.find_one({
+        "type": 'asset',
+        "name": get_asset()}
+    )['data']['parents']
+
+    if hierarchy:
+        # hierarchy = os.path.sep.join(hierarchy)
+        return os.path.join(*hierarchy)
+
+
+def fill_avalon_workdir():
+    awd = os.getenv("AVALON_WORKDIR", None)
+    assert awd, log.error("missing `AVALON_WORKDIR` "
+                          "in environment variables")
+    if "{" not in awd:
+        return
+
+    data = {
+        "hierarchy": get_hiearchy(),
+        "task": get_task(),
+        "asset": get_asset(),
+        "project": {"name": get_project_name(),
+                    "code": get_project_code()}}
+
+    awd_filled = os.path.normpath(format(awd, data))
+    os.environ["AVALON_WORKDIR"] = awd_filled
+    log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))

From cb69f0dce1673419ae65d1b645969f7e2a6db42e Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 29 Nov 2018 18:41:47 +0100
Subject: [PATCH 32/78] cleaning up plugins/nuke/publish/*, updating collecting writes and render local
---
 .../collect_deadline_user.py                  |  0
 .../global/publish/submit_publish_job.py      |  2 +-
 .../collect_render_target.py                  |  0
 .../extract_nuke_write.py                     |  0
 .../publish_image_sequences.py                |  0
 .../submit_deadline.py                        |  0
 .../validate_nuke_settings.py                 |  0
 .../validate_proxy_mode.py                    |  0
 .../validate_write_nodes.py                   |  0
 .../nuke/inventory/select_containers.py       |  2 +-
 pype/plugins/nuke/load/actions.py             |  4 ++--
 pype/plugins/nuke/load/load_sequence.py       |  2 +-
 .../nuke/publish/collect_nuke_writes.py       | 20 +++++++++----------
 .../nuke/publish/extract_output_directory.py  |  2 +-
 pype/plugins/nuke/publish/render_local.py     | 17 ++++++++--------
 15 files changed, 24 insertions(+), 25 deletions(-)
 rename pype/plugins/global/{publish => _publish_unused}/collect_deadline_user.py (100%)
 rename pype/plugins/nuke/{publish => _publish_unused}/collect_render_target.py (100%)
 rename pype/plugins/nuke/{publish => _publish_unused}/extract_nuke_write.py (100%)
 rename pype/plugins/nuke/{publish => _publish_unused}/publish_image_sequences.py (100%)
 rename pype/plugins/nuke/{publish => _publish_unused}/submit_deadline.py (100%)
 rename pype/plugins/nuke/{publish => _publish_unused}/validate_nuke_settings.py (100%)
 rename pype/plugins/nuke/{publish => _publish_unused}/validate_proxy_mode.py (100%)
 rename pype/plugins/nuke/{publish => _publish_unused}/validate_write_nodes.py (100%)
diff --git
a/pype/plugins/global/publish/collect_deadline_user.py b/pype/plugins/global/_publish_unused/collect_deadline_user.py similarity index 100% rename from pype/plugins/global/publish/collect_deadline_user.py rename to pype/plugins/global/_publish_unused/collect_deadline_user.py diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 1933713577..ee1934c05e 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -125,7 +125,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin): hosts = ["fusion", "maya", "nuke"] families = [ - "saver.deadline", + "render", "renderlayer", "imagesequence" ] diff --git a/pype/plugins/nuke/publish/collect_render_target.py b/pype/plugins/nuke/_publish_unused/collect_render_target.py similarity index 100% rename from pype/plugins/nuke/publish/collect_render_target.py rename to pype/plugins/nuke/_publish_unused/collect_render_target.py diff --git a/pype/plugins/nuke/publish/extract_nuke_write.py b/pype/plugins/nuke/_publish_unused/extract_nuke_write.py similarity index 100% rename from pype/plugins/nuke/publish/extract_nuke_write.py rename to pype/plugins/nuke/_publish_unused/extract_nuke_write.py diff --git a/pype/plugins/nuke/publish/publish_image_sequences.py b/pype/plugins/nuke/_publish_unused/publish_image_sequences.py similarity index 100% rename from pype/plugins/nuke/publish/publish_image_sequences.py rename to pype/plugins/nuke/_publish_unused/publish_image_sequences.py diff --git a/pype/plugins/nuke/publish/submit_deadline.py b/pype/plugins/nuke/_publish_unused/submit_deadline.py similarity index 100% rename from pype/plugins/nuke/publish/submit_deadline.py rename to pype/plugins/nuke/_publish_unused/submit_deadline.py diff --git a/pype/plugins/nuke/publish/validate_nuke_settings.py b/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py similarity index 100% rename from pype/plugins/nuke/publish/validate_nuke_settings.py rename to pype/plugins/nuke/_publish_unused/validate_nuke_settings.py diff --git a/pype/plugins/nuke/publish/validate_proxy_mode.py b/pype/plugins/nuke/_publish_unused/validate_proxy_mode.py similarity index 100% rename from pype/plugins/nuke/publish/validate_proxy_mode.py rename to pype/plugins/nuke/_publish_unused/validate_proxy_mode.py diff --git a/pype/plugins/nuke/publish/validate_write_nodes.py b/pype/plugins/nuke/_publish_unused/validate_write_nodes.py similarity index 100% rename from pype/plugins/nuke/publish/validate_write_nodes.py rename to pype/plugins/nuke/_publish_unused/validate_write_nodes.py diff --git a/pype/plugins/nuke/inventory/select_containers.py b/pype/plugins/nuke/inventory/select_containers.py index 89ac31d660..339e3a4992 100644 --- a/pype/plugins/nuke/inventory/select_containers.py +++ b/pype/plugins/nuke/inventory/select_containers.py @@ -1,7 +1,7 @@ from avalon import api -class NukeSelectContainers(api.InventoryAction): +class SelectContainers(api.InventoryAction): label = "Select Containers" icon = "mouse-pointer" diff --git a/pype/plugins/nuke/load/actions.py b/pype/plugins/nuke/load/actions.py index 94ae2999f6..f3b7748f01 100644 --- a/pype/plugins/nuke/load/actions.py +++ b/pype/plugins/nuke/load/actions.py @@ -5,7 +5,7 @@ from avalon import api -class NukeSetFrameRangeLoader(api.Loader): +class SetFrameRangeLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" families = ["animation", @@ -38,7 +38,7 @@ class 
NukeSetFrameRangeLoader(api.Loader): lib.update_frame_range(start, end) -class NukeSetFrameRangeWithHandlesLoader(api.Loader): +class SetFrameRangeWithHandlesLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" families = ["animation", diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 8d89998aa8..0b771a7007 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -118,7 +118,7 @@ def loader_shift(node, frame, relative=True): return int(shift) -class NukeLoadSequence(api.Loader): +class LoadSequence(api.Loader): """Load image sequence into Nuke""" families = ["imagesequence"] diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py index 2774c2ed54..97948f3f8c 100644 --- a/pype/plugins/nuke/publish/collect_nuke_writes.py +++ b/pype/plugins/nuke/publish/collect_nuke_writes.py @@ -34,24 +34,24 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): output_type = "mov" # Get frame range - start_frame = int(nuke.root()["first_frame"].getValue()) - end_frame = int(nuke.root()["last_frame"].getValue()) + first_frame = int(nuke.root()["first_frame"].getValue()) + last_frame = int(nuke.root()["last_frame"].getValue()) if node["use_limit"].getValue(): - start_frame = int(node["first"].getValue()) - end_frame = int(node["last"].getValue()) + first_frame = int(node["first"].getValue()) + last_frame = int(node["last"].getValue()) # Add collection collection = None path = nuke.filename(node) - path += " [{0}-{1}]".format(start_frame, end_frame) + path += " [{0}-{1}]".format(first_frame, last_frame) collection = clique.parse(path) subset = node.name() # Include start and end render frame in label label = "{subset} ({start}-{end})".format(subset=subset, - start=int(start_frame), - end=int(end_frame)) + start=int(first_frame), + end=int(last_frame)) # Create instance instance = context.create_instance(subset) @@ -71,7 +71,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): else: value = False - instance.data.update({ "asset": os.environ["AVALON_ASSET"], # todo: not a constant "path": nuke.filename(node), @@ -83,12 +82,11 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "family": "write", "publish": value, "collection": collection, - "start_frame": start_frame, - "end_frame": end_frame, + "first_frame": first_frame, + "last_frame": last_frame, "output_type": output_type }) instances.append(instance) - self.log.info("writeNode collected: {}".format(subset)) context.data["write_instances"] = instances diff --git a/pype/plugins/nuke/publish/extract_output_directory.py b/pype/plugins/nuke/publish/extract_output_directory.py index 3064fad3c5..36ddb35e30 100644 --- a/pype/plugins/nuke/publish/extract_output_directory.py +++ b/pype/plugins/nuke/publish/extract_output_directory.py @@ -20,7 +20,7 @@ class ExtractOutputDirectory(pyblish.api.InstancePlugin): path = instance.data["collection"].format() if "output_path" in instance.data.keys(): - path = instance.data["output_path"] + path = instance.data["path"] if not path: return diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index eebdefc29f..0bd7495d6f 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -1,6 +1,5 @@ import pyblish.api - -import avalon.fusion as fusion +import nuke class NukeRenderLocal(pyblish.api.InstancePlugin): @@ -28,17 +27,19 @@ class 
NukeRenderLocal(pyblish.api.InstancePlugin): else: context.data[key] = True - current_comp = context.data["currentFile"] - start_frame = instance.data["startFrame"] - end_frame = instance.data["end_frame"] + self.log.debug("instance collected: {}".format(instance.data)) + + first_frame = instance.data.get("first_frame", None) + last_frame = instance.data.get("last_frame", None) + node_subset_name = instance.data.get("subset", None) self.log.info("Starting render") - self.log.info("Start frame: {}".format(start_frame)) - self.log.info("End frame: {}".format(end_frame)) + self.log.info("Start frame: {}".format(first_frame)) + self.log.info("End frame: {}".format(last_frame)) # Render frames result = nuke.execute( - node.name(), + node_subset_name, int(first_frame), int(last_frame) ) From 8ab05e77012a9575d0e3b8fd24f030e22f9bfcaf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 30 Nov 2018 10:49:38 +0100 Subject: [PATCH 33/78] local render --- pype/plugins/global/publish/submit_publish_job.py | 2 +- pype/plugins/nuke/publish/collect_nuke_writes.py | 2 +- pype/plugins/nuke/publish/render_local.py | 8 +++----- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index ee1934c05e..cb852f7c43 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -125,7 +125,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin): hosts = ["fusion", "maya", "nuke"] families = [ - "render", + "render.deadline", "renderlayer", "imagesequence" ] diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py index 97948f3f8c..1459f828f0 100644 --- a/pype/plugins/nuke/publish/collect_nuke_writes.py +++ b/pype/plugins/nuke/publish/collect_nuke_writes.py @@ -78,7 +78,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "outputDir": os.path.dirname(nuke.filename(node)), "ext": ext, # todo: should be redundant "label": label, - "families": ["render"], + "families": ["render.local"], "family": "write", "publish": value, "collection": collection, diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index 0bd7495d6f..9e69134600 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -14,7 +14,7 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder label = "Render Local" hosts = ["nuke"] - families = ["write", "render.local"] + families = ["render.local"] def process(self, instance): @@ -38,11 +38,9 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): self.log.info("End frame: {}".format(last_frame)) # Render frames - result = nuke.execute( + nuke.execute( node_subset_name, int(first_frame), int(last_frame) ) - - if not result: - raise RuntimeError("Comp render failed") + self.log.info('Finished render') From e8c185b58826151ae0af1819a7eb11118c180aac Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 30 Nov 2018 12:14:32 +0100 Subject: [PATCH 34/78] change collect write to collecting all instances --- .../nuke/publish/collect_nuke_writes.py | 97 ++----------------- 1 file changed, 10 insertions(+), 87 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py index 1459f828f0..b92d03c40b 100644 --- a/pype/plugins/nuke/publish/collect_nuke_writes.py +++ 
b/pype/plugins/nuke/publish/collect_nuke_writes.py
@@ -6,18 +6,17 @@ import clique
 
 
 @pyblish.api.log
-class CollectNukeWrites(pyblish.api.ContextPlugin):
+class CollectNukeInstances(pyblish.api.ContextPlugin):
     """Collect all write nodes."""
 
     order = pyblish.api.CollectorOrder
-    label = "Writes"
+    label = "Collect Instances"
     hosts = ["nuke", "nukeassist"]
 
     # targets = ["default", "process"]
 
     def process(self, context):
-        instances = []
 
         # creating instances per write node
         for node in nuke.allNodes():
             if node.Class() != "Write":
@@ -44,7 +43,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
         # Add collection
         collection = None
         path = nuke.filename(node)
-        path += " [{0}-{1}]".format(first_frame, last_frame)
+        path += " [{0}-{1}]".format(str(first_frame), str(last_frame))
         collection = clique.parse(path)
 
         subset = node.name()
@@ -63,13 +62,6 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             knob.setValue(False)
             node.addKnob(knob)
 
-        # Compare against selection
-        selection = instance.context.data.get("selection", [])
-        if selection:
-            if list(set(instance) and set(selection)):
-                value = True
-            else:
-                value = False
 
         instance.data.update({
             "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
@@ -80,87 +72,18 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             "label": label,
             "families": ["render.local"],
             "family": "write",
-            "publish": value,
+            "publish": node.knob("publish").value(),
             "collection": collection,
             "first_frame": first_frame,
             "last_frame": last_frame,
             "output_type": output_type
         })
 
-        instances.append(instance)
-        self.log.info("writeNode collected: {}".format(subset))
 
-        context.data["write_instances"] = instances
+        # Sort/group by family (preserving local order)
+        context[:] = sorted(context, key=self.sort_by_family)
 
-        context.data["instances"] = (
-            context.data.get("instances", []) + instances)
+        return context
 
-
-class CollectNukeWritesPublish(pyblish.api.ContextPlugin):
-    """Collect all write instances for publishing."""
-
-    order = CollectNukeWrites.order + 0.01
-    label = "Writes Publish"
-    hosts = ["nuke", "nukeassist"]
-
-    # targets = ["default"]
-
-    def process(self, context):
-
-        for item in context.data["write_instances"]:
-
-            # If the collection was not generated.
-            if not item.data["collection"]:
-                continue
-
-            missing_files = []
-            for f in item.data["collection"]:
-                # print f
-                if not os.path.exists(f):
-                    missing_files.append(f)
-
-            for f in missing_files:
-                item.data["collection"].remove(f)
-
-            if not list(item.data["collection"]):
-                continue
-
-            instance = context.create_instance(item.data["name"])
-
-            for key, value in item.data.iteritems():
-                # print key, value
-                instance.data[key] = value
-
-            instance.data["families"] = ["publish"]
-            instance.data["label"] += (
-                " - " + os.path.basename(instance.data["collection"].format()))
-
-            for node in item:
-                instance.add(node)
-
-            # Adding/Checking publish attribute
-            if "publish" not in node.knobs():
-                knob = nuke.Boolean_Knob("publish", "Publish")
-                knob.setValue(False)
-                node.addKnob(knob)
-
-            value = bool(node["publish"].getValue())
-
-            # Compare against selection
-            selection = instance.context.data.get("selection", [])
-            if selection:
-                if list(set(instance) & set(selection)):
-                    value = True
-                else:
-                    value = False
-
-            instance.data["publish"] = value
-
-        def instanceToggled(instance, value):
-            # Removing and adding the knob to support NukeAssist, where
-            # you can't modify the knob value directly.
-            instance[0].removeKnob(instance[0]["publish"])
-            knob = nuke.Boolean_Knob("publish", "Publish")
-            knob.setValue(value)
-            instance[0].addKnob(knob)
-
-        instance.data["instanceToggled"] = instanceToggled
+    def sort_by_family(self, instance):
+        """Sort by family"""
+        return instance.data.get("families", instance.data.get("family"))

From c080823a44c60c2d0b26710edc90362d1e094f08 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 30 Nov 2018 16:43:54 +0100
Subject: [PATCH 35/78] fix create write [subset, families]
---
 pype/nuke/lib.py                         |  29 ++++---
 pype/nuke/templates.py                   |  18 ++--
 pype/plugins/nuke/create/create_write.py | 101 ++++++++++++++++++++---
 setup/nuke/nuke_path/menu.py             |  20 ++---
 4 files changed, 124 insertions(+), 44 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 60c460e5a0..9db0280b5e 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -1,5 +1,4 @@
 import sys
-import os
 from collections import OrderedDict
 from pprint import pprint
 from avalon.vendor.Qt import QtGui
@@ -27,7 +26,7 @@ def format_anatomy(data):
         "VERSION": pype.get_version_from_workfile(file)
     })
 
-    log.info("format_anatomy:anatomy: {}".format(anatomy))
+    # log.info("format_anatomy:anatomy: {}".format(anatomy))
     return anatomy.format(data)
 
 
@@ -35,19 +34,19 @@ def script_name():
     return nuke.root().knob('name').value()
 
 
-def create_write_node(name, avalon_data, data_templates):
+def create_write_node(name, data):
     from .templates import (
         get_dataflow,
         get_colorspace
     )
-    nuke_dataflow_writes = get_dataflow(**data_templates)
-    nuke_colorspace_writes = get_colorspace(**data_templates)
+    nuke_dataflow_writes = get_dataflow(**data)
+    nuke_colorspace_writes = get_colorspace(**data)
     try:
         anatomy_filled = format_anatomy({
-            "subset": avalon_data["subset"],
-            "asset": avalon_data["asset"],
+            "subset": data["avalon"]["subset"],
+            "asset": data["avalon"]["asset"],
             "task": pype.get_task(),
-            "family": avalon_data["family"],
+            "family": data["avalon"]["family"],
             "project": {"name": pype.get_project_name(),
                         "code": pype.get_project_code()},
             "representation": nuke_dataflow_writes.file_type,
         })
     except Exception as e:
         log.error("problem with resolving anatomy template: {}".format(e))
 
     log.debug("anatomy_filled.render: {}".format(anatomy_filled.render))
 
-    data = OrderedDict({
+    _data = OrderedDict({
         "file": str(anatomy_filled.render.path).replace("\\", "/")
     })
 
     # adding dataflow template
-    {data.update({k: v})
+    {_data.update({k: v})
      for k, v in nuke_dataflow_writes.items()
      if k not in ["id", "previous"]}
 
     # adding colorspace template
-    {data.update({k: v})
+    {_data.update({k: v})
      for k, v in nuke_colorspace_writes.items()}
 
-    data = avalon.nuke.lib.fix_data_for_node_create(data)
+    _data = avalon.nuke.lib.fix_data_for_node_create(_data)
 
-    log.debug(data)
+    log.debug(_data)
 
     instance = avalon.nuke.lib.add_write_node(
         name,
-        **data
+        **_data
     )
 
-    instance = avalon.nuke.lib.imprint(instance, avalon_data)
+    instance = avalon.nuke.lib.imprint(instance, data["avalon"])
 
     return instance
diff --git a/pype/nuke/templates.py b/pype/nuke/templates.py
index 5b0b45ac76..16cb6062a2 100644
--- a/pype/nuke/templates.py
+++ b/pype/nuke/templates.py
@@ -8,32 +8,34 @@ def get_anatomy(**kwarg):
 
 
 def get_dataflow(**kwarg):
+    log.info(kwarg)
     host = kwarg.get("host", "nuke")
-    cls = kwarg.get("cls", None)
-    family = kwarg.get("family", None)
+    cls = kwarg.get("class", None)
+    preset = kwarg.get("preset", None)
     assert any([host, cls]), log.error("nuke.templates.get_dataflow():"
                                        "Missing mandatory kwargs `host`, `cls`")
 
    nuke_dataflow = getattr(pype.Dataflow,
str(host), None) nuke_dataflow_node = getattr(nuke_dataflow.nodes, str(cls), None) - if family: - nuke_dataflow_node = getattr(nuke_dataflow_node, str(family), None) + if preset: + nuke_dataflow_node = getattr(nuke_dataflow_node, str(preset), None) log.info("Dataflow: {}".format(nuke_dataflow_node)) return nuke_dataflow_node def get_colorspace(**kwarg): + log.info(kwarg) host = kwarg.get("host", "nuke") - cls = kwarg.get("cls", None) - family = kwarg.get("family", None) + cls = kwarg.get("class", None) + preset = kwarg.get("preset", None) assert any([host, cls]), log.error("nuke.templates.get_colorspace():" "Missing mandatory kwargs `host`, `cls`") nuke_colorspace = getattr(pype.Colorspace, str(host), None) nuke_colorspace_node = getattr(nuke_colorspace, str(cls), None) - if family: - nuke_colorspace_node = getattr(nuke_colorspace_node, str(family), None) + if preset: + nuke_colorspace_node = getattr(nuke_colorspace_node, str(preset), None) log.info("Colorspace: {}".format(nuke_colorspace_node)) return nuke_colorspace_node diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index 3fffb696ea..c42e3262e8 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -1,3 +1,4 @@ +from collections import OrderedDict import avalon.api import avalon.nuke from pype.nuke import ( @@ -5,25 +6,55 @@ from pype.nuke import ( ) from pype import api as pype + log = pype.Logger.getLogger(__name__, "nuke") +def subset_to_families(subset, family, families): + subset_sufx = str(subset).replace(family, "") + new_subset = families + subset_sufx + return "{}.{}".format(family, new_subset) + + class CrateWriteRender(avalon.nuke.Creator): name = "WriteRender" label = "Create Write Render" hosts = ["nuke"] - family = "render" + family = "render" # change this to template + families = "write" # do not change! 
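+    # note: `family` selects the dataflow/colorspace preset from templates,
+    # while `families` carries the node class key ("write"); __init__ below
+    # swaps the two into the instance data before publishing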
icon = "sign-out" + def __init__(self, *args, **kwargs): + super(CrateWriteRender, self).__init__(*args, **kwargs) + + data = OrderedDict() + + # creating pype subset + data["subset"] = subset_to_families( + self.data["subset"], + self.family, + self.families + ) + # swaping family with families + data["family"] = self.families + data["families"] = self.family + + {data.update({k: v}) for k, v in self.data.items() + if k not in data.keys()} + self.data = data + def process(self): instance = super(CrateWriteRender, self).process() if not instance: data_templates = { - "cls": "write", - "family": self.family + "class": self.families, + # only one is required + "preset": self.family, + "avalon": self.data } - create_write_node(self.name, self.data, data_templates) + + create_write_node(self.name, data_templates) return @@ -33,18 +64,42 @@ class CrateWritePrerender(avalon.nuke.Creator): label = "Create Write Prerender" hosts = ["nuke"] family = "prerender" + families = "write" icon = "sign-out" + def __init__(self, *args, **kwargs): + super(CrateWritePrerender, self).__init__(*args, **kwargs) + + data = OrderedDict() + + # creating pype subset + data["subset"] = subset_to_families( + self.data["subset"], + self.family, + self.families + ) + # swaping family with families + data["family"] = self.families + data["families"] = self.family + + {data.update({k: v}) for k, v in self.data.items() + if k not in data.keys()} + self.data = data + def process(self): instance = super(CrateWritePrerender, self).process() if not instance: data_templates = { - "cls": "write", - "family": self.family + "class": self.families, + # only one is required + "preset": self.family, + "avalon": self.data } - create_write_node(self.name, self.data, data_templates) - return None + + create_write_node(self.name, data_templates) + + return class CrateWriteStill(avalon.nuke.Creator): @@ -52,15 +107,39 @@ class CrateWriteStill(avalon.nuke.Creator): label = "Create Write Still" hosts = ["nuke"] family = "still" + families = "write" icon = "image" + def __init__(self, *args, **kwargs): + super(CrateWriteStill, self).__init__(*args, **kwargs) + + data = OrderedDict() + + # creating pype subset + data["subset"] = subset_to_families( + self.data["subset"], + self.family, + self.families + ) + # swaping family with families + data["family"] = self.families + data["families"] = self.family + + {data.update({k: v}) for k, v in self.data.items() + if k not in data.keys()} + self.data = data + def process(self): instance = super(CrateWriteStill, self).process() if not instance: data_templates = { - "cls": "write", - "family": self.family + "class": self.families, + # only one is required + "preset": self.family, + "avalon": self.data } - create_write_node(self.name, self.data, data_templates) + + create_write_node(self.name, data_templates) + return diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index a454b8bd66..6cfa1e3373 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -2,16 +2,16 @@ from avalon.tools import workfiles import nuke # auto fix version paths in write nodes following root name of script -cmd = ''' -import re -rootVersion=re.search('[vV]\d+', os.path.split(nuke.root().name())[1]).group() -for each in nuke.allNodes(): - if each.Class() == 'Write': - each['file'].setValue(re.sub('[vV]\d+', rootVersion, each['file'].value())) -''' -nuke.knobDefault('onScriptSave', cmd) - -print '\n>>> menu.py: Function for automatic check of version in write nodes is added\n' +# cmd 
= ''' +# import re +# rootVersion=re.search('[vV]\d+', os.path.split(nuke.root().name())[1]).group() +# for each in nuke.allNodes(): +# if each.Class() == 'Write': +# each['file'].setValue(re.sub('[vV]\d+', rootVersion, each['file'].value())) +# ''' +# nuke.knobDefault('onScriptSave', cmd) +# +# print '\n>>> menu.py: Function for automatic check of version in write nodes is added\n' ffmpeg_cmd = '''if nuke.env['LINUX']: nuke.tcl('load ffmpegReader') From 6f9d55ce30d2ef4763ad1ea53596e637534731bc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 30 Nov 2018 17:11:03 +0100 Subject: [PATCH 36/78] updating create write still --- pype/nuke/lib.py | 2 ++ pype/plugins/nuke/create/create_write.py | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 9db0280b5e..7e2539d073 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -73,6 +73,8 @@ def create_write_node(name, data): log.debug(_data) + _data["frame_range"] = data.get("frame_range", None) + instance = avalon.nuke.lib.add_write_node( name, **_data diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index c42e3262e8..673d75911d 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -47,14 +47,13 @@ class CrateWriteRender(avalon.nuke.Creator): instance = super(CrateWriteRender, self).process() if not instance: - data_templates = { + write_data = { "class": self.families, - # only one is required "preset": self.family, "avalon": self.data } - create_write_node(self.name, data_templates) + create_write_node(self.name, write_data) return @@ -90,14 +89,13 @@ class CrateWritePrerender(avalon.nuke.Creator): instance = super(CrateWritePrerender, self).process() if not instance: - data_templates = { + write_data = { "class": self.families, - # only one is required "preset": self.family, "avalon": self.data } - create_write_node(self.name, data_templates) + create_write_node(self.name, write_data) return @@ -130,16 +128,18 @@ class CrateWriteStill(avalon.nuke.Creator): self.data = data def process(self): + import nuke instance = super(CrateWriteStill, self).process() if not instance: - data_templates = { + write_data = { "class": self.families, - # only one is required "preset": self.family, - "avalon": self.data + "avalon": self.data, + "frame_range": [nuke.frame(), nuke.frame()] } - create_write_node(self.name, data_templates) + nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame())) + create_write_node(self.name, write_data) return From 13bcdb0959146e69e6ded7d43df7e4411c08720f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 30 Nov 2018 18:59:20 +0100 Subject: [PATCH 37/78] publish instances update --- pype/nuke/lib.py | 114 ++++++++++-------- ...llect_nuke_writes.py => collect_writes.py} | 19 +-- .../plugins/nuke/publish/collect_instances.py | 53 ++++++++ 3 files changed, 126 insertions(+), 60 deletions(-) rename pype/plugins/nuke/{publish/collect_nuke_writes.py => collect_writes.py} (88%) create mode 100644 pype/plugins/nuke/publish/collect_instances.py diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 9db0280b5e..a94bb5932b 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -81,57 +81,6 @@ def create_write_node(name, data): return instance -def update_frame_range(start, end, root=None): - """Set Nuke script start and end frame range - - Args: - start (float, int): start frame - end (float, int): end frame - root (object, Optional): root object 
from nuke's script - - Returns: - None - - """ - - knobs = { - "first_frame": start, - "last_frame": end - } - - with avalon.nuke.viewer_update_and_undo_stop(): - for key, value in knobs.items(): - if root: - root[key].setValue(value) - else: - nuke.root()[key].setValue(value) - - -def get_additional_data(container): - """Get Nuke's related data for the container - - Args: - container(dict): the container found by the ls() function - - Returns: - dict - """ - - node = container["_tool"] - tile_color = node['tile_color'].value() - if tile_color is None: - return {} - - hex = '%08x' % tile_color - rgba = [ - float(int(hex[0:2], 16)) / 255.0, - float(int(hex[2:4], 16)) / 255.0, - float(int(hex[4:6], 16)) / 255.0 - ] - - return {"color": QtGui.QColor().fromRgbF(rgba[0], rgba[1], rgba[2])} - - def set_viewers_colorspace(viewer): assert isinstance(viewer, dict), log.error( "set_viewers_colorspace(): argument should be dictionary") @@ -217,3 +166,66 @@ def set_colorspace(): except TypeError: log.error("Nuke is not in templates! \n\n\n" "contact your supervisor!") + + +def get_avalon_knob_data(node): + import toml + try: + data = toml.loads(node['avalon'].value()) + except: + return None + return data + +# TODO: bellow functions are wip and needs to be check where they are used +# ------------------------------------ + + +def update_frame_range(start, end, root=None): + """Set Nuke script start and end frame range + + Args: + start (float, int): start frame + end (float, int): end frame + root (object, Optional): root object from nuke's script + + Returns: + None + + """ + + knobs = { + "first_frame": start, + "last_frame": end + } + + with avalon.nuke.viewer_update_and_undo_stop(): + for key, value in knobs.items(): + if root: + root[key].setValue(value) + else: + nuke.root()[key].setValue(value) + + +def get_additional_data(container): + """Get Nuke's related data for the container + + Args: + container(dict): the container found by the ls() function + + Returns: + dict + """ + + node = container["_tool"] + tile_color = node['tile_color'].value() + if tile_color is None: + return {} + + hex = '%08x' % tile_color + rgba = [ + float(int(hex[0:2], 16)) / 255.0, + float(int(hex[2:4], 16)) / 255.0, + float(int(hex[4:6], 16)) / 255.0 + ] + + return {"color": QtGui.QColor().fromRgbF(rgba[0], rgba[1], rgba[2])} diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/collect_writes.py similarity index 88% rename from pype/plugins/nuke/publish/collect_nuke_writes.py rename to pype/plugins/nuke/collect_writes.py index b92d03c40b..1d46d889dd 100644 --- a/pype/plugins/nuke/publish/collect_nuke_writes.py +++ b/pype/plugins/nuke/collect_writes.py @@ -13,8 +13,6 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): label = "Collect Instances" hosts = ["nuke", "nukeassist"] - # targets = ["default", "process"] - def process(self, context): # creating instances per write node @@ -43,10 +41,12 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): # Add collection collection = None path = nuke.filename(node) - path += " [{0}-{1}]".format(str(first_frame), str(last_frame)) + path += " [{0}-{1}]".format( + str(first_frame), + str(last_frame) + ) collection = clique.parse(path) - subset = node.name() # Include start and end render frame in label label = "{subset} ({start}-{end})".format(subset=subset, start=int(first_frame), @@ -62,23 +62,24 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): knob.setValue(False) node.addKnob(knob) - instance.data.update({ - "asset": 
os.environ["AVALON_ASSET"], # todo: not a constant + "asset": os.environ["AVALON_ASSET"], "path": nuke.filename(node), - "subset": subset, "outputDir": os.path.dirname(nuke.filename(node)), "ext": ext, # todo: should be redundant "label": label, "families": ["render.local"], - "family": "write", - "publish": node.knob("publish"), "collection": collection, "first_frame": first_frame, "last_frame": last_frame, "output_type": output_type }) + def instanceToggled(instance, value): + instance[0]["publish"].setValue(value) + + instance.data["instanceToggled"] = instanceToggled + # Sort/grouped by family (preserving local index) context[:] = sorted(context, key=self.sort_by_family) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py new file mode 100644 index 0000000000..387716dbca --- /dev/null +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -0,0 +1,53 @@ +import os + +import nuke +import pyblish.api +from pype.nuke.lib import get_avalon_knob_data + + +@pyblish.api.log +class CollectNukeInstances(pyblish.api.ContextPlugin): + """Collect all write nodes.""" + + order = pyblish.api.CollectorOrder + label = "Collect Instances" + hosts = ["nuke", "nukeassist"] + + def process(self, context): + + # creating instances per write node + for node in nuke.allNodes(): + + if node["disable"].value(): + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data(node) + if not avalon_knob_data: + continue + subset = avalon_knob_data["subset"] + + # Create instance + instance = context.create_instance(subset) + instance.add(node) + + instance.data.update({ + "asset": os.environ["AVALON_ASSET"], + "label": node.name(), + "name": node.name(), + "subset": subset, + "families": [avalon_knob_data["families"]], + "family": avalon_knob_data["family"], + "publish": node.knob("publish").value() + }) + self.log.info("collected instance: {}".format(instance.data)) + # Sort/grouped by family (preserving local index) + context[:] = sorted(context, key=self.sort_by_family) + + self.log.info("context: {}".format(context)) + + return context + + def sort_by_family(self, instance): + """Sort by family""" + return instance.data.get("families", instance.data.get("family")) From a18125803a94dece8a163fcd342542c40115cf2d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 01:39:19 +0100 Subject: [PATCH 38/78] update publishing render locally --- pype/nuke/__init__.py | 31 ++++----- .../plugins/nuke/publish/collect_instances.py | 17 +++-- .../nuke/{ => publish}/collect_writes.py | 68 +++++++++++-------- .../nuke/publish/validate_collection.py | 39 +++++++++++ 4 files changed, 101 insertions(+), 54 deletions(-) rename pype/plugins/nuke/{ => publish}/collect_writes.py (51%) create mode 100644 pype/plugins/nuke/publish/validate_collection.py diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index ba76b0b1e3..371fe2a786 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -144,29 +144,24 @@ def uninstall(): pype.reset_data_from_templates() -def on_pyblish_instance_toggled(instance, new_value, old_value): - """Toggle saver tool passthrough states on instance toggles.""" +def on_pyblish_instance_toggled(instance, old_value, new_value): + """Toggle node passthrough states on instance toggles.""" + self.log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( + instance, old_value, new_value)) from avalon.nuke import ( viewer_update_and_undo_stop, add_publish_knob ) - writes = [n for n in instance if - 
n.Class() is "Write"] - if not writes: - return - # Whether instances should be passthrough based on new value - passthrough = not new_value - with viewer_update_and_undo_stop(): - for n in writes: - try: - n["publish"].value() - except ValueError: - n = add_publish_knob(n) - log.info(" `Publish` knob was added to write node..") - current = n["publish"].value() - if current != passthrough: - n["publish"].setValue(passthrough) + with viewer_update_and_undo_stop(): + n = instance[0] + try: + n["publish"].value() + except ValueError: + n = add_publish_knob(n) + log.info(" `Publish` knob was added to write node..") + + n["publish"].setValue(new_value) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 387716dbca..f1fa1276c2 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -7,18 +7,21 @@ from pype.nuke.lib import get_avalon_knob_data @pyblish.api.log class CollectNukeInstances(pyblish.api.ContextPlugin): - """Collect all write nodes.""" + """Collect all nodes with Avalon knob.""" order = pyblish.api.CollectorOrder label = "Collect Instances" hosts = ["nuke", "nukeassist"] def process(self, context): - + instances = [] # creating instances per write node for node in nuke.allNodes(): - if node["disable"].value(): + try: + if node["disable"].value(): + continue + except Exception: continue # get data from avalon knob @@ -41,12 +44,14 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "publish": node.knob("publish").value() }) self.log.info("collected instance: {}".format(instance.data)) + instances.append(instance) + + context.data["instances"] = instances + # Sort/grouped by family (preserving local index) context[:] = sorted(context, key=self.sort_by_family) - self.log.info("context: {}".format(context)) - - return context + self.log.debug("context: {}".format(context)) def sort_by_family(self, instance): """Sort by family""" diff --git a/pype/plugins/nuke/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py similarity index 51% rename from pype/plugins/nuke/collect_writes.py rename to pype/plugins/nuke/publish/collect_writes.py index 1d46d889dd..e83402ba48 100644 --- a/pype/plugins/nuke/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -3,24 +3,25 @@ import os import nuke import pyblish.api import clique +import logging +log = logging.getLogger(__name__) @pyblish.api.log -class CollectNukeInstances(pyblish.api.ContextPlugin): +class CollectNukeWrites(pyblish.api.ContextPlugin): """Collect all write nodes.""" - order = pyblish.api.CollectorOrder - label = "Collect Instances" + order = pyblish.api.CollectorOrder + 0.1 + label = "Collect Writes" hosts = ["nuke", "nukeassist"] def process(self, context): + for instance in context.data["instances"]: + self.log.debug("checking instance: {}".format(instance)) + node = instance[0] - # creating instances per write node - for node in nuke.allNodes(): if node.Class() != "Write": continue - if node["disable"].value(): - continue # Determine defined file type ext = node["file_type"].value() @@ -41,49 +42,56 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): # Add collection collection = None path = nuke.filename(node) + + if "#" in path: + path_split = path.split("#") + length = len(path_split)-1 + path = "{}%0{}d{}".format(path_split[0], length, path_split[-1]) + path += " [{0}-{1}]".format( str(first_frame), str(last_frame) ) - collection = clique.parse(path) + 
self.log.info("collection: {}".format(path)) + + try: + collection = clique.parse(path) + + except Exception as e: + self.log.warning(e) + collection = None # Include start and end render frame in label - label = "{subset} ({start}-{end})".format(subset=subset, - start=int(first_frame), - end=int(last_frame)) + name = node.name() - # Create instance - instance = context.create_instance(subset) - instance.add(node) - - # Adding/Checking publish and render target attribute - if "render_local" not in node.knobs(): - knob = nuke.Boolean_Knob("render_local", "Local rendering") - knob.setValue(False) - node.addKnob(knob) + label = "{0} ({1}-{2})".format( + name, + int(first_frame), + int(last_frame) + ) + self.log.debug("checking for error: {}".format(label)) + # # Adding/Checking publish and render target attribute + # if "render_local" not in node.knobs(): + # knob = nuke.Boolean_Knob("render_local", "Local rendering") + # knob.setValue(False) + # node.addKnob(knob) + self.log.debug("checking for error: {}".format(label)) instance.data.update({ - "asset": os.environ["AVALON_ASSET"], "path": nuke.filename(node), "outputDir": os.path.dirname(nuke.filename(node)), "ext": ext, # todo: should be redundant "label": label, - "families": ["render.local"], + "families": ["{}.local".format(instance.data["families"][0])], "collection": collection, "first_frame": first_frame, "last_frame": last_frame, "output_type": output_type }) - def instanceToggled(instance, value): - instance[0]["publish"].setValue(value) + self.log.debug("instance.data: {}".format(instance.data)) - instance.data["instanceToggled"] = instanceToggled - - # Sort/grouped by family (preserving local index) - context[:] = sorted(context, key=self.sort_by_family) - - return context + self.log.debug("context: {}".format(context)) def sort_by_family(self, instance): """Sort by family""" diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py new file mode 100644 index 0000000000..4f15b1e495 --- /dev/null +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -0,0 +1,39 @@ +import os +import pyblish.api + + +@pyblish.api.log +class RepairCollectionAction(pyblish.api.Action): + label = "Repair" + on = "failed" + icon = "wrench" + + def process(self, instance, plugin): + self.log.info("this is going to be repaired") + + +class ValidateCollection(pyblish.api.InstancePlugin): + """ Validates file output. """ + + order = pyblish.api.ValidatorOrder + optional = True + families = ["write"] + label = "Check Full Img Sequence" + hosts = ["nuke"] + actions = [RepairCollectionAction] + + def process(self, instance): + + missing_files = [] + for f in instance.data["collection"]: + # print f + if not os.path.exists(f): + missing_files.append(f) + + for f in missing_files: + instance.data["collection"].remove(f) + + frame_length = instance.data["last_frame"] - instance.data["first_frame"] + + assert len(list(instance.data["collection"])) is frame_length, self.log.info( + "{} missing frames. 
Use repair to render all frames".format(__name__)) From 4ae098e4d2a4434849c7f42152102d8bc59a31af Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Sat, 1 Dec 2018 01:39:38 +0100 Subject: [PATCH 39/78] minor cleanups --- .../_publish_unused/collect_deadline_user.py | 60 ---- .../global/publish/collect_templates.py | 3 - pype/plugins/global/publish/integrate.py | 1 + .../global/publish/submit_publish_job.py | 330 ------------------ .../global/publish/validate_templates.py | 42 --- 5 files changed, 1 insertion(+), 435 deletions(-) delete mode 100644 pype/plugins/global/_publish_unused/collect_deadline_user.py delete mode 100644 pype/plugins/global/publish/submit_publish_job.py delete mode 100644 pype/plugins/global/publish/validate_templates.py diff --git a/pype/plugins/global/_publish_unused/collect_deadline_user.py b/pype/plugins/global/_publish_unused/collect_deadline_user.py deleted file mode 100644 index f4d13a0545..0000000000 --- a/pype/plugins/global/_publish_unused/collect_deadline_user.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -import subprocess - -import pyblish.api - -CREATE_NO_WINDOW = 0x08000000 - - -def deadline_command(cmd): - # Find Deadline - path = os.environ.get("DEADLINE_PATH", None) - assert path is not None, "Variable 'DEADLINE_PATH' must be set" - - executable = os.path.join(path, "deadlinecommand") - if os.name == "nt": - executable += ".exe" - assert os.path.exists( - executable), "Deadline executable not found at %s" % executable - assert cmd, "Must have a command" - - query = (executable, cmd) - - process = subprocess.Popen(query, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, - creationflags=CREATE_NO_WINDOW) - out, err = process.communicate() - - return out - - -class CollectDeadlineUser(pyblish.api.ContextPlugin): - """Retrieve the local active Deadline user""" - - order = pyblish.api.CollectorOrder + 0.499 - label = "Deadline User" - - hosts = ['maya', 'fusion', 'nuke'] - families = [ - "renderlayer", - "saver.deadline", - "imagesequence" - ] - - - def process(self, context): - """Inject the current working file""" - user = None - try: - user = deadline_command("GetCurrentUserName").strip() - except: - self.log.warning("Deadline command seems not to be working") - - if not user: - self.log.warning("No Deadline user found. 
" - "Do you have Deadline installed?") - return - - self.log.info("Found Deadline user: {}".format(user)) - context.data['deadlineUser'] = user diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index f2a3da7df4..48b6c448e3 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -18,6 +18,3 @@ class CollectTemplates(pyblish.api.ContextPlugin): type=["anatomy"] ) context.data['anatomy'] = templates.anatomy - for key in templates.anatomy: - self.log.info(str(key) + ": " + str(templates.anatomy[key])) - # return diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 87ffa2aaa3..e20f59133c 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -233,6 +233,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "root": root, "project": PROJECT, "projectcode": "prjX", + 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, "family": instance.data['family'], diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py deleted file mode 100644 index cb852f7c43..0000000000 --- a/pype/plugins/global/publish/submit_publish_job.py +++ /dev/null @@ -1,330 +0,0 @@ -import os -import json -import re - -from avalon import api, io -from avalon.vendor import requests, clique - -import pyblish.api - - -def _get_script(): - """Get path to the image sequence script""" - try: - from pype.fusion.scripts import publish_filesequence - except Exception as e: - raise RuntimeError("Expected module 'publish_imagesequence'" - "to be available") - - module_path = publish_filesequence.__file__ - if module_path.endswith(".pyc"): - module_path = module_path[:-len(".pyc")] + ".py" - - return module_path - - -# Logic to retrieve latest files concerning extendFrames -def get_latest_version(asset_name, subset_name, family): - # Get asset - asset_name = io.find_one({"type": "asset", - "name": asset_name}, - projection={"name": True}) - - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset_name["_id"]}, - projection={"_id": True, "name": True}) - - # Check if subsets actually exists (pre-run check) - assert subset, "No subsets found, please publish with `extendFrames` off" - - # Get version - version_projection = {"name": True, - "data.startFrame": True, - "data.endFrame": True, - "parent": True} - - version = io.find_one({"type": "version", - "parent": subset["_id"], - "data.families": family}, - projection=version_projection, - sort=[("name", -1)]) - - assert version, "No version found, this is a bug" - - return version - - -def get_resources(version, extension=None): - """ - Get the files from the specific version - """ - query = {"type": "representation", "parent": version["_id"]} - if extension: - query["name"] = extension - - representation = io.find_one(query) - assert representation, "This is a bug" - - directory = api.get_representation_path(representation) - print("Source: ", directory) - resources = sorted([os.path.normpath(os.path.join(directory, fname)) - for fname in os.listdir(directory)]) - - return resources - - -def get_resource_files(resources, frame_range, override=True): - - res_collections, _ = clique.assemble(resources) - assert len(res_collections) == 1, "Multiple collections found" - res_collection = res_collections[0] - - # Remove any frames - if override: - for frame in frame_range: 
- if frame not in res_collection.indexes: - continue - res_collection.indexes.remove(frame) - - return list(res_collection) - - -class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin): - """Submit image sequence publish jobs to Deadline. - - These jobs are dependent on a deadline job submission prior to this - plug-in. - - Renders are submitted to a Deadline Web Service as - supplied via the environment variable AVALON_DEADLINE - - Options in instance.data: - - deadlineSubmission (dict, Required): The returned .json - data from the job submission to deadline. - - - outputDir (str, Required): The output directory where the metadata - file should be generated. It's assumed that this will also be - final folder containing the output files. - - - ext (str, Optional): The extension (including `.`) that is required - in the output filename to be picked up for image sequence - publishing. - - - publishJobState (str, Optional): "Active" or "Suspended" - This defaults to "Suspended" - - This requires a "startFrame" and "endFrame" to be present in instance.data - or in context.data. - - """ - - label = "Submit image sequence jobs to Deadline" - order = pyblish.api.IntegratorOrder + 0.1 - - hosts = ["fusion", "maya", "nuke"] - - families = [ - "render.deadline", - "renderlayer", - "imagesequence" - ] - - def process(self, instance): - - # AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", - # "http://localhost:8082") - # assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" - - try: - deadline_url = os.environ["DEADLINE_REST_URL"] - except KeyError: - self.log.error("Deadline REST API url not found.") - - # Get a submission job - job = instance.data.get("deadlineSubmissionJob") - if not job: - raise RuntimeError("Can't continue without valid deadline " - "submission prior to this plug-in.") - - data = instance.data.copy() - subset = data["subset"] - state = data.get("publishJobState", "Suspended") - job_name = "{batch} - {subset} [publish image sequence]".format( - batch=job["Props"]["Name"], - subset=subset - ) - - # Add in start/end frame - context = instance.context - start = instance.data.get("startFrame", context.data["startFrame"]) - end = instance.data.get("endFrame", context.data["endFrame"]) - resources = [] - - # Add in regex for sequence filename - # This assumes the output files start with subset name and ends with - # a file extension. 
- if "ext" in instance.data: - ext = re.escape(instance.data["ext"]) - else: - ext = "\.\D+" - - regex = "^{subset}.*\d+{ext}$".format(subset=re.escape(subset), - ext=ext) - - # Write metadata for publish job - render_job = data.pop("deadlineSubmissionJob") - metadata = { - "regex": regex, - "startFrame": start, - "endFrame": end, - "families": ["imagesequence"], - - # Optional metadata (for debugging) - "metadata": { - "instance": data, - "job": job, - "session": api.Session.copy() - } - } - - # Ensure output dir exists - output_dir = instance.data["outputDir"] - if not os.path.isdir(output_dir): - os.makedirs(output_dir) - - if data.get("extendFrames", False): - - family = "imagesequence" - override = data["overrideExistingFrame"] - - # override = data.get("overrideExistingFrame", False) - out_file = render_job.get("OutFile") - if not out_file: - raise RuntimeError("OutFile not found in render job!") - - extension = os.path.splitext(out_file[0])[1] - _ext = extension[1:] - - # Frame comparison - prev_start = None - prev_end = None - resource_range = range(int(start), int(end)+1) - - # Gather all the subset files (one subset per render pass!) - subset_names = [data["subset"]] - subset_names.extend(data.get("renderPasses", [])) - - for subset_name in subset_names: - version = get_latest_version(asset_name=data["asset"], - subset_name=subset_name, - family=family) - - # Set prev start / end frames for comparison - if not prev_start and not prev_end: - prev_start = version["data"]["startFrame"] - prev_end = version["data"]["endFrame"] - - subset_resources = get_resources(version, _ext) - resource_files = get_resource_files(subset_resources, - resource_range, - override) - - resources.extend(resource_files) - - updated_start = min(start, prev_start) - updated_end = max(end, prev_end) - - # Update metadata and instance start / end frame - self.log.info("Updating start / end frame : " - "{} - {}".format(updated_start, updated_end)) - - # TODO : Improve logic to get new frame range for the - # publish job (publish_filesequence.py) - # The current approach is not following Pyblish logic which is based - # on Collect / Validate / Extract. - - # ---- Collect Plugins --- - # Collect Extend Frames - Only run if extendFrames is toggled - # # # Store in instance: - # # # Previous rendered files per subset based on frames - # # # --> Add to instance.data[resources] - # # # Update publish frame range - - # ---- Validate Plugins --- - # Validate Extend Frames - # # # Check if instance has the requirements to extend frames - # There might have been some things which can be added to the list - # Please do so when fixing this. 
- - # Start frame - metadata["startFrame"] = updated_start - metadata["metadata"]["instance"]["startFrame"] = updated_start - - # End frame - metadata["endFrame"] = updated_end - metadata["metadata"]["instance"]["endFrame"] = updated_end - - metadata_filename = "{}_metadata.json".format(subset) - metadata_path = os.path.join(output_dir, metadata_filename) - with open(metadata_path, "w") as f: - json.dump(metadata, f, indent=4, sort_keys=True) - - # Generate the payload for Deadline submission - payload = { - "JobInfo": { - "Plugin": "Python", - "BatchName": job["Props"]["Batch"], - "Name": job_name, - "JobType": "Normal", - "JobDependency0": job["_id"], - "UserName": job["Props"]["User"], - "Comment": instance.context.data.get("comment", ""), - "InitialStatus": state - }, - "PluginInfo": { - "Version": "3.6", - "ScriptFile": _get_script(), - "Arguments": '--path "{}"'.format(metadata_path), - "SingleFrameOnly": "True" - }, - - # Mandatory for Deadline, may be empty - "AuxFiles": [] - } - - # Transfer the environment from the original job to this dependent - # job so they use the same environment - environment = job["Props"].get("Env", {}) - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - - # Avoid copied pools and remove secondary pool - payload["JobInfo"]["Pool"] = "none" - payload["JobInfo"].pop("SecondaryPool", None) - - self.log.info("Submitting..") - self.log.info(json.dumps(payload, indent=4, sort_keys=True)) - - url = "{}/api/jobs".format(deadline_url) - response = requests.post(url, json=payload) - if not response.ok: - raise Exception(response.text) - - # Copy files from previous render if extendFrame is True - if data.get("extendFrames", False): - - self.log.info("Preparing to copy ..") - import shutil - - dest_path = data["outputDir"] - for source in resources: - src_file = os.path.basename(source) - dest = os.path.join(dest_path, src_file) - shutil.copy(source, dest) - - self.log.info("Finished copying %i files" % len(resources)) diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py deleted file mode 100644 index f806104bb2..0000000000 --- a/pype/plugins/global/publish/validate_templates.py +++ /dev/null @@ -1,42 +0,0 @@ -import pyblish.api -from app.api import ( - Templates -) - -class ValidateTemplates(pyblish.api.ContextPlugin): - """Check if all templates were filed""" - - label = "Validate Templates" - order = pyblish.api.ValidatorOrder - 0.1 - hosts = ["maya", "houdini", "nuke"] - - def process(self, context): - - anatomy = context.data["anatomy"] - if not anatomy: - raise RuntimeError("Did not find templates") - else: - data = { "project": {"name": "D001_projectsx", - "code": "prjX"}, - "representation": "exr", - "VERSION": 3, - "SUBVERSION": 10, - "task": "animation", - "asset": "sh001", - "hierarchy": "ep101/sq01/sh010"} - - - anatomy = context.data["anatomy"].format(data) - self.log.info(anatomy.work.path) - - data = { "project": {"name": "D001_projectsy", - "code": "prjY"}, - "representation": "abc", - "VERSION": 1, - "SUBVERSION": 5, - "task": "lookdev", - "asset": "bob", - "hierarchy": "ep101/sq01/bob"} - - anatomy = context.data["anatomy"].format(data) - self.log.info(anatomy.work.file) From 71b51d1fa9c2bbd14a8fc3235d1cbd2ddadb1ef8 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Sat, 1 Dec 2018 01:40:37 +0100 Subject: [PATCH 40/78] add publish of already rendered frames, make 
write instance work with publish, render and farm attributes.

---
 .../nuke/publish/collect_nuke_writes.py       |  26 +-
 .../publish/collect_prerendered_frames.py     |  23 ++
 .../nuke/publish/integrate_rendered_frames.py | 361 ++++++++++++++++++
 pype/plugins/nuke/publish/render_local.py     |   4 +-
 .../nuke/publish/validate_collections.py      |  34 ++
 5 files changed, 438 insertions(+), 10 deletions(-)
 create mode 100644 pype/plugins/nuke/publish/collect_prerendered_frames.py
 create mode 100644 pype/plugins/nuke/publish/integrate_rendered_frames.py
 create mode 100644 pype/plugins/nuke/publish/validate_collections.py

diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py
index b92d03c40b..443b08f691 100644
--- a/pype/plugins/nuke/publish/collect_nuke_writes.py
+++ b/pype/plugins/nuke/publish/collect_nuke_writes.py
@@ -4,7 +4,6 @@ import nuke
 import pyblish.api
 import clique

-
 @pyblish.api.log
 class CollectNukeInstances(pyblish.api.ContextPlugin):
     """Collect all write nodes."""
@@ -57,11 +56,16 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             instance.add(node)

             # Adding/Checking publish and render target attribute
-            if "render_local" not in node.knobs():
-                knob = nuke.Boolean_Knob("render_local", "Local rendering")
+            if "farm" not in node.knobs():
+                knob = nuke.Boolean_Knob("farm", "Farm Rendering")
                 knob.setValue(False)
                 node.addKnob(knob)

+            # Adding/Checking render attribute
+            if "render" not in node.knobs():
+                knob = nuke.Boolean_Knob("render", "Render")
+                knob.setValue(False)
+                node.addKnob(knob)

             instance.data.update({
                 "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
@@ -70,15 +74,21 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
                 "outputDir": os.path.dirname(nuke.filename(node)),
                 "ext": ext,  # todo: should be redundant
                 "label": label,
-                "families": ["render.local"],
-                "family": "write",
-                "publish": node.knob("publish"),
+                "family": "render",
+                "publish": node.knob("publish").value(),
                 "collection": collection,
-                "first_frame": first_frame,
-                "last_frame": last_frame,
+                "startFrame": first_frame,
+                "endFrame": last_frame,
                 "output_type": output_type
             })

+            if node.knob('farm').value():
+                instance.data["families"] = ["render.farm"]
+            elif node.knob('render').value():
+                instance.data["families"] = ["render.local"]
+            else:
+                instance.data["families"] = ["prerendered.frames"]
+
         # Sort/grouped by family (preserving local index)
         context[:] = sorted(context, key=self.sort_by_family)

diff --git a/pype/plugins/nuke/publish/collect_prerendered_frames.py b/pype/plugins/nuke/publish/collect_prerendered_frames.py
new file mode 100644
index 0000000000..e3cf9e2c42
--- /dev/null
+++ b/pype/plugins/nuke/publish/collect_prerendered_frames.py
@@ -0,0 +1,23 @@
+import pyblish.api
+import os
+
+class CollectFrames(pyblish.api.InstancePlugin):
+    """Collect already rendered frames from the write node output dir"""
+
+    order = pyblish.api.CollectorOrder + 0.499
+    label = "Collect data into prerendered frames"
+    hosts = ["nuke"]
+    families = ['prerendered.frames']
+
+    def process(self, instance):
+
+        collected_frames = os.listdir(instance.data['outputDir'])
+
+        if "files" not in instance.data:
+            instance.data["files"] = list()
+
+        instance.data["files"].append(collected_frames)
+        instance.data['stagingDir'] = instance.data['outputDir']
+        instance.data['transfer'] = False
+
+        self.log.info('collected frames: {}'.format(collected_frames))
diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py
new file
mode 100644 index 0000000000..f482a48cda --- /dev/null +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -0,0 +1,361 @@ +import os +import logging +import shutil + +import errno +import pyblish.api +from avalon import api, io + + +log = logging.getLogger(__name__) + + +class IntegrateFrames(pyblish.api.InstancePlugin): + """Resolve any dependency issies + + This plug-in resolves any paths which, if not updated might break + the published file. + + The order of families is important, when working with lookdev you want to + first publish the texture, update the texture paths in the nodes and then + publish the shading network. Same goes for file dependent assets. + """ + + label = "Integrate Frames" + order = pyblish.api.IntegratorOrder + families = ["prerendered.frames"] + + def process(self, instance): + + self.register(instance) + + self.log.info("Integrating Asset in to the database ...") + # self.integrate(instance) + + def register(self, instance): + + # Required environment variables + PROJECT = api.Session["AVALON_PROJECT"] + ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] + LOCATION = api.Session["AVALON_LOCATION"] + + context = instance.context + # Atomicity + # + # Guarantee atomic publishes - each asset contains + # an identical set of members. + # __ + # / o + # / \ + # | o | + # \ / + # o __/ + # + assert all(result["success"] for result in context.data["results"]), ( + "Atomicity not held, aborting.") + + # Assemble + # + # | + # v + # ---> <---- + # ^ + # | + # + stagingdir = instance.data.get("stagingDir") + assert stagingdir, ("Incomplete instance \"%s\": " + "Missing reference to staging area." % instance) + + # extra check if stagingDir actually exists and is available + + self.log.debug("Establishing staging directory @ %s" % stagingdir) + + project = io.find_one({"type": "project"}, + projection={"config.template.publish": True}) + + asset = io.find_one({"type": "asset", + "name": ASSET, + "parent": project["_id"]}) + + assert all([project, asset]), ("Could not find current project or " + "asset '%s'" % ASSET) + + subset = self.get_subset(asset, instance) + + # get next version + latest_version = io.find_one({"type": "version", + "parent": subset["_id"]}, + {"name": True}, + sort=[("name", -1)]) + + next_version = 1 + if latest_version is not None: + next_version += latest_version["name"] + + self.log.info("Verifying version from assumed destination") + + assumed_data = instance.data["assumedTemplateData"] + assumed_version = assumed_data["version"] + if assumed_version != next_version: + raise AttributeError("Assumed version 'v{0:03d}' does not match" + "next version in database " + "('v{1:03d}')".format(assumed_version, + next_version)) + + self.log.debug("Next version: v{0:03d}".format(next_version)) + + version_data = self.create_version_data(context, instance) + version = self.create_version(subset=subset, + version_number=next_version, + locations=[LOCATION], + data=version_data) + + self.log.debug("Creating version ...") + version_id = io.insert_one(version).inserted_id + + # Write to disk + # _ + # | | + # _| |_ + # ____\ / + # |\ \ / \ + # \ \ v \ + # \ \________. 
+ # \|________| + # + root = api.registered_root() + # template_data = {"root": root, + # "project": PROJECT, + # "silo": asset['silo'], + # "asset": ASSET, + # "subset": subset["name"], + # "version": version["name"]} + hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] + if hierarchy: + # hierarchy = os.path.sep.join(hierarchy) + hierarchy = os.path.join(*hierarchy) + + template_data = {"root": root, + "project": {"name": PROJECT, + "code": "prjX"}, + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "VERSION": version["name"], + "hierarchy": hierarchy} + + template_publish = project["config"]["template"]["publish"] + anatomy = instance.context.data['anatomy'] + + # Find the representations to transfer amongst the files + # Each should be a single representation (as such, a single extension) + representations = [] + + for files in instance.data["files"]: + + # Collection + # _______ + # |______|\ + # | |\| + # | || + # | || + # | || + # |_______| + # + if isinstance(files, list): + collection = files + # Assert that each member has identical suffix + _, ext = os.path.splitext(collection[0]) + assert all(ext == os.path.splitext(name)[1] + for name in collection), ( + "Files had varying suffixes, this is a bug" + ) + + assert not any(os.path.isabs(name) for name in collection) + + template_data["representation"] = ext[1:] + + for fname in collection: + + src = os.path.join(stagingdir, fname) + anatomy_filled = anatomy.format(template_data) + dst = anatomy_filled.publish.path + + # if instance.data.get('transfer', True): + # instance.data["transfers"].append([src, dst]) + + else: + # Single file + # _______ + # | |\ + # | | + # | | + # | | + # |_______| + # + fname = files + assert not os.path.isabs(fname), ( + "Given file name is a full path" + ) + _, ext = os.path.splitext(fname) + + template_data["representation"] = ext[1:] + + src = os.path.join(stagingdir, fname) + anatomy_filled = anatomy.format(template_data) + dst = anatomy_filled.publish.path + + + # if instance.data.get('transfer', True): + # dst = src + # instance.data["transfers"].append([src, dst]) + + representation = { + "schema": "pype:representation-2.0", + "type": "representation", + "parent": version_id, + "name": ext[1:], + "data": {'path': src}, + "dependencies": instance.data.get("dependencies", "").split(), + + # Imprint shortcut to context + # for performance reasons. + "context": { + "root": root, + "project": PROJECT, + "projectcode": "prjX", + 'task': api.Session["AVALON_TASK"], + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "version": version["name"], + "hierarchy": hierarchy, + "representation": ext[1:] + } + } + representations.append(representation) + + self.log.info("Registering {} items".format(len(representations))) + + io.insert_many(representations) + + def integrate(self, instance): + """Move the files + + Through `instance.data["transfers"]` + + Args: + instance: the instance to integrate + """ + + transfers = instance.data["transfers"] + + for src, dest in transfers: + self.log.info("Copying file .. 
{} -> {}".format(src, dest)) + self.copy_file(src, dest) + + def copy_file(self, src, dst): + """ Copy given source to destination + + Arguments: + src (str): the source file which needs to be copied + dst (str): the destination of the sourc file + Returns: + None + """ + + dirname = os.path.dirname(dst) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + raise + + shutil.copy(src, dst) + + def get_subset(self, asset, instance): + + subset = io.find_one({"type": "subset", + "parent": asset["_id"], + "name": instance.data["subset"]}) + + if subset is None: + subset_name = instance.data["subset"] + self.log.info("Subset '%s' not found, creating.." % subset_name) + + _id = io.insert_one({ + "schema": "pype:subset-2.0", + "type": "subset", + "name": subset_name, + "data": {}, + "parent": asset["_id"] + }).inserted_id + + subset = io.find_one({"_id": _id}) + + return subset + + def create_version(self, subset, version_number, locations, data=None): + """ Copy given source to destination + + Args: + subset (dict): the registered subset of the asset + version_number (int): the version number + locations (list): the currently registered locations + + Returns: + dict: collection of data to create a version + """ + # Imprint currently registered location + version_locations = [location for location in locations if + location is not None] + + return {"schema": "pype:version-2.0", + "type": "version", + "parent": subset["_id"], + "name": version_number, + "locations": version_locations, + "data": data} + + def create_version_data(self, context, instance): + """Create the data collection for the version + + Args: + context: the current context + instance: the current instance being published + + Returns: + dict: the required information with instance.data as key + """ + + families = [] + current_families = instance.data.get("families", list()) + instance_family = instance.data.get("family", None) + + if instance_family is not None: + families.append(instance_family) + families += current_families + + # create relative source path for DB + relative_path = os.path.relpath(context.data["currentFile"], + api.registered_root()) + source = os.path.join("{root}", relative_path).replace("\\", "/") + + version_data = {"families": families, + "time": context.data["time"], + "author": context.data["user"], + "source": source, + "comment": context.data.get("comment")} + + # Include optional data if present in + optionals = ["startFrame", "endFrame", "step", "handles"] + for key in optionals: + if key in instance.data: + version_data[key] = instance.data[key] + + return version_data diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index 9e69134600..d1a4ca870a 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -34,8 +34,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): node_subset_name = instance.data.get("subset", None) self.log.info("Starting render") - self.log.info("Start frame: {}".format(first_frame)) - self.log.info("End frame: {}".format(last_frame)) + self.log.info("Start frame: {}".format(startFrame)) + self.log.info("End frame: {}".format(endFrame)) # Render frames nuke.execute( diff --git a/pype/plugins/nuke/publish/validate_collections.py b/pype/plugins/nuke/publish/validate_collections.py new file mode 100644 index 0000000000..ff1bfdc245 --- /dev/null +++ 
b/pype/plugins/nuke/publish/validate_collections.py @@ -0,0 +1,34 @@ +import pyblish.api +import pype.api +import clique + +import os +import glob + + +class ValidateCollections(pyblish.api.InstancePlugin): + """Validates mapped resources. + + These are external files to the current application, for example + these could be textures, image planes, cache files or other linked + media. + + This validates: + - The resources are existing files. + - The resources have correctly collected the data. + + """ + + order = pype.api.ValidateContentsOrder + label = "Validate Collections" + families = ['prerendered.frames'] + + def process(self, instance): + + collections, remainder = clique.assemble(*instance.data['files']) + self.log.info('collections: {}'.format(collections)) + + assert len(collections) == 1, "There are multiple collections in the folder" + collection_instance = instance.data.get('collection', None) + + assert collections[0].is_contiguous(),"Some frames appear to be missing" From 5fffc698758e76bbe95bd4ba8c4cc4227d1edfcb Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Sat, 1 Dec 2018 01:44:41 +0100 Subject: [PATCH 41/78] restore valiadte templates temporarily --- .../global/publish/collect_deadline_user.py | 60 ++++ .../global/publish/submit_publish_job.py | 330 ++++++++++++++++++ .../global/publish/validate_templates.py | 42 +++ 3 files changed, 432 insertions(+) create mode 100644 pype/plugins/global/publish/collect_deadline_user.py create mode 100644 pype/plugins/global/publish/submit_publish_job.py create mode 100644 pype/plugins/global/publish/validate_templates.py diff --git a/pype/plugins/global/publish/collect_deadline_user.py b/pype/plugins/global/publish/collect_deadline_user.py new file mode 100644 index 0000000000..f4d13a0545 --- /dev/null +++ b/pype/plugins/global/publish/collect_deadline_user.py @@ -0,0 +1,60 @@ +import os +import subprocess + +import pyblish.api + +CREATE_NO_WINDOW = 0x08000000 + + +def deadline_command(cmd): + # Find Deadline + path = os.environ.get("DEADLINE_PATH", None) + assert path is not None, "Variable 'DEADLINE_PATH' must be set" + + executable = os.path.join(path, "deadlinecommand") + if os.name == "nt": + executable += ".exe" + assert os.path.exists( + executable), "Deadline executable not found at %s" % executable + assert cmd, "Must have a command" + + query = (executable, cmd) + + process = subprocess.Popen(query, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + creationflags=CREATE_NO_WINDOW) + out, err = process.communicate() + + return out + + +class CollectDeadlineUser(pyblish.api.ContextPlugin): + """Retrieve the local active Deadline user""" + + order = pyblish.api.CollectorOrder + 0.499 + label = "Deadline User" + + hosts = ['maya', 'fusion', 'nuke'] + families = [ + "renderlayer", + "saver.deadline", + "imagesequence" + ] + + + def process(self, context): + """Inject the current working file""" + user = None + try: + user = deadline_command("GetCurrentUserName").strip() + except: + self.log.warning("Deadline command seems not to be working") + + if not user: + self.log.warning("No Deadline user found. 
" + "Do you have Deadline installed?") + return + + self.log.info("Found Deadline user: {}".format(user)) + context.data['deadlineUser'] = user diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py new file mode 100644 index 0000000000..cb852f7c43 --- /dev/null +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -0,0 +1,330 @@ +import os +import json +import re + +from avalon import api, io +from avalon.vendor import requests, clique + +import pyblish.api + + +def _get_script(): + """Get path to the image sequence script""" + try: + from pype.fusion.scripts import publish_filesequence + except Exception as e: + raise RuntimeError("Expected module 'publish_imagesequence'" + "to be available") + + module_path = publish_filesequence.__file__ + if module_path.endswith(".pyc"): + module_path = module_path[:-len(".pyc")] + ".py" + + return module_path + + +# Logic to retrieve latest files concerning extendFrames +def get_latest_version(asset_name, subset_name, family): + # Get asset + asset_name = io.find_one({"type": "asset", + "name": asset_name}, + projection={"name": True}) + + subset = io.find_one({"type": "subset", + "name": subset_name, + "parent": asset_name["_id"]}, + projection={"_id": True, "name": True}) + + # Check if subsets actually exists (pre-run check) + assert subset, "No subsets found, please publish with `extendFrames` off" + + # Get version + version_projection = {"name": True, + "data.startFrame": True, + "data.endFrame": True, + "parent": True} + + version = io.find_one({"type": "version", + "parent": subset["_id"], + "data.families": family}, + projection=version_projection, + sort=[("name", -1)]) + + assert version, "No version found, this is a bug" + + return version + + +def get_resources(version, extension=None): + """ + Get the files from the specific version + """ + query = {"type": "representation", "parent": version["_id"]} + if extension: + query["name"] = extension + + representation = io.find_one(query) + assert representation, "This is a bug" + + directory = api.get_representation_path(representation) + print("Source: ", directory) + resources = sorted([os.path.normpath(os.path.join(directory, fname)) + for fname in os.listdir(directory)]) + + return resources + + +def get_resource_files(resources, frame_range, override=True): + + res_collections, _ = clique.assemble(resources) + assert len(res_collections) == 1, "Multiple collections found" + res_collection = res_collections[0] + + # Remove any frames + if override: + for frame in frame_range: + if frame not in res_collection.indexes: + continue + res_collection.indexes.remove(frame) + + return list(res_collection) + + +class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin): + """Submit image sequence publish jobs to Deadline. + + These jobs are dependent on a deadline job submission prior to this + plug-in. + + Renders are submitted to a Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE + + Options in instance.data: + - deadlineSubmission (dict, Required): The returned .json + data from the job submission to deadline. + + - outputDir (str, Required): The output directory where the metadata + file should be generated. It's assumed that this will also be + final folder containing the output files. + + - ext (str, Optional): The extension (including `.`) that is required + in the output filename to be picked up for image sequence + publishing. 
+ + - publishJobState (str, Optional): "Active" or "Suspended" + This defaults to "Suspended" + + This requires a "startFrame" and "endFrame" to be present in instance.data + or in context.data. + + """ + + label = "Submit image sequence jobs to Deadline" + order = pyblish.api.IntegratorOrder + 0.1 + + hosts = ["fusion", "maya", "nuke"] + + families = [ + "render.deadline", + "renderlayer", + "imagesequence" + ] + + def process(self, instance): + + # AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + # "http://localhost:8082") + # assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" + + try: + deadline_url = os.environ["DEADLINE_REST_URL"] + except KeyError: + self.log.error("Deadline REST API url not found.") + + # Get a submission job + job = instance.data.get("deadlineSubmissionJob") + if not job: + raise RuntimeError("Can't continue without valid deadline " + "submission prior to this plug-in.") + + data = instance.data.copy() + subset = data["subset"] + state = data.get("publishJobState", "Suspended") + job_name = "{batch} - {subset} [publish image sequence]".format( + batch=job["Props"]["Name"], + subset=subset + ) + + # Add in start/end frame + context = instance.context + start = instance.data.get("startFrame", context.data["startFrame"]) + end = instance.data.get("endFrame", context.data["endFrame"]) + resources = [] + + # Add in regex for sequence filename + # This assumes the output files start with subset name and ends with + # a file extension. + if "ext" in instance.data: + ext = re.escape(instance.data["ext"]) + else: + ext = "\.\D+" + + regex = "^{subset}.*\d+{ext}$".format(subset=re.escape(subset), + ext=ext) + + # Write metadata for publish job + render_job = data.pop("deadlineSubmissionJob") + metadata = { + "regex": regex, + "startFrame": start, + "endFrame": end, + "families": ["imagesequence"], + + # Optional metadata (for debugging) + "metadata": { + "instance": data, + "job": job, + "session": api.Session.copy() + } + } + + # Ensure output dir exists + output_dir = instance.data["outputDir"] + if not os.path.isdir(output_dir): + os.makedirs(output_dir) + + if data.get("extendFrames", False): + + family = "imagesequence" + override = data["overrideExistingFrame"] + + # override = data.get("overrideExistingFrame", False) + out_file = render_job.get("OutFile") + if not out_file: + raise RuntimeError("OutFile not found in render job!") + + extension = os.path.splitext(out_file[0])[1] + _ext = extension[1:] + + # Frame comparison + prev_start = None + prev_end = None + resource_range = range(int(start), int(end)+1) + + # Gather all the subset files (one subset per render pass!) 
+ subset_names = [data["subset"]] + subset_names.extend(data.get("renderPasses", [])) + + for subset_name in subset_names: + version = get_latest_version(asset_name=data["asset"], + subset_name=subset_name, + family=family) + + # Set prev start / end frames for comparison + if not prev_start and not prev_end: + prev_start = version["data"]["startFrame"] + prev_end = version["data"]["endFrame"] + + subset_resources = get_resources(version, _ext) + resource_files = get_resource_files(subset_resources, + resource_range, + override) + + resources.extend(resource_files) + + updated_start = min(start, prev_start) + updated_end = max(end, prev_end) + + # Update metadata and instance start / end frame + self.log.info("Updating start / end frame : " + "{} - {}".format(updated_start, updated_end)) + + # TODO : Improve logic to get new frame range for the + # publish job (publish_filesequence.py) + # The current approach is not following Pyblish logic which is based + # on Collect / Validate / Extract. + + # ---- Collect Plugins --- + # Collect Extend Frames - Only run if extendFrames is toggled + # # # Store in instance: + # # # Previous rendered files per subset based on frames + # # # --> Add to instance.data[resources] + # # # Update publish frame range + + # ---- Validate Plugins --- + # Validate Extend Frames + # # # Check if instance has the requirements to extend frames + # There might have been some things which can be added to the list + # Please do so when fixing this. + + # Start frame + metadata["startFrame"] = updated_start + metadata["metadata"]["instance"]["startFrame"] = updated_start + + # End frame + metadata["endFrame"] = updated_end + metadata["metadata"]["instance"]["endFrame"] = updated_end + + metadata_filename = "{}_metadata.json".format(subset) + metadata_path = os.path.join(output_dir, metadata_filename) + with open(metadata_path, "w") as f: + json.dump(metadata, f, indent=4, sort_keys=True) + + # Generate the payload for Deadline submission + payload = { + "JobInfo": { + "Plugin": "Python", + "BatchName": job["Props"]["Batch"], + "Name": job_name, + "JobType": "Normal", + "JobDependency0": job["_id"], + "UserName": job["Props"]["User"], + "Comment": instance.context.data.get("comment", ""), + "InitialStatus": state + }, + "PluginInfo": { + "Version": "3.6", + "ScriptFile": _get_script(), + "Arguments": '--path "{}"'.format(metadata_path), + "SingleFrameOnly": "True" + }, + + # Mandatory for Deadline, may be empty + "AuxFiles": [] + } + + # Transfer the environment from the original job to this dependent + # job so they use the same environment + environment = job["Props"].get("Env", {}) + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) + + # Avoid copied pools and remove secondary pool + payload["JobInfo"]["Pool"] = "none" + payload["JobInfo"].pop("SecondaryPool", None) + + self.log.info("Submitting..") + self.log.info(json.dumps(payload, indent=4, sort_keys=True)) + + url = "{}/api/jobs".format(deadline_url) + response = requests.post(url, json=payload) + if not response.ok: + raise Exception(response.text) + + # Copy files from previous render if extendFrame is True + if data.get("extendFrames", False): + + self.log.info("Preparing to copy ..") + import shutil + + dest_path = data["outputDir"] + for source in resources: + src_file = os.path.basename(source) + dest = os.path.join(dest_path, src_file) + shutil.copy(source, dest) + + 
self.log.info("Finished copying %i files" % len(resources)) diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py new file mode 100644 index 0000000000..f806104bb2 --- /dev/null +++ b/pype/plugins/global/publish/validate_templates.py @@ -0,0 +1,42 @@ +import pyblish.api +from app.api import ( + Templates +) + +class ValidateTemplates(pyblish.api.ContextPlugin): + """Check if all templates were filed""" + + label = "Validate Templates" + order = pyblish.api.ValidatorOrder - 0.1 + hosts = ["maya", "houdini", "nuke"] + + def process(self, context): + + anatomy = context.data["anatomy"] + if not anatomy: + raise RuntimeError("Did not find templates") + else: + data = { "project": {"name": "D001_projectsx", + "code": "prjX"}, + "representation": "exr", + "VERSION": 3, + "SUBVERSION": 10, + "task": "animation", + "asset": "sh001", + "hierarchy": "ep101/sq01/sh010"} + + + anatomy = context.data["anatomy"].format(data) + self.log.info(anatomy.work.path) + + data = { "project": {"name": "D001_projectsy", + "code": "prjY"}, + "representation": "abc", + "VERSION": 1, + "SUBVERSION": 5, + "task": "lookdev", + "asset": "bob", + "hierarchy": "ep101/sq01/bob"} + + anatomy = context.data["anatomy"].format(data) + self.log.info(anatomy.work.file) From c1321aeb3696f60f057f4b77f1e67861c58d181a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 01:54:22 +0100 Subject: [PATCH 42/78] adding render, render_farm knobs to nuke_write_create --- pype/nuke/lib.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 7e2539d073..061ab5e2c0 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -80,9 +80,23 @@ def create_write_node(name, data): **_data ) instance = avalon.nuke.lib.imprint(instance, data["avalon"]) + add_rendering_knobs(instance) return instance +def add_rendering_knobs(node): + if "render" not in node.knobs(): + knob = nuke.Boolean_Knob("render", "Render") + knob.setFlag(0x1000) + knob.setValue(False) + node.addKnob(knob) + if "render_farm" not in node.knobs(): + knob = nuke.Boolean_Knob("render_farm", "Render on Farm") + knob.setValue(False) + node.addKnob(knob) + return node + + def update_frame_range(start, end, root=None): """Set Nuke script start and end frame range From 68fe0b0bb810572d1ed65324b0d291246d51e3af Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 02:25:16 +0100 Subject: [PATCH 43/78] final implementation collect_writes and validate collection --- pype/plugins/nuke/publish/collect_writes.py | 26 +++++++++++-------- .../nuke/publish/validate_collection.py | 8 ++++-- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index e83402ba48..4008cd1d08 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -54,12 +54,14 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): ) self.log.info("collection: {}".format(path)) - try: - collection = clique.parse(path) + collection = None + if not node["render"].value(): + try: + collection = clique.parse(path) - except Exception as e: - self.log.warning(e) - collection = None + except Exception as e: + self.log.warning(e) + collection = None # Include start and end render frame in label name = node.name() @@ -71,18 +73,20 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): ) self.log.debug("checking for error: {}".format(label)) - # # Adding/Checking 
publish and render target attribute - # if "render_local" not in node.knobs(): - # knob = nuke.Boolean_Knob("render_local", "Local rendering") - # knob.setValue(False) - # node.addKnob(knob) + + # dealing with local/farm rendering + if node["render_farm"].value(): + families = "{}.farm".format(instance.data["families"][0]) + else: + families = "{}.local".format(instance.data["families"][0]) + self.log.debug("checking for error: {}".format(label)) instance.data.update({ "path": nuke.filename(node), "outputDir": os.path.dirname(nuke.filename(node)), "ext": ext, # todo: should be redundant "label": label, - "families": ["{}.local".format(instance.data["families"][0])], + "families": [families], "collection": collection, "first_frame": first_frame, "last_frame": last_frame, diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 4f15b1e495..30b699cec9 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -1,5 +1,6 @@ import os import pyblish.api +import nuke @pyblish.api.log @@ -8,8 +9,9 @@ class RepairCollectionAction(pyblish.api.Action): on = "failed" icon = "wrench" - def process(self, instance, plugin): - self.log.info("this is going to be repaired") + def process(self, context, plugin): + context[0][0]["render"].setValue(True) + self.log.info("Rendering toggled ON") class ValidateCollection(pyblish.api.InstancePlugin): @@ -23,6 +25,8 @@ class ValidateCollection(pyblish.api.InstancePlugin): actions = [RepairCollectionAction] def process(self, instance): + if not instance.data["collection"]: + return missing_files = [] for f in instance.data["collection"]: From 63acea3295d5a194c7bdbc3a9a0201bb56a0a112 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 12:57:07 +0100 Subject: [PATCH 44/78] todo in pype.nuke.lib --- pype/nuke/lib.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index ac9dc6675c..79c292b2ba 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -18,6 +18,8 @@ def format_anatomy(data): file = script_name() anatomy = get_anatomy() + + # TODO: perhaps should be in try! 
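# A minimal sketch of the guarded lookup this TODO suggests; the
# AttributeError branch and the fallback padding of 4 are assumptions
# for illustration, not part of the patch itself:
#
#     try:
#         padding = anatomy.render.padding
#     except AttributeError:
#         # the anatomy templates may not define render.padding
#         padding = 4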
padding = anatomy.render.padding data.update({ From 0cb6ce320633d0ea3e49ff2da9a1fbedcefd38a3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 14:22:17 +0100 Subject: [PATCH 45/78] merging @milan's work from publish_integrate --- pype/plugins/nuke/publish/collect_writes.py | 51 +++++++------------ pype/plugins/nuke/publish/render_local.py | 4 +- .../nuke/publish/validate_collection.py | 28 +++++----- .../nuke/publish/validate_collections.py | 34 ------------- 4 files changed, 33 insertions(+), 84 deletions(-) delete mode 100644 pype/plugins/nuke/publish/validate_collections.py diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 4008cd1d08..e6097d8f6c 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -39,30 +39,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): first_frame = int(node["first"].getValue()) last_frame = int(node["last"].getValue()) - # Add collection - collection = None + # get path path = nuke.filename(node) - - if "#" in path: - path_split = path.split("#") - length = len(path_split)-1 - path = "{}%0{}d{}".format(path_split[0], length, path_split[-1]) - - path += " [{0}-{1}]".format( - str(first_frame), - str(last_frame) - ) - self.log.info("collection: {}".format(path)) - - collection = None - if not node["render"].value(): - try: - collection = clique.parse(path) - - except Exception as e: - self.log.warning(e) - collection = None - + output_dir = os.path.dirname(path) # Include start and end render frame in label name = node.name() @@ -74,23 +53,29 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): self.log.debug("checking for error: {}".format(label)) - # dealing with local/farm rendering - if node["render_farm"].value(): - families = "{}.farm".format(instance.data["families"][0]) + files = [] + # preredered frames + if not node["render"].value(): + # dealing with local/farm rendering + if node["render_farm"].value(): + families = "{}.farm".format(instance.data["families"][0]) + else: + families = "{}.local".format(instance.data["families"][0]) else: - families = "{}.local".format(instance.data["families"][0]) + families = 'prerendered.frames' + files = [f for f in os.listdir(output_dir)] self.log.debug("checking for error: {}".format(label)) instance.data.update({ - "path": nuke.filename(node), - "outputDir": os.path.dirname(nuke.filename(node)), + "path": path, + "outputDir": output_dir, "ext": ext, # todo: should be redundant "label": label, "families": [families], - "collection": collection, - "first_frame": first_frame, - "last_frame": last_frame, - "output_type": output_type + "files": files, + "firstFrame": first_frame, + "lastFrame": last_frame, + "outputType": output_type }) self.log.debug("instance.data: {}".format(instance.data)) diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index 9e69134600..aa8c98f34b 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -29,8 +29,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): self.log.debug("instance collected: {}".format(instance.data)) - first_frame = instance.data.get("first_frame", None) - last_frame = instance.data.get("last_frame", None) + first_frame = instance.data.get("firstFrame", None) + last_frame = instance.data.get("lastFrame", None) node_subset_name = instance.data.get("subset", None) self.log.info("Starting render") diff --git 
a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 30b699cec9..95864601aa 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -1,6 +1,6 @@ import os import pyblish.api -import nuke +import clique @pyblish.api.log @@ -10,6 +10,7 @@ class RepairCollectionAction(pyblish.api.Action): icon = "wrench" def process(self, context, plugin): + [os.remove(f) for f in context[0].data["files"]] context[0][0]["render"].setValue(True) self.log.info("Rendering toggled ON") @@ -18,26 +19,23 @@ class ValidateCollection(pyblish.api.InstancePlugin): """ Validates file output. """ order = pyblish.api.ValidatorOrder - optional = True - families = ["write"] - label = "Check Full Img Sequence" + # optional = True + families = ['prerendered.frames'] + label = "Check prerendered frames" hosts = ["nuke"] actions = [RepairCollectionAction] def process(self, instance): - if not instance.data["collection"]: - return - missing_files = [] - for f in instance.data["collection"]: - # print f - if not os.path.exists(f): - missing_files.append(f) + collections, remainder = clique.assemble(*instance.data['files']) + self.log.info('collections: {}'.format(collections)) - for f in missing_files: - instance.data["collection"].remove(f) + frame_length = instance.data["lastFrame"] \ + - instance.data["firstFrame"] - frame_length = instance.data["last_frame"] - instance.data["first_frame"] + assert len(collections) == 1, self.log.info("There are multiple collections in the folder") - assert len(list(instance.data["collection"])) is frame_length, self.log.info( + assert collections[0].is_contiguous(), self.log.info("Some frames appear to be missing") + + assert len(list(instance.data["files"])) is frame_length, self.log.info( "{} missing frames. Use repair to render all frames".format(__name__)) diff --git a/pype/plugins/nuke/publish/validate_collections.py b/pype/plugins/nuke/publish/validate_collections.py deleted file mode 100644 index ff1bfdc245..0000000000 --- a/pype/plugins/nuke/publish/validate_collections.py +++ /dev/null @@ -1,34 +0,0 @@ -import pyblish.api -import pype.api -import clique - -import os -import glob - - -class ValidateCollections(pyblish.api.InstancePlugin): - """Validates mapped resources. - - These are external files to the current application, for example - these could be textures, image planes, cache files or other linked - media. - - This validates: - - The resources are existing files. - - The resources have correctly collected the data. 
- - """ - - order = pype.api.ValidateContentsOrder - label = "Validate Collections" - families = ['prerendered.frames'] - - def process(self, instance): - - collections, remainder = clique.assemble(*instance.data['files']) - self.log.info('collections: {}'.format(collections)) - - assert len(collections) == 1, "There are multiple collections in the folder" - collection_instance = instance.data.get('collection', None) - - assert collections[0].is_contiguous(),"Some frames appear to be missing" From ad1db73b21d9af875d78cba13465cf229130c970 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 15:39:27 +0100 Subject: [PATCH 46/78] update publish rendering --- .../publish/collect_prerendered_frames.py | 23 ---------------- pype/plugins/nuke/publish/collect_writes.py | 26 ++++++++++++------- pype/plugins/nuke/publish/render_local.py | 4 ++- .../nuke/publish/validate_collection.py | 12 +++++++-- 4 files changed, 30 insertions(+), 35 deletions(-) delete mode 100644 pype/plugins/nuke/publish/collect_prerendered_frames.py diff --git a/pype/plugins/nuke/publish/collect_prerendered_frames.py b/pype/plugins/nuke/publish/collect_prerendered_frames.py deleted file mode 100644 index e3cf9e2c42..0000000000 --- a/pype/plugins/nuke/publish/collect_prerendered_frames.py +++ /dev/null @@ -1,23 +0,0 @@ -import pyblish.api -import os - -class CollectFrames(pyblish.api.InstancePlugin): - """Inject the host into context""" - - order = pyblish.api.CollectorOrder + 0.499 - label = "Collect data into prerenderd frames" - hosts = ["nuke"] - families = ['prerendered.frames'] - - def process(self, instance): - - collected_frames = os.listdir(instance.data['outputDir']) - - if "files" not in instance.data: - instance.data["files"] = list() - - instance.data["files"].append(collected_frames) - instance.data['stagingDir'] = instance.data['outputDir'] - instance.data['transfer'] = False - - self.log.info('collected frames: {}'.format(collected_frames)) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index e6097d8f6c..7ac79350c0 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -2,7 +2,6 @@ import os import nuke import pyblish.api -import clique import logging log = logging.getLogger(__name__) @@ -42,6 +41,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): # get path path = nuke.filename(node) output_dir = os.path.dirname(path) + self.log.debug(output_dir) # Include start and end render frame in label name = node.name() @@ -51,28 +51,36 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): int(last_frame) ) - self.log.debug("checking for error: {}".format(label)) - - files = [] # preredered frames if not node["render"].value(): + try: + families = "prerendered.frames" + collected_frames = os.listdir(output_dir) + if not collected_frames: + node["render"].setValue(True) + if "files" not in instance.data: + instance.data["files"] = list() + + instance.data["files"] = collected_frames + instance.data['stagingDir'] = output_dir + instance.data['transfer'] = False + except Exception: + node["render"].setValue(True) + + if node["render"].value(): # dealing with local/farm rendering if node["render_farm"].value(): families = "{}.farm".format(instance.data["families"][0]) else: families = "{}.local".format(instance.data["families"][0]) - else: - families = 'prerendered.frames' - files = [f for f in os.listdir(output_dir)] self.log.debug("checking for error: {}".format(label)) instance.data.update({ 
"path": path, "outputDir": output_dir, - "ext": ext, # todo: should be redundant + "ext": ext, "label": label, "families": [families], - "files": files, "firstFrame": first_frame, "lastFrame": last_frame, "outputType": output_type diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index aa8c98f34b..fb974e6588 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -31,7 +31,7 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): first_frame = instance.data.get("firstFrame", None) last_frame = instance.data.get("lastFrame", None) - node_subset_name = instance.data.get("subset", None) + node_subset_name = instance.data.get("name", None) self.log.info("Starting render") self.log.info("Start frame: {}".format(first_frame)) @@ -43,4 +43,6 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): int(first_frame), int(last_frame) ) + # swith to prerendered.frames + instance[0]["render"].setValue(False) self.log.info('Finished render') diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 95864601aa..6c4c305de7 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -10,7 +10,11 @@ class RepairCollectionAction(pyblish.api.Action): icon = "wrench" def process(self, context, plugin): - [os.remove(f) for f in context[0].data["files"]] + + files_remove = [os.path.join(context[0].data["outputDir"], f) + for f in context[0].data["files"]] + for f in files_remove: + self.log.debug("removing file: {}".format(f)) context[0][0]["render"].setValue(True) self.log.info("Rendering toggled ON") @@ -31,11 +35,15 @@ class ValidateCollection(pyblish.api.InstancePlugin): self.log.info('collections: {}'.format(collections)) frame_length = instance.data["lastFrame"] \ - - instance.data["firstFrame"] + - instance.data["firstFrame"] + 1 assert len(collections) == 1, self.log.info("There are multiple collections in the folder") assert collections[0].is_contiguous(), self.log.info("Some frames appear to be missing") + self.log.info('frame_length: {}'.format(frame_length)) + self.log.info('len(list(instance.data["files"])): {}'.format( + len(list(instance.data["files"])))) + assert len(list(instance.data["files"])) is frame_length, self.log.info( "{} missing frames. 
Use repair to render all frames".format(__name__)) From 4ecddb537750f40bfdabfaa3c11dd869ddfb0ba4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 16:19:53 +0100 Subject: [PATCH 47/78] fixing validation of collection --- pype/plugins/nuke/publish/collect_writes.py | 5 +++-- pype/plugins/nuke/publish/validate_collection.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 7ac79350c0..82c4d703e7 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -62,7 +62,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): instance.data["files"] = list() instance.data["files"] = collected_frames - instance.data['stagingDir'] = output_dir instance.data['transfer'] = False except Exception: node["render"].setValue(True) @@ -83,7 +82,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "families": [families], "firstFrame": first_frame, "lastFrame": last_frame, - "outputType": output_type + "outputType": output_type, + "stagingDir": output_dir, + }) self.log.debug("instance.data: {}".format(instance.data)) diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 6c4c305de7..a9853acc56 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -30,8 +30,8 @@ class ValidateCollection(pyblish.api.InstancePlugin): actions = [RepairCollectionAction] def process(self, instance): - - collections, remainder = clique.assemble(*instance.data['files']) + self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) + collections, remainder = clique.assemble(instance.data['files']) self.log.info('collections: {}'.format(collections)) frame_length = instance.data["lastFrame"] \ From 930fb37b1bc565c2c5c8d5fc332dc376ce4c65fa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Dec 2018 17:07:18 +0100 Subject: [PATCH 48/78] fixing wrong settings on family --- pype/plugins/nuke/create/create_write.py | 94 +++++++++++++----------- 1 file changed, 50 insertions(+), 44 deletions(-) diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index 673d75911d..ff0375100a 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -17,11 +17,14 @@ def subset_to_families(subset, family, families): class CrateWriteRender(avalon.nuke.Creator): + # change this to template preset + preset = "render" + name = "WriteRender" label = "Create Write Render" hosts = ["nuke"] - family = "render" # change this to template - families = "write" # do not change! 
+ family = "{}.write".format(preset) + families = preset icon = "sign-out" def __init__(self, *args, **kwargs): @@ -29,41 +32,43 @@ class CrateWriteRender(avalon.nuke.Creator): data = OrderedDict() - # creating pype subset - data["subset"] = subset_to_families( - self.data["subset"], - self.family, - self.families - ) - # swaping family with families - data["family"] = self.families - data["families"] = self.family + data["family"] = self.family.split(".")[1] + data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() if k not in data.keys()} self.data = data def process(self): + self.data["subset"] = "{}.{}".format(self.families, self.data["subset"]) + self.name = self.data["subset"] + instance = super(CrateWriteRender, self).process() + family = self.family.split(".")[0] + node = self.family.split(".")[1] + if not instance: write_data = { - "class": self.families, - "preset": self.family, + "class": node, + "preset": family, "avalon": self.data } - create_write_node(self.name, write_data) + create_write_node(self.data["subset"], write_data) return class CrateWritePrerender(avalon.nuke.Creator): + # change this to template preset + preset = "prerender" + name = "WritePrerender" label = "Create Write Prerender" hosts = ["nuke"] - family = "prerender" - families = "write" + family = "{}.write".format(preset) + families = preset icon = "sign-out" def __init__(self, *args, **kwargs): @@ -71,41 +76,43 @@ class CrateWritePrerender(avalon.nuke.Creator): data = OrderedDict() - # creating pype subset - data["subset"] = subset_to_families( - self.data["subset"], - self.family, - self.families - ) - # swaping family with families - data["family"] = self.families - data["families"] = self.family + data["family"] = self.family.split(".")[1] + data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() if k not in data.keys()} self.data = data def process(self): + self.data["subset"] = "{}.{}".format(self.families, self.data["subset"]) + self.name = self.data["subset"] + instance = super(CrateWritePrerender, self).process() + family = self.family.split(".")[0] + node = self.family.split(".")[1] + if not instance: write_data = { - "class": self.families, - "preset": self.family, + "class": node, + "preset": family, "avalon": self.data } - create_write_node(self.name, write_data) + create_write_node(self.data["subset"], write_data) return class CrateWriteStill(avalon.nuke.Creator): + # change this to template preset + preset = "still" + name = "WriteStill" label = "Create Write Still" hosts = ["nuke"] - family = "still" - families = "write" + family = "{}.write".format(preset) + families = preset icon = "image" def __init__(self, *args, **kwargs): @@ -113,15 +120,8 @@ class CrateWriteStill(avalon.nuke.Creator): data = OrderedDict() - # creating pype subset - data["subset"] = subset_to_families( - self.data["subset"], - self.family, - self.families - ) - # swaping family with families - data["family"] = self.families - data["families"] = self.family + data["family"] = self.family.split(".")[1] + data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() if k not in data.keys()} @@ -129,17 +129,23 @@ class CrateWriteStill(avalon.nuke.Creator): def process(self): import nuke + self.data["subset"] = "{}.{}".format(self.families, self.data["subset"]) + self.name = self.data["subset"] + instance = super(CrateWriteStill, self).process() + family = self.family.split(".")[0] + node = self.family.split(".")[1] + if not instance: write_data 
= {
-                "class": self.families,
-                "preset": self.family,
-                "avalon": self.data,
-                "frame_range": [nuke.frame(), nuke.frame()]
+                "frame_range": [nuke.frame(), nuke.frame()],
+                "class": node,
+                "preset": family,
+                "avalon": self.data
             }
             nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
-            create_write_node(self.name, write_data)
+            create_write_node(self.data["subset"], write_data)
         return

From 72773e7a28b8b5648906db8e357d670b7e6859f1 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 1 Dec 2018 17:48:40 +0100
Subject: [PATCH 49/78] fix typo in templates

---
 pype/ftrack/actions/action_syncToAvalon.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py
index 65b84fbc7b..5af84b3042 100644
--- a/pype/ftrack/actions/action_syncToAvalon.py
+++ b/pype/ftrack/actions/action_syncToAvalon.py
@@ -146,7 +146,7 @@ class SyncToAvalon(BaseAction):
                 'apps': apps,
                 # TODO redo work!!!
                 'template': {
-                    'workfile': '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>',
+                    'workfile': '{asset[name]}_{task[name]}_v{version:0>3}<_{comment}>',
                     'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}',
                     'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'}
             }

From a99669b972d2cb3f53ccf9a96108bd0c69b9103a Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 1 Dec 2018 17:59:33 +0100
Subject: [PATCH 50/78] update create_write, publish validate collection wip

---
 pype/plugins/nuke/create/create_write.py      | 36 +++++++++----------
 pype/plugins/nuke/publish/render_local.py     |  2 +-
 .../nuke/publish/validate_collection.py       |  8 +++--
 3 files changed, 24 insertions(+), 22 deletions(-)

diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py
index ff0375100a..c1b492ac2e 100644
--- a/pype/plugins/nuke/create/create_write.py
+++ b/pype/plugins/nuke/create/create_write.py
@@ -6,6 +6,8 @@ from pype.nuke import (
 )
 from pype import api as pype
 
+import nuke
+
 log = pype.Logger.getLogger(__name__, "nuke")
 
 
@@ -23,7 +25,7 @@ class CrateWriteRender(avalon.nuke.Creator):
     name = "WriteRender"
     label = "Create Write Render"
     hosts = ["nuke"]
-    family = "{}.write".format(preset)
+    family = "{}_write".format(preset)
     families = preset
     icon = "sign-out"
 
@@ -32,7 +34,7 @@
 
         data = OrderedDict()
 
-        data["family"] = self.family.split(".")[1]
+        data["family"] = self.family.split("_")[1]
data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() @@ -84,13 +85,12 @@ class CrateWritePrerender(avalon.nuke.Creator): self.data = data def process(self): - self.data["subset"] = "{}.{}".format(self.families, self.data["subset"]) self.name = self.data["subset"] - instance = super(CrateWritePrerender, self).process() + instance = nuke.toNode(self.data["subset"]) - family = self.family.split(".")[0] - node = self.family.split(".")[1] + family = self.family.split("_")[0] + node = self.family.split("_")[1] if not instance: write_data = { @@ -111,7 +111,7 @@ class CrateWriteStill(avalon.nuke.Creator): name = "WriteStill" label = "Create Write Still" hosts = ["nuke"] - family = "{}.write".format(preset) + family = "{}_write".format(preset) families = preset icon = "image" @@ -120,7 +120,7 @@ class CrateWriteStill(avalon.nuke.Creator): data = OrderedDict() - data["family"] = self.family.split(".")[1] + data["family"] = self.family.split("_")[1] data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() @@ -128,14 +128,12 @@ class CrateWriteStill(avalon.nuke.Creator): self.data = data def process(self): - import nuke - self.data["subset"] = "{}.{}".format(self.families, self.data["subset"]) self.name = self.data["subset"] - instance = super(CrateWriteStill, self).process() + instance = nuke.toNode(self.data["subset"]) - family = self.family.split(".")[0] - node = self.family.split(".")[1] + family = self.family.split("_")[0] + node = self.family.split("_")[1] if not instance: write_data = { diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index fb974e6588..55adedb9e5 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -14,7 +14,7 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder label = "Render Local" hosts = ["nuke"] - families = ["render.local"] + families = ["render.local", "prerender.local", "still.local"] def process(self, instance): diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index a9853acc56..12e6160f6b 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -14,6 +14,7 @@ class RepairCollectionAction(pyblish.api.Action): files_remove = [os.path.join(context[0].data["outputDir"], f) for f in context[0].data["files"]] for f in files_remove: + os.remove(f) self.log.debug("removing file: {}".format(f)) context[0][0]["render"].setValue(True) self.log.info("Rendering toggled ON") @@ -37,9 +38,12 @@ class ValidateCollection(pyblish.api.InstancePlugin): frame_length = instance.data["lastFrame"] \ - instance.data["firstFrame"] + 1 - assert len(collections) == 1, self.log.info("There are multiple collections in the folder") + if frame_length is not 1: + assert len(collections) == 1, self.log.info( + "There are multiple collections in the folder") + assert collections[0].is_contiguous(), self.log.info("Some frames appear to be missing") - assert collections[0].is_contiguous(), self.log.info("Some frames appear to be missing") + assert remainder is not None, self.log.info("There are some wrong files in folder") self.log.info('frame_length: {}'.format(frame_length)) self.log.info('len(list(instance.data["files"])): {}'.format( From 4b4960cff5ccca2ae24b3ae96750ac5bc364969c Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Sun, 2 Dec 2018 02:11:16 +0100 Subject: [PATCH 51/78] 
integrate collection of files rather than framed individually --- pype/plugins/nuke/publish/collect_writes.py | 24 +++++++------------ .../nuke/publish/validate_collection.py | 10 ++++---- 2 files changed, 14 insertions(+), 20 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 82c4d703e7..db966fd84d 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -41,7 +41,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): # get path path = nuke.filename(node) output_dir = os.path.dirname(path) - self.log.debug(output_dir) + self.log.debug('output dir: {}'.format(output_dir)) # Include start and end render frame in label name = node.name() @@ -53,20 +53,14 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): # preredered frames if not node["render"].value(): - try: - families = "prerendered.frames" - collected_frames = os.listdir(output_dir) - if not collected_frames: - node["render"].setValue(True) - if "files" not in instance.data: - instance.data["files"] = list() - - instance.data["files"] = collected_frames - instance.data['transfer'] = False - except Exception: - node["render"].setValue(True) - - if node["render"].value(): + families = "prerendered.frames" + collected_frames = os.listdir(output_dir) + self.log.debug("collected_frames: {}".format(label)) + if "files" not in instance.data: + instance.data["files"] = list() + instance.data["files"].append(collected_frames) + instance.data['transfer'] = False + else: # dealing with local/farm rendering if node["render_farm"].value(): families = "{}.farm".format(instance.data["families"][0]) diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 12e6160f6b..4088272bc4 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -32,8 +32,8 @@ class ValidateCollection(pyblish.api.InstancePlugin): def process(self, instance): self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - collections, remainder = clique.assemble(instance.data['files']) - self.log.info('collections: {}'.format(collections)) + collections, remainder = clique.assemble(*instance.data['files']) + self.log.info('collections: {}'.format(str(collections))) frame_length = instance.data["lastFrame"] \ - instance.data["firstFrame"] + 1 @@ -43,11 +43,11 @@ class ValidateCollection(pyblish.api.InstancePlugin): "There are multiple collections in the folder") assert collections[0].is_contiguous(), self.log.info("Some frames appear to be missing") - assert remainder is not None, self.log.info("There are some wrong files in folder") + assert remainder is not None, self.log.info("There are some extra files in folder") self.log.info('frame_length: {}'.format(frame_length)) self.log.info('len(list(instance.data["files"])): {}'.format( - len(list(instance.data["files"])))) + len(list(instance.data["files"][0])))) - assert len(list(instance.data["files"])) is frame_length, self.log.info( + assert len(list(instance.data["files"][0])) is frame_length, self.log.info( "{} missing frames. 
Use repair to render all frames".format(__name__)) From d2b68baf5e89f13f4fc4b9be395867481eef7068 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 2 Dec 2018 22:18:33 +0100 Subject: [PATCH 52/78] fixing setting format to use crop value --- pype/nuke/__init__.py | 27 +++++---- pype/nuke/lib.py | 131 ++++++++++++++++++++++++++---------------- pype/nuke/menu.py | 13 ++++- 3 files changed, 104 insertions(+), 67 deletions(-) diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 371fe2a786..3bf972bcaf 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -3,7 +3,7 @@ import sys from avalon import api as avalon from pyblish import api as pyblish -from .. import api as pype +from .. import api from pype.nuke import menu @@ -15,12 +15,12 @@ import nuke # removing logger handler created in avalon_core for name, handler in [(handler.get_name(), handler) - for handler in pype.Logger.logging.root.handlers[:]]: + for handler in api.Logger.logging.root.handlers[:]]: if "pype" not in str(name).lower(): - pype.Logger.logging.root.removeHandler(handler) + api.Logger.logging.root.removeHandler(handler) -log = pype.Logger.getLogger(__name__, "nuke") +log = api.Logger.getLogger(__name__, "nuke") AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") @@ -37,7 +37,7 @@ self = sys.modules[__name__] self.nLogger = None -class NukeHandler(pype.Logger.logging.Handler): +class NukeHandler(api.Logger.logging.Handler): ''' Nuke Handler - emits logs into nuke's script editor. warning will emit nuke.warning() @@ -45,7 +45,7 @@ class NukeHandler(pype.Logger.logging.Handler): ''' def __init__(self): - pype.Logger.logging.Handler.__init__(self) + api.Logger.logging.Handler.__init__(self) self.set_name("Pype_Nuke_Handler") def emit(self, record): @@ -65,11 +65,11 @@ class NukeHandler(pype.Logger.logging.Handler): nuke_handler = NukeHandler() if nuke_handler.get_name() \ not in [handler.get_name() - for handler in pype.Logger.logging.root.handlers[:]]: - pype.Logger.logging.getLogger().addHandler(nuke_handler) + for handler in api.Logger.logging.root.handlers[:]]: + api.Logger.logging.getLogger().addHandler(nuke_handler) if not self.nLogger: - self.nLogger = pype.Logger + self.nLogger = api.Logger def reload_config(): @@ -86,8 +86,6 @@ def reload_config(): "app.api", "{}.api".format(AVALON_CONFIG), "{}.templates".format(AVALON_CONFIG), - "{}.nuke".format(AVALON_CONFIG), - "{}.nuke.lib".format(AVALON_CONFIG), "{}.nuke.templates".format(AVALON_CONFIG), "{}.nuke.menu".format(AVALON_CONFIG) ): @@ -100,7 +98,8 @@ def reload_config(): def install(): - pype.fill_avalon_workdir() + + api.fill_avalon_workdir() reload_config() log.info("Registering Nuke plug-ins..") @@ -129,7 +128,7 @@ def install(): menu.install() # load data from templates - pype.load_data_from_templates() + api.load_data_from_templates() def uninstall(): @@ -141,7 +140,7 @@ def uninstall(): pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) # reset data from templates - pype.reset_data_from_templates() + api.reset_data_from_templates() def on_pyblish_instance_toggled(instance, old_value, new_value): diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 79c292b2ba..b3ae82c609 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -2,6 +2,7 @@ import sys from collections import OrderedDict from pprint import pprint from avalon.vendor.Qt import QtGui +from avalon import api, io import avalon.nuke import pype.api as pype import nuke @@ -99,57 +100,6 @@ def add_rendering_knobs(node): return node -def 
update_frame_range(start, end, root=None):
-    """Set Nuke script start and end frame range
-
-    Args:
-        start (float, int): start frame
-        end (float, int): end frame
-        root (object, Optional): root object from nuke's script
-
-    Returns:
-        None
-
-    """
-
-    knobs = {
-        "first_frame": start,
-        "last_frame": end
-    }
-
-    with avalon.nuke.viewer_update_and_undo_stop():
-        for key, value in knobs.items():
-            if root:
-                root[key].setValue(value)
-            else:
-                nuke.root()[key].setValue(value)
-
-
-def get_additional_data(container):
-    """Get Nuke's related data for the container
-
-    Args:
-        container(dict): the container found by the ls() function
-
-    Returns:
-        dict
-    """
-
-    node = container["_tool"]
-    tile_color = node['tile_color'].value()
-    if tile_color is None:
-        return {}
-
-    hex = '%08x' % tile_color
-    rgba = [
-        float(int(hex[0:2], 16)) / 255.0,
-        float(int(hex[2:4], 16)) / 255.0,
-        float(int(hex[4:6], 16)) / 255.0
-    ]
-
-    return {"color": QtGui.QColor().fromRgbF(rgba[0], rgba[1], rgba[2])}
-
-
 def set_viewers_colorspace(viewer):
     assert isinstance(viewer, dict), log.error(
         "set_viewers_colorspace(): argument should be dictionary")
@@ -245,6 +195,85 @@ def get_avalon_knob_data(node):
             return None
     return data
 
+
+def reset_resolution():
+    """Set resolution to project resolution."""
+    log.info("Resetting resolution")
+    project = io.find_one({"type": "project"})
+    asset = api.Session["AVALON_ASSET"]
+    asset = io.find_one({"name": asset, "type": "asset"})
+
+    try:
+        width = asset["data"].get("resolution_width", 1920)
+        height = asset["data"].get("resolution_height", 1080)
+        pixel_aspect = asset["data"].get("pixel_aspect", 1)
+
+        bbox = asset["data"].get("crop", "0.0.1920.1080")
+
+        try:
+            x, y, r, t = bbox.split(".")
+        except Exception as e:
+            x = 0
+            y = 0
+            r = width
+            t = height
+            log.error("{}: {} \nFormat: Crop needs to be set with dots, example: "
+                      "0.0.1920.1080, \nSetting to default".format(__name__, e))
+
+    except KeyError:
+        log.warning(
+            "No resolution information found for \"{0}\".".format(
                project["name"]
            )
        )
+        return
+
+    used_formats = list()
+    for f in nuke.formats():
+        if project["name"] in str(f.name()):
+            used_formats.append(f.name())
+        else:
+            format_name = project["name"] + "_1"
+
+    if used_formats:
+        format_name = "{}_{}".format(
+            project["name"],
+            int(used_formats[-1][-1])+1
+        )
+        log.info("Format exists: {}. "
+                 "Will create new: {}...".format(
+                     used_formats[-1],
+                     format_name)
+                 )
+
+    make_format(
+        width=int(width),
+        height=int(height),
+        x=int(x),
+        y=int(y),
+        r=int(r),
+        t=int(t),
+        pixel_aspect=float(pixel_aspect),
+        project_name=format_name
+    )
+    log.info("Format is set")
+
+
+def make_format(**args):
+    log.info("Format doesn't exist, will create: \n{}".format(args))
+    nuke.addFormat(
+        "{width} "
+        "{height} "
+        "{x} "
+        "{y} "
+        "{r} "
+        "{t} "
+        "{pixel_aspect} "
+        "{project_name}".format(**args)
+    )
+    nuke.root()["format"].setValue("{project_name}".format(**args))
+
+
 # TODO: bellow functions are wip and needs to be check where they are used
 # ------------------------------------

diff --git a/pype/nuke/menu.py b/pype/nuke/menu.py
index 97e2432e16..1fb38e389d 100644
--- a/pype/nuke/menu.py
+++ b/pype/nuke/menu.py
@@ -5,8 +5,17 @@ from pype.nuke import lib
 
 
 def install():
+
     menubar = nuke.menu("Nuke")
     menu = menubar.findItem(Session["AVALON_LABEL"])
 
-    menu.addSeparator()
-    menu.addCommand("Set colorspace...", lib.set_colorspace)
+    # replace reset resolution from avalon core to pype's
+    name = "Reset Resolution"
+    rm_item = [(i, item)
+               for i, item in enumerate(menu.items())
+               if name in item.name()][0]
+    menu.removeItem(rm_item[1].name())
+    menu.addCommand(rm_item[1].name(), lib.reset_resolution, index=rm_item[0])
+
+    # add colorspace menu item
+    menu.addCommand("Set colorspace...", lib.set_colorspace, index=rm_item[0]+1)

From b03ede44f3296b3e0aa91f3744407b67162bdf10 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 3 Dec 2018 08:39:56 +0100
Subject: [PATCH 53/78] load_sequence update

---
 .../plugins/global/load/open_imagesequence.py |   2 +-
 .../nuke/{load => _load_unused}/load_alembic  |   0
 .../nuke/{load => _load_unused}/load_backdrop |   0
 .../{load => _load_unused}/load_camera_abc    |   0
 .../{load => _load_unused}/load_camera_nk     |   0
 .../nuke/_load_unused/load_sequence.py        | 252 +++++++++++++
 .../nuke/{load => _load_unused}/load_still    |   0
 pype/plugins/nuke/load/actions.py             |  12 +-
 pype/plugins/nuke/load/load_sequence.py       | 354 ++++++------------
 9 files changed, 383 insertions(+), 237 deletions(-)
 rename pype/plugins/nuke/{load => _load_unused}/load_alembic (100%)
 rename pype/plugins/nuke/{load => _load_unused}/load_backdrop (100%)
 rename pype/plugins/nuke/{load => _load_unused}/load_camera_abc (100%)
 rename pype/plugins/nuke/{load => _load_unused}/load_camera_nk (100%)
 create mode 100644 pype/plugins/nuke/_load_unused/load_sequence.py
 rename pype/plugins/nuke/{load => _load_unused}/load_still (100%)

diff --git a/pype/plugins/global/load/open_imagesequence.py b/pype/plugins/global/load/open_imagesequence.py
index 8cb16fc507..a910625733 100644
--- a/pype/plugins/global/load/open_imagesequence.py
+++ b/pype/plugins/global/load/open_imagesequence.py
@@ -18,7 +18,7 @@ def open(filepath):
 class PlayImageSequence(api.Loader):
     """Open Image Sequence with system default"""
 
-    families = ["imagesequence"]
+    families = ["write"]
     representations = ["*"]
 
     label = "Play sequence"

diff --git a/pype/plugins/nuke/load/load_alembic b/pype/plugins/nuke/_load_unused/load_alembic
similarity index 100%
rename from pype/plugins/nuke/load/load_alembic
rename to pype/plugins/nuke/_load_unused/load_alembic

diff --git a/pype/plugins/nuke/load/load_backdrop b/pype/plugins/nuke/_load_unused/load_backdrop
similarity index 100%
rename from pype/plugins/nuke/load/load_backdrop
rename to pype/plugins/nuke/_load_unused/load_backdrop

diff --git a/pype/plugins/nuke/load/load_camera_abc b/pype/plugins/nuke/_load_unused/load_camera_abc
similarity index 100%
rename from pype/plugins/nuke/load/load_camera_abc
rename to pype/plugins/nuke/_load_unused/load_camera_abc

diff --git a/pype/plugins/nuke/load/load_camera_nk b/pype/plugins/nuke/_load_unused/load_camera_nk
similarity index 100%
rename from pype/plugins/nuke/load/load_camera_nk
rename to pype/plugins/nuke/_load_unused/load_camera_nk

diff --git a/pype/plugins/nuke/_load_unused/load_sequence.py b/pype/plugins/nuke/_load_unused/load_sequence.py
new file mode 100644
index 0000000000..695dd0b981
--- /dev/null
+++ b/pype/plugins/nuke/_load_unused/load_sequence.py
@@ -0,0 +1,252 @@
+import os
+import contextlib
+
+from avalon import api
+import avalon.io as io
+
+from avalon.nuke import log
+import nuke
+
+
+@contextlib.contextmanager
+def preserve_inputs(node, knobs):
+    """Preserve the node's inputs after context"""
+
+    values = {}
+    for name in knobs:
+        try:
+            knob_value = node[name].value()
+            values[name] = knob_value
+        except ValueError:
+            log.warning("missing knob {} in node "
+                        "{}".format(name, node['name'].value()))
+
+    try:
+        yield
+    finally:
+        for name, value in values.items():
+            node[name].setValue(value)
+
+
+@contextlib.contextmanager
+def preserve_trim(node):
+    """Preserve the relative trim of the Loader tool.
+
+    This tries to preserve the loader's trim (trim in and trim out) after
+    the context by reapplying the "amount" it trims on the clip's length at
+    start and end.
+
+    """
+    # working script frame range
+    script_start = nuke.root()["start_frame"].value()
+
+    start_at_frame = None
+    offset_frame = None
+    if node['frame_mode'].value() == "start at":
+        start_at_frame = node['frame'].value()
+    if node['frame_mode'].value() == "offset":
+        offset_frame = node['frame'].value()
+
+    try:
+        yield
+    finally:
+        if start_at_frame:
+            node['frame_mode'].setValue("start at")
+            node['frame'].setValue(str(script_start))
+            log.info("start frame of reader was set to"
+                     "{}".format(script_start))
+
+        if offset_frame:
+            node['frame_mode'].setValue("offset")
+            node['frame'].setValue(str((script_start + offset_frame)))
+            log.info("start frame of reader was set to"
+                     "{}".format(script_start))
+
+
+def loader_shift(node, frame, relative=True):
+    """Shift global in time by i preserving duration
+
+    This moves the loader by i frames preserving global duration. When relative
+    is False it will shift the global in to the start frame.
+
+    Args:
+        loader (tool): The fusion loader tool.
+        frame (int): The amount of frames to move.
+        relative (bool): When True the shift is relative, else the shift will
+            change the global in to frame.
+
+    Returns:
+        int: The resulting relative frame change (how much it moved)
+
+    """
+    # working script frame range
+    script_start = nuke.root()["start_frame"].value()
+
+    start_at_frame = None
+    offset_frame = None
+    if node['frame_mode'].value() == "start at":
+        start_at_frame = node['frame'].value()
+    if node['frame_mode'].value() == "offset":
+        offset_frame = node['frame'].value()
+
+    if relative:
+        shift = frame
+    else:
+        if start_at_frame:
+            shift = frame
+        if offset_frame:
+            shift = frame + offset_frame
+
+    # Shifting global in will try to automatically compensate for the change
+    # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
+    # input values to "just shift" the clip
+    with preserve_inputs(node, knobs=["file",
+                                      "first",
+                                      "last",
+                                      "originfirst",
+                                      "originlast",
+                                      "frame_mode",
+                                      "frame"]):
+
+        # GlobalIn cannot be set past GlobalOut or vice versa
+        # so we must apply them in the order of the shift. 
+ if start_at_frame: + node['frame_mode'].setValue("start at") + node['frame'].setValue(str(script_start + shift)) + if offset_frame: + node['frame_mode'].setValue("offset") + node['frame'].setValue(str(shift)) + + return int(shift) + + +class LoadSequence(api.Loader): + """Load image sequence into Nuke""" + + families = ["write"] + representations = ["*"] + + label = "Load sequence" + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, data): + + from avalon.nuke import ( + containerise, + ls_img_sequence, + viewer_update_and_undo_stop + ) + log.info("here i am") + # Fallback to asset name when namespace is None + if namespace is None: + namespace = context['asset']['name'] + + # Use the first file for now + # TODO: fix path fname + file = ls_img_sequence(os.path.dirname(self.fname), one=True) + + # Create the Loader with the filename path set + with viewer_update_and_undo_stop(): + # TODO: it might be universal read to img/geo/camera + r = nuke.createNode( + "Read", + "name {}".format(self.name)) # TODO: does self.name exist? + r["file"].setValue(file['path']) + if len(file['frames']) is 1: + first = file['frames'][0][0] + last = file['frames'][0][1] + r["originfirst"].setValue(first) + r["first"].setValue(first) + r["originlast"].setValue(last) + r["last"].setValue(last) + else: + first = file['frames'][0][0] + last = file['frames'][:-1][1] + r["originfirst"].setValue(first) + r["first"].setValue(first) + r["originlast"].setValue(last) + r["last"].setValue(last) + log.warning("Missing frames in image sequence") + + # Set global in point to start frame (if in version.data) + start = context["version"]["data"].get("startFrame", None) + if start is not None: + loader_shift(r, start, relative=False) + + containerise(r, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__) + + def switch(self, container, representation): + self.update(container, representation) + + def update(self, container, representation): + """Update the Loader's path + + Fusion automatically tries to reset some variables when changing + the loader's path to a new file. 
These automatic changes are to its + inputs: + + """ + + from avalon.nuke import ( + viewer_update_and_undo_stop, + ls_img_sequence, + update_container + ) + log.info("this i can see") + node = container["_tool"] + # TODO: prepare also for other readers img/geo/camera + assert node.Class() == "Reader", "Must be Reader" + + root = api.get_representation_path(representation) + file = ls_img_sequence(os.path.dirname(root), one=True) + + # Get start frame from version data + version = io.find_one({"type": "version", + "_id": representation["parent"]}) + start = version["data"].get("startFrame") + if start is None: + log.warning("Missing start frame for updated version" + "assuming starts at frame 0 for: " + "{} ({})".format(node['name'].value(), representation)) + start = 0 + + with viewer_update_and_undo_stop(): + + # Update the loader's path whilst preserving some values + with preserve_trim(node): + with preserve_inputs(node, + knobs=["file", + "first", + "last", + "originfirst", + "originlast", + "frame_mode", + "frame"]): + node["file"] = file["path"] + + # Set the global in to the start frame of the sequence + global_in_changed = loader_shift(node, start, relative=False) + if global_in_changed: + # Log this change to the user + log.debug("Changed '{}' global in:" + " {:d}".format(node['name'].value(), start)) + + # Update the imprinted representation + update_container( + node, + {"representation": str(representation["_id"])} + ) + + def remove(self, container): + + from avalon.nuke import viewer_update_and_undo_stop + + node = container["_tool"] + assert node.Class() == "Reader", "Must be Reader" + + with viewer_update_and_undo_stop(): + nuke.delete(node) diff --git a/pype/plugins/nuke/load/load_still b/pype/plugins/nuke/_load_unused/load_still similarity index 100% rename from pype/plugins/nuke/load/load_still rename to pype/plugins/nuke/_load_unused/load_still diff --git a/pype/plugins/nuke/load/actions.py b/pype/plugins/nuke/load/actions.py index f3b7748f01..449567987a 100644 --- a/pype/plugins/nuke/load/actions.py +++ b/pype/plugins/nuke/load/actions.py @@ -3,6 +3,9 @@ """ from avalon import api +from pype.api import Logger + +log = Logger.getLogger(__name__, "nuke") class SetFrameRangeLoader(api.Loader): @@ -10,7 +13,7 @@ class SetFrameRangeLoader(api.Loader): families = ["animation", "camera", - "imagesequence", + "write", "yeticache", "pointcache"] representations = ["*"] @@ -30,9 +33,10 @@ class SetFrameRangeLoader(api.Loader): start = version_data.get("startFrame", None) end = version_data.get("endFrame", None) + log.info("start: {}, end: {}".format(start, end)) if start is None or end is None: - print("Skipping setting frame range because start or " - "end frame data is missing..") + log.info("Skipping setting frame range because start or " + "end frame data is missing..") return lib.update_frame_range(start, end) @@ -43,7 +47,7 @@ class SetFrameRangeWithHandlesLoader(api.Loader): families = ["animation", "camera", - "imagesequence", + "write", "yeticache", "pointcache"] representations = ["*"] diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 0b771a7007..ee5e93aad5 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -1,127 +1,18 @@ +import nuke import os import contextlib from avalon import api import avalon.io as io -from avalon.nuke import log -import nuke - - -@contextlib.contextmanager -def preserve_inputs(node, knobs): - """Preserve the node's inputs after context""" - - 
values = {} - for name in knobs: - try: - knob_value = node[name].vaule() - values[name] = knob_value - except ValueError: - log.warning("missing knob {} in node {}" - "{}".format(name, node['name'].value())) - - try: - yield - finally: - for name, value in values.items(): - node[name].setValue(value) - - -@contextlib.contextmanager -def preserve_trim(node): - """Preserve the relative trim of the Loader tool. - - This tries to preserve the loader's trim (trim in and trim out) after - the context by reapplying the "amount" it trims on the clip's length at - start and end. - - """ - # working script frame range - script_start = nuke.root()["start_frame"].value() - - start_at_frame = None - offset_frame = None - if node['frame_mode'].value() == "start at": - start_at_frame = node['frame'].value() - if node['frame_mode'].value() is "offset": - offset_frame = node['frame'].value() - - try: - yield - finally: - if start_at_frame: - node['frame_mode'].setValue("start at") - node['frame'].setValue(str(script_start)) - log.info("start frame of reader was set to" - "{}".format(script_start)) - - if offset_frame: - node['frame_mode'].setValue("offset") - node['frame'].setValue(str((script_start + offset_frame))) - log.info("start frame of reader was set to" - "{}".format(script_start)) - - -def loader_shift(node, frame, relative=True): - """Shift global in time by i preserving duration - - This moves the loader by i frames preserving global duration. When relative - is False it will shift the global in to the start frame. - - Args: - loader (tool): The fusion loader tool. - frame (int): The amount of frames to move. - relative (bool): When True the shift is relative, else the shift will - change the global in to frame. - - Returns: - int: The resulting relative frame change (how much it moved) - - """ - # working script frame range - script_start = nuke.root()["start_frame"].value() - - if node['frame_mode'].value() == "start at": - start_at_frame = node['frame'].value() - if node['frame_mode'].value() is "offset": - offset_frame = node['frame'].value() - - if relative: - shift = frame - else: - if start_at_frame: - shift = frame - if offset_frame: - shift = frame + offset_frame - - # Shifting global in will try to automatically compensate for the change - # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those - # input values to "just shift" the clip - with preserve_inputs(node, knobs=["file", - "first", - "last", - "originfirst", - "originlast", - "frame_mode", - "frame"]): - - # GlobalIn cannot be set past GlobalOut or vice versa - # so we must apply them in the order of the shift. 
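# A minimal, corrected sketch of the knob save/restore pattern removed
# here, assuming a Nuke `node` and a module-level `log`; `preserve_knobs`
# is a hypothetical standalone name, not code from the repository.
import contextlib

@contextlib.contextmanager
def preserve_knobs(node, knobs):
    """Remember the listed knob values and restore them after the context."""
    values = {}
    for name in knobs:
        try:
            values[name] = node[name].value()
        except NameError:
            # Nuke raises NameError when a knob does not exist on the node
            log.warning("missing knob {} in node {}".format(
                name, node['name'].value()))
    try:
        yield
    finally:
        for name, value in values.items():
            node[name].setValue(value)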
- if start_at_frame: - node['frame_mode'].setValue("start at") - node['frame'].setValue(str(script_start + shift)) - if offset_frame: - node['frame_mode'].setValue("offset") - node['frame'].setValue(str(shift)) - - return int(shift) +from pype.api import Logger +log = Logger.getLogger(__name__, "nuke") class LoadSequence(api.Loader): """Load image sequence into Nuke""" - families = ["imagesequence"] + families = ["write"] representations = ["*"] label = "Load sequence" @@ -131,122 +22,121 @@ class LoadSequence(api.Loader): def load(self, context, name, namespace, data): - from avalon.nuke import ( - containerise, - ls_img_sequence, - viewer_update_and_undo_stop - ) - - # Fallback to asset name when namespace is None - if namespace is None: - namespace = context['asset']['name'] - - # Use the first file for now - # TODO: fix path fname - file = ls_img_sequence(os.path.dirname(self.fname), one=True) - - # Create the Loader with the filename path set - with viewer_update_and_undo_stop(): - # TODO: it might be universal read to img/geo/camera - r = nuke.createNode( - "Read", - "name {}".format(self.name)) # TODO: does self.name exist? - r["file"].setValue(file['path']) - if len(file['frames']) is 1: - first = file['frames'][0][0] - last = file['frames'][0][1] - r["originfirst"].setValue(first) - r["first"].setValue(first) - r["originlast"].setValue(last) - r["last"].setValue(last) - else: - first = file['frames'][0][0] - last = file['frames'][:-1][1] - r["originfirst"].setValue(first) - r["first"].setValue(first) - r["originlast"].setValue(last) - r["last"].setValue(last) - log.warning("Missing frames in image sequence") - - # Set global in point to start frame (if in version.data) - start = context["version"]["data"].get("startFrame", None) - if start is not None: - loader_shift(r, start, relative=False) - - containerise(r, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__) - - def switch(self, container, representation): - self.update(container, representation) - - def update(self, container, representation): - """Update the Loader's path - - Fusion automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - - """ - - from avalon.nuke import ( - viewer_update_and_undo_stop, - ls_img_sequence, - update_container - ) - - node = container["_tool"] - # TODO: prepare also for other readers img/geo/camera - assert node.Class() == "Reader", "Must be Reader" - - root = api.get_representation_path(representation) - file = ls_img_sequence(os.path.dirname(root), one=True) - - # Get start frame from version data - version = io.find_one({"type": "version", - "_id": representation["parent"]}) - start = version["data"].get("startFrame") - if start is None: - log.warning("Missing start frame for updated version" - "assuming starts at frame 0 for: " - "{} ({})".format(node['name'].value(), representation)) - start = 0 - - with viewer_update_and_undo_stop(): - - # Update the loader's path whilst preserving some values - with preserve_trim(node): - with preserve_inputs(node, - knobs=["file", - "first", - "last", - "originfirst", - "originlast", - "frame_mode", - "frame"]): - node["file"] = file["path"] - - # Set the global in to the start frame of the sequence - global_in_changed = loader_shift(node, start, relative=False) - if global_in_changed: - # Log this change to the user - log.debug("Changed '{}' global in:" - " {:d}".format(node['name'].value(), start)) - - # Update the imprinted representation - update_container( - node, - {"representation": str(representation["_id"])} - ) - - def remove(self, container): - - from avalon.nuke import viewer_update_and_undo_stop - - node = container["_tool"] - assert node.Class() == "Reader", "Must be Reader" - - with viewer_update_and_undo_stop(): - nuke.delete(node) + log.info("context: {}\n".format(context["representation"])) + log.info("name: {}\n".format(name)) + log.info("namespace: {}\n".format(namespace)) + log.info("data: {}\n".format(data)) + return + # # Fallback to asset name when namespace is None + # if namespace is None: + # namespace = context['asset']['name'] + # + # # Use the first file for now + # # TODO: fix path fname + # file = ls_img_sequence(os.path.dirname(self.fname), one=True) + # + # # Create the Loader with the filename path set + # with viewer_update_and_undo_stop(): + # # TODO: it might be universal read to img/geo/camera + # r = nuke.createNode( + # "Read", + # "name {}".format(self.name)) # TODO: does self.name exist? + # r["file"].setValue(file['path']) + # if len(file['frames']) is 1: + # first = file['frames'][0][0] + # last = file['frames'][0][1] + # r["originfirst"].setValue(first) + # r["first"].setValue(first) + # r["originlast"].setValue(last) + # r["last"].setValue(last) + # else: + # first = file['frames'][0][0] + # last = file['frames'][:-1][1] + # r["originfirst"].setValue(first) + # r["first"].setValue(first) + # r["originlast"].setValue(last) + # r["last"].setValue(last) + # log.warning("Missing frames in image sequence") + # + # # Set global in point to start frame (if in version.data) + # start = context["version"]["data"].get("startFrame", None) + # if start is not None: + # loader_shift(r, start, relative=False) + # + # containerise(r, + # name=name, + # namespace=namespace, + # context=context, + # loader=self.__class__.__name__) + # + # def switch(self, container, representation): + # self.update(container, representation) + # + # def update(self, container, representation): + # """Update the Loader's path + # + # Fusion automatically tries to reset some variables when changing + # the loader's path to a new file. 
These automatic changes are to its + # inputs: + # + # """ + # + # from avalon.nuke import ( + # viewer_update_and_undo_stop, + # ls_img_sequence, + # update_container + # ) + # log.info("this i can see") + # node = container["_tool"] + # # TODO: prepare also for other readers img/geo/camera + # assert node.Class() == "Reader", "Must be Reader" + # + # root = api.get_representation_path(representation) + # file = ls_img_sequence(os.path.dirname(root), one=True) + # + # # Get start frame from version data + # version = io.find_one({"type": "version", + # "_id": representation["parent"]}) + # start = version["data"].get("startFrame") + # if start is None: + # log.warning("Missing start frame for updated version" + # "assuming starts at frame 0 for: " + # "{} ({})".format(node['name'].value(), representation)) + # start = 0 + # + # with viewer_update_and_undo_stop(): + # + # # Update the loader's path whilst preserving some values + # with preserve_trim(node): + # with preserve_inputs(node, + # knobs=["file", + # "first", + # "last", + # "originfirst", + # "originlast", + # "frame_mode", + # "frame"]): + # node["file"] = file["path"] + # + # # Set the global in to the start frame of the sequence + # global_in_changed = loader_shift(node, start, relative=False) + # if global_in_changed: + # # Log this change to the user + # log.debug("Changed '{}' global in:" + # " {:d}".format(node['name'].value(), start)) + # + # # Update the imprinted representation + # update_container( + # node, + # {"representation": str(representation["_id"])} + # ) + # + # def remove(self, container): + # + # from avalon.nuke import viewer_update_and_undo_stop + # + # node = container["_tool"] + # assert node.Class() == "Reader", "Must be Reader" + # + # with viewer_update_and_undo_stop(): + # nuke.delete(node) From 91c3bbf0222e6930745bb92487aa59af585714f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 3 Dec 2018 10:41:13 +0100 Subject: [PATCH 54/78] Silo is set to same name as highest parent, except Project --- pype/ftrack/actions/action_syncToAvalon.py | 69 +--------------------- pype/ftrack/ftrack_utils.py | 66 +++++++++++++++++++++ 2 files changed, 69 insertions(+), 66 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index f612e03d43..5e5c7b0605 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -151,66 +151,6 @@ class SyncToAvalon(BaseAction): for child in childrens: self.getShotAsset(child) - def getData(self, entity, session): - entity_type = entity.entity_type - - data = {} - data['ftrackId'] = entity['id'] - data['entityType'] = entity_type - - for cust_attr in self.custom_attributes: - key = cust_attr['key'] - if cust_attr['entity_type'].lower() in ['asset']: - data[key] = entity['custom_attributes'][key] - - elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': - data[key] = entity['custom_attributes'][key] - - elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': - # Put space between capitals (e.g. 
'AssetBuild' -> 'Asset Build') - entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) - # Get object id of entity type - ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id'] - - if cust_attr['object_type_id'] == ent_obj_type_id: - data[key] = entity['custom_attributes'][key] - - if entity_type in ['Project']: - data['code'] = entity['name'] - return data - - # Get info for 'Data' in Avalon DB - tasks = [] - for child in entity['children']: - if child.entity_type in ['Task']: - tasks.append(child['name']) - - # Get list of parents without project - parents = [] - for i in range(1, len(entity['link'])-1): - tmp = session.get(entity['link'][i]['type'], entity['link'][i]['id']) - parents.append(tmp) - - folderStruct = [] - parentId = None - - for parent in parents: - parName = parent['name'] - folderStruct.append(parName) - parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] - if parent['parent'].entity_type != 'project' and parentId is None: - self.importToAvalon(parent) - parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] - - hierarchy = os.path.sep.join(folderStruct) - - data['visualParent'] = parentId - data['parents'] = folderStruct - data['tasks'] = tasks - data['hierarchy'] = hierarchy - - return data - def importToAvalon(self, session, entity): # --- Begin: PUSH TO Avalon --- @@ -240,7 +180,7 @@ class SyncToAvalon(BaseAction): elif self.avalon_project['name'] != entity['full_name']: raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(avalon_asset['name'], name)) - data = self.getData(entity, session) + data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) # Store info about project (FtrackId) io.update_many({ @@ -259,17 +199,14 @@ class SyncToAvalon(BaseAction): ## ----- ASSETS ------ # Presets: - # TODO how to check if entity is Asset Library or AssetBuild? - silo = 'Film' - if entity_type in ['AssetBuild', 'Library']: - silo = 'Assets' + data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) + silo = data.pop('silo') os.environ['AVALON_SILO'] = silo name = entity['name'] os.environ['AVALON_ASSET'] = name - data = self.getData(entity, session) # Try to find asset in current database avalon_asset = None diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index d1d4176153..ab13c8c63b 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -1,5 +1,6 @@ import os import sys +import re from pprint import * import ftrack_api @@ -12,6 +13,71 @@ from app.api import Logger log = Logger.getLogger(__name__) +def get_data(parent, entity, session, custom_attributes): + entity_type = entity.entity_type + + data = {} + data['ftrackId'] = entity['id'] + data['entityType'] = entity_type + + for cust_attr in custom_attributes: + key = cust_attr['key'] + if cust_attr['entity_type'].lower() in ['asset']: + data[key] = entity['custom_attributes'][key] + + elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': + data[key] = entity['custom_attributes'][key] + + elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': + # Put space between capitals (e.g. 
'AssetBuild' -> 'Asset Build') + entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) + # Get object id of entity type + ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id'] + + if cust_attr['object_type_id'] == ent_obj_type_id: + data[key] = entity['custom_attributes'][key] + + if entity_type in ['Project']: + data['code'] = entity['name'] + return data + + # Get info for 'Data' in Avalon DB + tasks = [] + for child in entity['children']: + if child.entity_type in ['Task']: + tasks.append(child['name']) + + # Get list of parents without project + parents = [] + for i in range(1, len(entity['link'])-1): + tmp = session.get(entity['link'][i]['type'], entity['link'][i]['id']) + parents.append(tmp) + + silo = entity['name'] + if len(parents) > 0: + silo = parents[0]['name'] + + folderStruct = [] + parentId = None + + for parent in parents: + parName = parent['name'] + folderStruct.append(parName) + parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] + if parent['parent'].entity_type != 'project' and parentId is None: + parent.importToAvalon(parent) + parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] + + hierarchy = os.path.sep.join(folderStruct) + + data['silo'] = silo + data['visualParent'] = parentId + data['parents'] = folderStruct + data['tasks'] = tasks + data['hierarchy'] = hierarchy + + return data + def avalon_check_name(entity, inSchema = None): alright = True name = entity['name'] From ce1ff14dad38ba7a1e5e700fda3b49cc39dc1fc3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 3 Dec 2018 11:31:25 +0100 Subject: [PATCH 55/78] update action --- pype/ftrack/actions/action_syncToAvalon.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index 5e5c7b0605..596b7362f9 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -15,7 +15,12 @@ from avalon import io, inventory from pype.ftrack import ftrack_utils class SyncToAvalon(BaseAction): - '''Edit meta data action.''' + ''' + Synchronizing data action - from Ftrack to Avalon DB + + + + ''' #: Action identifier. identifier = 'sync.to.avalon' From a0f6e017f9b4dc12c3ee097bfa5451d97df7a7e1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 3 Dec 2018 12:47:24 +0100 Subject: [PATCH 56/78] Added few words --- pype/ftrack/actions/action_syncToAvalon.py | 26 +++++++++++++++++++++- pype/ftrack/ftrack_utils.py | 5 ----- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index 596b7362f9..556c57be2a 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -18,7 +18,28 @@ class SyncToAvalon(BaseAction): ''' Synchronizing data action - from Ftrack to Avalon DB + Stores all information about entity. 
+ - Name(string) - Most important information + - Parent(ObjectId) - Avalon Project Id, if entity is not project itself + - Silo(string) - Last parent except project + - Data(dictionary): + - VisualParent(ObjectId) - Avalon Id of parent asset + - Parents(array of string) - All parent names except project + - Tasks(array of string) - Tasks on asset + - FtrackId(string) + - entityType(string) - entity's type on Ftrack + * All Custom attributes in group 'Avalon' which name don't start with 'avalon_' + These information are stored also for all parents and children entities. + + Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'. + - action IS NOT creating this Custom attribute if doesn't exist + - run 'Create Custom Attributes' action or do it manually (Must be in 'avalon' group) + + If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID: + - names are checked -> shows error if names are not exact the same!!! + - after sync is not allowed to change names! + - only way is to create new entity in ftrack with new name ''' @@ -206,7 +227,10 @@ class SyncToAvalon(BaseAction): # Presets: data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) - silo = data.pop('silo') + silo = entity['name'] + if len(data['parents']) > 0: + silo = data['parents'][0] + os.environ['AVALON_SILO'] = silo name = entity['name'] diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index ab13c8c63b..68c9be6bfb 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -53,10 +53,6 @@ def get_data(parent, entity, session, custom_attributes): tmp = session.get(entity['link'][i]['type'], entity['link'][i]['id']) parents.append(tmp) - silo = entity['name'] - if len(parents) > 0: - silo = parents[0]['name'] - folderStruct = [] parentId = None @@ -70,7 +66,6 @@ def get_data(parent, entity, session, custom_attributes): hierarchy = os.path.sep.join(folderStruct) - data['silo'] = silo data['visualParent'] = parentId data['parents'] = folderStruct data['tasks'] = tasks From 5b482789fe7ee465324b97025f20225df5cf6f34 Mon Sep 17 00:00:00 2001 From: antirotor Date: Mon, 3 Dec 2018 15:02:53 +0100 Subject: [PATCH 57/78] fixed launching on linux --- pype/ftrack/actions/ftrack_action_handler.py | 78 ++++++++++++++++---- 1 file changed, 65 insertions(+), 13 deletions(-) diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index b3fbd28274..6d86a52ce5 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -1,6 +1,7 @@ # :coding: utf-8 # :copyright: Copyright (c) 2017 ftrack import os +import sys import logging import getpass import platform @@ -298,20 +299,71 @@ class AppAction(object): # Full path to executable launcher execfile = None - for ext in os.environ["PATHEXT"].split(os.pathsep): - fpath = os.path.join(path.strip('"'), self.executable + ext) - if os.path.isfile(fpath) and os.access(fpath, os.X_OK): - execfile = fpath - break + if sys.platform == "win32": + + for ext in os.environ["PATHEXT"].split(os.pathsep): + fpath = os.path.join(path.strip('"'), self.executable + ext) + if os.path.isfile(fpath) and os.access(fpath, os.X_OK): + execfile = fpath + break + pass + + # Run SW if was found executable + if execfile is not None: + lib.launch(executable=execfile, args=[], environment=env) + else: + return { + 'success': False, + 'message': "We didn't found launcher for {0}" + .format(self.label) + } + pass + + if 
sys.platform.startswith('linux'): + execfile = os.path.join(path.strip('"'), self.executable) + if os.path.isfile(execfile): + try: + fp = open(execfile) + except PermissionError as p: + log.error('Access denied on {0} - {1}'. + format(execfile, p)) + return { + 'success': False, + 'message': "Access denied on launcher - {}". + format(execfile) + } + fp.close() + # check executable permission + if not os.access(execfile, os.X_OK): + log.error('No executable permission on {}'. + format(execfile)) + return { + 'success': False, + 'message': "No executable permission - {}" + .format(execfile) + } + pass + else: + log.error('Launcher doesn\'t exist - {}'. + format(execfile)) + return { + 'success': False, + 'message': "Launcher doesn't exist - {}" + .format(execfile) + } + pass + # Run SW if was found executable + if execfile is not None: + lib.launch('/usr/bin/env', args=['bash', execfile], environment=env) + else: + return { + 'success': False, + 'message': "We didn't found launcher for {0}" + .format(self.label) + } + pass + - # Run SW if was found executable - if execfile is not None: - lib.launch(executable=execfile, args=[], environment=env) - else: - return { - 'success': False, - 'message': "We didn't found launcher for {0}".format(self.label) - } # RUN TIMER IN FTRACK username = event['source']['user']['username'] From 0cf395c3bc2b2aa645c6c7c7b97d87d2227c10bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 11:29:10 +0100 Subject: [PATCH 58/78] Created attributes are set to group and security roles are set for API only --- .../actions/action_createCustomAttributes.py | 26 +++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/pype/ftrack/actions/action_createCustomAttributes.py b/pype/ftrack/actions/action_createCustomAttributes.py index 5f9dbd7381..13aad6e659 100644 --- a/pype/ftrack/actions/action_createCustomAttributes.py +++ b/pype/ftrack/actions/action_createCustomAttributes.py @@ -89,11 +89,25 @@ class AvalonIdAttribute(BaseAction): # Set session back to begin("session.query" raises error on commit) session.rollback() # Set security roles for attribute - custAttrSecuRole = session.query('SecurityRole').all() + role_api = session.query('SecurityRole where name is "API"').all() # Set Text type of Attribute custom_attribute_type = session.query( 'CustomAttributeType where name is "text"' ).one() + # Get/Set 'avalon' group + groups = session.query('CustomAttributeGroup where name is "avalon"').all() + if len(groups) > 1: + msg = "There are more Custom attribute groups with name 'avalon'" + self.log.warning(msg) + return { 'success': False, 'message':msg } + + elif len(groups) < 1: + group = session.create('CustomAttributeGroup', { + 'name': 'avalon', + }) + session.commit() + else: + group = groups[0] for entity_type in base: # Create a custom attribute configuration. 
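Condensed to its effect, the custom attribute setup in this patch amounts to the following standalone sketch (an illustration assuming an authenticated ftrack_api.Session and that the 'avalon' group already exists; not code from the repository):

import json

import ftrack_api

session = ftrack_api.Session()  # server/key/user come from FTRACK_* env vars

# restrict read and write to the API security role, as in this patch
roles = session.query('SecurityRole where name is "API"').all()
text_type = session.query('CustomAttributeType where name is "text"').one()
group = session.query('CustomAttributeGroup where name is "avalon"').first()

session.create('CustomAttributeConfiguration', {
    'entity_type': 'show',  # per-object types also set 'object_type_id'
    'type': text_type,
    'label': 'Avalon/Mongo Id',
    'key': 'avalon_mongo_id',
    'default': '',
    'write_security_roles': roles,
    'read_security_roles': roles,
    'group': group,
    'config': json.dumps({'markdown': False}),
})
session.commit()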
@@ -103,8 +117,9 @@ class AvalonIdAttribute(BaseAction): 'label': custAttrLabel, 'key': custAttrName, 'default': '', - 'write_security_roles': custAttrSecuRole, - 'read_security_roles': custAttrSecuRole, + 'write_security_roles': role_api, + 'read_security_roles': role_api, + 'group':group, 'config': json.dumps({'markdown': False}) }) @@ -117,8 +132,9 @@ class AvalonIdAttribute(BaseAction): 'label': custAttrLabel, 'key': custAttrName, 'default': '', - 'write_security_roles': custAttrSecuRole, - 'read_security_roles': custAttrSecuRole, + 'write_security_roles': role_api, + 'read_security_roles': role_api, + 'group':group, 'config': json.dumps({'markdown': False}) }) From 01b331fdd9ebb56bb523aa5c89accbe9aaec29a6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 13:18:11 +0100 Subject: [PATCH 59/78] Added few comments --- pype/ftrack/actions/action_syncToAvalon.py | 39 +++++++++++++--------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index d89b0e6618..d907407ac6 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -17,7 +17,7 @@ class SyncToAvalon(BaseAction): Synchronizing data action - from Ftrack to Avalon DB Stores all information about entity. - - Name(string) - Most important information + - Name(string) - Most important information = identifier of entity - Parent(ObjectId) - Avalon Project Id, if entity is not project itself - Silo(string) - Last parent except project - Data(dictionary): @@ -28,17 +28,24 @@ class SyncToAvalon(BaseAction): - entityType(string) - entity's type on Ftrack * All Custom attributes in group 'Avalon' which name don't start with 'avalon_' - These information are stored also for all parents and children entities. + * These information are stored also for all parents and children entities. Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'. - action IS NOT creating this Custom attribute if doesn't exist - - run 'Create Custom Attributes' action or do it manually (Must be in 'avalon' group) + - run 'Create Custom Attributes' action or do it manually (Not recommended) If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID: - - names are checked -> shows error if names are not exact the same!!! + - names are checked -> shows error if names are not exact the same - after sync is not allowed to change names! - only way is to create new entity in ftrack with new name + If ID in 'avalon_mongo_id' is empty string or is not found in DB: + - tries to find entity by name + - found: + - raise error if ftrackId/visual parent/parents are not same + - not found: + - Creates asset/project + ''' #: Action identifier. 
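Put together, the asset document that this action maintains in the Avalon database then has roughly the following shape (a sketch with placeholder values; field names follow the docstring above):

from bson.objectid import ObjectId  # ships with pymongo

project_id = ObjectId()  # placeholder ids, for illustration only
parent_id = ObjectId()

asset_doc = {
    "type": "asset",
    "name": "sh010",                  # the identifier; renaming breaks sync
    "parent": project_id,             # Avalon project id
    "silo": "Film",                   # last parent except project
    "data": {
        "visualParent": parent_id,    # Avalon id of the parent asset
        "parents": ["Film", "sq01"],  # all parent names except project
        "tasks": ["modeling", "compositing"],
        "ftrackId": "00000000-aaaa-bbbb-cccc-000000000000",  # placeholder
        "entityType": "Shot",
    },
}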
@@ -79,7 +86,7 @@ class SyncToAvalon(BaseAction): }) try: - self.log.info("action <" + self.__class__.__name__ + "> is running") + self.log.info("Action <" + self.__class__.__name__ + "> is running") self.ca_mongoid = 'avalon_mongo_id' #TODO AVALON_PROJECTS, AVALON_ASSET, AVALON_SILO should be set up otherwise console log shows avalon debug self.setAvalonAttributes() @@ -96,7 +103,7 @@ class SyncToAvalon(BaseAction): for entity in entities: self.getShotAsset(entity) - # Check duplicate name - raise error if found + # Check names: REGEX in schema/duplicates - raise error if found all_names = [] duplicates = [] @@ -186,8 +193,11 @@ class SyncToAvalon(BaseAction): # Set project template template = lib.get_avalon_project_template_schema() if self.ca_mongoid in entity['custom_attributes']: - projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid]) - self.avalon_project = io.find_one({"_id": projectId}) + try: + projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid]) + self.avalon_project = io.find_one({"_id": projectId}) + except: + self.log.debug("Entity {} don't have stored entity id in ftrack".format(entity['name'])) if self.avalon_project is None: self.avalon_project = io.find_one({ @@ -255,15 +265,14 @@ class SyncToAvalon(BaseAction): else: update = False aD = avalon_asset['data'] - attr = ['ftrackId', 'visualParent', 'parents'] - for a in attr: - if a not in aD: update = True + check_attr = ['ftrackId', 'visualParent', 'parents'] + for attr in check_attr: + if attr not in aD: update = True if update is False: - if (avalon_asset['data']['ftrackId'] != data['ftrackId'] or - avalon_asset['data']['visualParent'] != data['visualParent'] or - avalon_asset['data']['parents'] != data['parents']): - raise ValueError('Entity <{}> is not same like in Avalon DB'.format(name)) + for attr in check_attr: + if (avalon_asset['data'][attr] != data[attr]): + raise ValueError('In Avalon DB already exists entity with name <{}>!'.format(name)) elif avalon_asset['name'] != entity['name']: raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please create new asset'.format(avalon_asset['name'], name)) From 06d473425d9afcdecf0ad5d4679a9af15b44725a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 13:39:42 +0100 Subject: [PATCH 60/78] Added action validation - only for admins, security role - for admins, Library removed from excepted entity type --- .../actions/action_createCustomAttributes.py | 33 +++++++++++-------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/pype/ftrack/actions/action_createCustomAttributes.py b/pype/ftrack/actions/action_createCustomAttributes.py index 13aad6e659..ad1e1af5b9 100644 --- a/pype/ftrack/actions/action_createCustomAttributes.py +++ b/pype/ftrack/actions/action_createCustomAttributes.py @@ -24,14 +24,18 @@ class AvalonIdAttribute(BaseAction): def discover(self, session, entities, event): - ''' Validation ''' + ''' + Validation + - action is only for Administrators + ''' + success = False + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + for role in user['user_security_roles']: + if role['security_role']['name'] == 'Administrator': + success = True - # userId = event['source']['user']['id'] - # user = session.query('User where id is ' + userId).one() - # if user['user_security_roles'][0]['security_role']['name'] != 'Administrator': - # return False - - return True + return success def launch(self, session, entities, event): @@ 
-55,7 +59,7 @@ class AvalonIdAttribute(BaseAction): # Types that don't need object_type_id base = {'show'} # Don't create custom attribute on these entity types: - exceptions = ['task','milestone','library'] + exceptions = ['task', 'milestone'] exceptions.extend(base) # Get all possible object types all_obj_types = session.query('ObjectType').all() @@ -89,7 +93,10 @@ class AvalonIdAttribute(BaseAction): # Set session back to begin("session.query" raises error on commit) session.rollback() # Set security roles for attribute - role_api = session.query('SecurityRole where name is "API"').all() + role_api = session.query('SecurityRole where name is "API"').one() + role_admin = session.query('SecurityRole where name is "Administrator"').one() + roles = [role_api,role_admin] + # Set Text type of Attribute custom_attribute_type = session.query( 'CustomAttributeType where name is "text"' @@ -117,8 +124,8 @@ class AvalonIdAttribute(BaseAction): 'label': custAttrLabel, 'key': custAttrName, 'default': '', - 'write_security_roles': role_api, - 'read_security_roles': role_api, + 'write_security_roles': roles, + 'read_security_roles': roles, 'group':group, 'config': json.dumps({'markdown': False}) }) @@ -132,8 +139,8 @@ class AvalonIdAttribute(BaseAction): 'label': custAttrLabel, 'key': custAttrName, 'default': '', - 'write_security_roles': role_api, - 'read_security_roles': role_api, + 'write_security_roles': roles, + 'read_security_roles': roles, 'group':group, 'config': json.dumps({'markdown': False}) }) From 9910fa0b0d06e5a383ddd8fdbcfc91dd6e777d3d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 14:16:02 +0100 Subject: [PATCH 61/78] Validation Error is now in context --- pype/ftrack/actions/action_syncToAvalon.py | 2 +- pype/ftrack/ftrack_utils.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index d907407ac6..9b8176a088 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -151,7 +151,7 @@ class SyncToAvalon(BaseAction): fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] log_message = "{}/{}/Line: {}".format(exc_type, fname, exc_tb.tb_lineno) self.log.error('Error during syncToAvalon: {}'.format(log_message)) - message = 'Unexpected Error!!! (Please check Log for more information)' + message = 'Unexpected Error - Please check Log for more information' if len(message) > 0: message = "Unable to sync: {}".format(message) diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index 68c9be6bfb..10a1ee8ef3 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -8,7 +8,7 @@ from pype import lib import avalon.io as io import avalon.api import avalon -from avalon.vendor import toml +from avalon.vendor import toml, jsonschema from app.api import Logger log = Logger.getLogger(__name__) @@ -74,6 +74,7 @@ def get_data(parent, entity, session, custom_attributes): return data def avalon_check_name(entity, inSchema = None): + ValidationError = jsonschema.ValidationError alright = True name = entity['name'] if " " in name: From 27618c8d55a1afb92714b38e1b60dd9d13ae222b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 4 Dec 2018 14:49:11 +0100 Subject: [PATCH 62/78] Create write, Publish write, Publish `prerendered.frames`, Load sequence. 
All is working --- pype/ftrack/actions/ftrack_action_handler.py | 1 + pype/nuke/__init__.py | 3 +- pype/nuke/lib.py | 2 +- .../ftrack/integrate_ftrack_instances.py | 3 +- pype/plugins/nuke/load/load_sequence.py | 311 +++++++++++------- .../plugins/nuke/publish/collect_instances.py | 15 +- pype/plugins/nuke/publish/collect_writes.py | 19 +- .../nuke/publish/integrate_rendered_frames.py | 41 +-- pype/plugins/nuke/publish/render_local.py | 4 +- .../nuke/publish/validate_collection.py | 4 +- 10 files changed, 247 insertions(+), 156 deletions(-) diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index b3fbd28274..ba67912c9f 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -71,6 +71,7 @@ class AppAction(object): ), self._launch ) + self.log.info("Application '{}' - Registered successfully".format(self.label)) def _discover(self, event): args = self._translate_event( diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 371fe2a786..83235edc29 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -113,8 +113,7 @@ def install(): # Disable all families except for the ones we explicitly want to see family_states = [ - "render", - "still" + "write", "lifeGroup", "backdrop", "imagesequence", diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 79c292b2ba..af0284bfae 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -241,7 +241,7 @@ def get_avalon_knob_data(node): import toml try: data = toml.loads(node['avalon'].value()) - except: + except Exception: return None return data diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index 9a0a36a413..177ced5ddb 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -20,7 +20,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'rig': 'rig', 'setdress': 'setdress', 'pointcache': 'cache', - 'review': 'mov'} + 'review': 'mov', + 'write': 'img'} def process(self, instance): diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index ee5e93aad5..1cd3688aaf 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -1,14 +1,77 @@ -import nuke import os import contextlib from avalon import api import avalon.io as io + +import nuke + from pype.api import Logger log = Logger.getLogger(__name__, "nuke") +@contextlib.contextmanager +def preserve_trim(node): + """Preserve the relative trim of the Loader tool. + + This tries to preserve the loader's trim (trim in and trim out) after + the context by reapplying the "amount" it trims on the clip's length at + start and end. 
+
+    """
+    # working script frame range
+    script_start = nuke.root()["first_frame"].value()
+
+    start_at_frame = None
+    offset_frame = None
+    if node['frame_mode'].value() == "start at":
+        start_at_frame = node['frame'].value()
+    if node['frame_mode'].value() == "offset":
+        offset_frame = node['frame'].value()
+
+    try:
+        yield
+    finally:
+        if start_at_frame:
+            node['frame_mode'].setValue("start at")
+            node['frame'].setValue(str(script_start))
+            log.info("start frame of reader was set to "
+                     "{}".format(script_start))
+
+        if offset_frame:
+            node['frame_mode'].setValue("offset")
+            node['frame'].setValue(str((script_start + offset_frame)))
+            log.info("start frame of reader was set to "
+                     "{}".format(script_start))
+
+
+def loader_shift(node, frame, relative=True):
+    """Shift the Read node to the script's first frame
+
+    The node is switched to "start at" mode so that its first frame lines
+    up with the script start. The arguments mirror the previous
+    implementation; only the relative mode re-times the node.
+
+    Args:
+        node (nuke.Node): The Read node to shift.
+        frame (int): The amount of frames to move.
+        relative (bool): When True the node is re-timed to start at the
+            script's first frame.
+
+    Returns:
+        int: The script's first frame
+
+    """
+    # working script frame range
+    script_start = nuke.root()["first_frame"].value()
+
+    if relative:
+        node['frame_mode'].setValue("start at")
+        node['frame'].setValue(str(script_start))
+
+    return int(script_start)
+
+
 class LoadSequence(api.Loader):
     """Load image sequence into Nuke"""
 
@@ -21,122 +84,132 @@ class LoadSequence(api.Loader):
     color = "orange"
 
     def load(self, context, name, namespace, data):
+        from avalon.nuke import (
+            containerise,
+            ls_img_sequence,
+            viewer_update_and_undo_stop
+        )
+        for k, v in context.items():
+            log.info("key: `{}`, value: {}\n".format(k, v))
 
-        log.info("context: {}\n".format(context["representation"]))
-        log.info("name: {}\n".format(name))
-        log.info("namespace: {}\n".format(namespace))
-        log.info("data: {}\n".format(data))
-        return
-        # # Fallback to asset name when namespace is None
-        # if namespace is None:
-        #     namespace = context['asset']['name']
-        #
-        # # Use the first file for now
-        # # TODO: fix path fname
-        # file = ls_img_sequence(os.path.dirname(self.fname), one=True)
-        #
-        # # Create the Loader with the filename path set
-        # with viewer_update_and_undo_stop():
-        #     # TODO: it might be universal read to img/geo/camera
-        #     r = nuke.createNode(
-        #         "Read",
-        #         "name {}".format(self.name))  # TODO: does self.name exist? 
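# Usage sketch for the two helpers above, assuming the `nuke` module is in
# scope and the script contains a Read node named "Read_plateMain" (a
# hypothetical name):
read_node = nuke.toNode("Read_plateMain")

# swap the file path while keeping the node's "start at"/"offset" trim
with preserve_trim(read_node):
    read_node["file"].setValue("/new/path/plate.%04d.exr")

# re-time the node to start at the script's first frame
loader_shift(read_node, 1001, relative=True)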
- # r["file"].setValue(file['path']) - # if len(file['frames']) is 1: - # first = file['frames'][0][0] - # last = file['frames'][0][1] - # r["originfirst"].setValue(first) - # r["first"].setValue(first) - # r["originlast"].setValue(last) - # r["last"].setValue(last) - # else: - # first = file['frames'][0][0] - # last = file['frames'][:-1][1] - # r["originfirst"].setValue(first) - # r["first"].setValue(first) - # r["originlast"].setValue(last) - # r["last"].setValue(last) - # log.warning("Missing frames in image sequence") - # - # # Set global in point to start frame (if in version.data) - # start = context["version"]["data"].get("startFrame", None) - # if start is not None: - # loader_shift(r, start, relative=False) - # - # containerise(r, - # name=name, - # namespace=namespace, - # context=context, - # loader=self.__class__.__name__) - # - # def switch(self, container, representation): - # self.update(container, representation) - # - # def update(self, container, representation): - # """Update the Loader's path - # - # Fusion automatically tries to reset some variables when changing - # the loader's path to a new file. These automatic changes are to its - # inputs: - # - # """ - # - # from avalon.nuke import ( - # viewer_update_and_undo_stop, - # ls_img_sequence, - # update_container - # ) - # log.info("this i can see") - # node = container["_tool"] - # # TODO: prepare also for other readers img/geo/camera - # assert node.Class() == "Reader", "Must be Reader" - # - # root = api.get_representation_path(representation) - # file = ls_img_sequence(os.path.dirname(root), one=True) - # - # # Get start frame from version data - # version = io.find_one({"type": "version", - # "_id": representation["parent"]}) - # start = version["data"].get("startFrame") - # if start is None: - # log.warning("Missing start frame for updated version" - # "assuming starts at frame 0 for: " - # "{} ({})".format(node['name'].value(), representation)) - # start = 0 - # - # with viewer_update_and_undo_stop(): - # - # # Update the loader's path whilst preserving some values - # with preserve_trim(node): - # with preserve_inputs(node, - # knobs=["file", - # "first", - # "last", - # "originfirst", - # "originlast", - # "frame_mode", - # "frame"]): - # node["file"] = file["path"] - # - # # Set the global in to the start frame of the sequence - # global_in_changed = loader_shift(node, start, relative=False) - # if global_in_changed: - # # Log this change to the user - # log.debug("Changed '{}' global in:" - # " {:d}".format(node['name'].value(), start)) - # - # # Update the imprinted representation - # update_container( - # node, - # {"representation": str(representation["_id"])} - # ) - # - # def remove(self, container): - # - # from avalon.nuke import viewer_update_and_undo_stop - # - # node = container["_tool"] - # assert node.Class() == "Reader", "Must be Reader" - # - # with viewer_update_and_undo_stop(): - # nuke.delete(node) + # Fallback to asset name when namespace is None + if namespace is None: + namespace = context['asset']['name'] + + # Use the first file for now + # TODO: fix path fname + file = ls_img_sequence(os.path.dirname(self.fname), one=True) + log.info("file: {}\n".format(file)) + + read_name = "Read_" + context["representation"]["context"]["subset"] + # Create the Loader with the filename path set + with viewer_update_and_undo_stop(): + # TODO: it might be universal read to img/geo/camera + r = nuke.createNode( + "Read", + "name {}".format(read_name)) + r["file"].setValue(file['path']) + if 
len(file['frames']) == 1:
+                first = file['frames'][0][0]
+                last = file['frames'][0][1]
+                r["origfirst"].setValue(first)
+                r["first"].setValue(first)
+                r["origlast"].setValue(last)
+                r["last"].setValue(last)
+            else:
+                first = file['frames'][0][0]
+                last = file['frames'][-1][1]
+                r["origfirst"].setValue(first)
+                r["first"].setValue(first)
+                r["origlast"].setValue(last)
+                r["last"].setValue(last)
+                log.warning("Missing frames in image sequence")
+
+            # Set colorspace defined in version data
+            colorspace = context["version"]["data"].get("colorspace", None)
+            if colorspace is not None:
+                r["colorspace"].setValue(str(colorspace))
+
+            # Set global in point to start frame (if in version.data)
+            start = context["version"]["data"].get("startFrame", None)
+            if start is not None:
+                loader_shift(r, start, relative=True)
+
+            # add additional metadata from the version to imprint to Avalon knob
+            add_keys = ["startFrame", "endFrame", "handles",
+                        "source", "colorspace", "author", "fps"]
+
+            data_imprint = {}
+            for k in add_keys:
+                data_imprint.update({k: context["version"]['data'][k]})
+
+            containerise(r,
+                         name=name,
+                         namespace=namespace,
+                         context=context,
+                         loader=self.__class__.__name__,
+                         data=data_imprint)
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def update(self, container, representation):
+        """Update the Read node's path
+
+        Nuke automatically tries to reset some knobs when changing
+        the Read node's path to a new file, so the trim and start frame
+        are preserved around the change.
+
+        """
+
+        from avalon.nuke import (
+            viewer_update_and_undo_stop,
+            ls_img_sequence,
+            update_container
+        )
+        log.info("this i can see")
+        node = container["_tool"]
+        # TODO: prepare also for other readers img/geo/camera
+        assert node.Class() == "Reader", "Must be Reader"
+
+        root = api.get_representation_path(representation)
+        file = ls_img_sequence(os.path.dirname(root), one=True)
+
+        # Get start frame from version data
+        version = io.find_one({"type": "version",
+                               "_id": representation["parent"]})
+        start = version["data"].get("startFrame")
+        if start is None:
+            log.warning("Missing start frame for updated version, "
+                        "assuming starts at frame 0 for: "
+                        "{} ({})".format(node['name'].value(), representation))
+            start = 0
+
+        with viewer_update_and_undo_stop():
+
+            # Update the loader's path whilst preserving some values
+            with preserve_trim(node):
+                node["file"].setValue(file["path"])
+
+            # Set the global in to the start frame of the sequence
+            global_in_changed = loader_shift(node, start, relative=False)
+            if global_in_changed:
+                # Log this change to the user
+                log.debug("Changed '{}' global in:"
+                          " {:d}".format(node['name'].value(), start))
+
+            # Update the imprinted representation
+            update_container(
+                node,
+                {"representation": str(representation["_id"])}
+            )
+
+    def remove(self, container):
+
+        from avalon.nuke import viewer_update_and_undo_stop
+
+        node = container["_tool"]
+        assert node.Class() == "Reader", "Must be Reader"
+
+        with viewer_update_and_undo_stop():
+            nuke.delete(node)
diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index f1fa1276c2..5d64c60252 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -24,24 +24,29 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             except Exception:
                 continue
 
+            try:
+                publish = node.knob("publish").value()
+            except Exception:
+                continue
+
             # get data from avalon knob
             avalon_knob_data = 
get_avalon_knob_data(node) if not avalon_knob_data: continue - subset = avalon_knob_data["subset"] + + subset = avalon_knob_data.get("subset", None) or node["name"].value() # Create instance instance = context.create_instance(subset) instance.add(node) instance.data.update({ + "subset": subset, "asset": os.environ["AVALON_ASSET"], "label": node.name(), "name": node.name(), - "subset": subset, - "families": [avalon_knob_data["families"]], - "family": avalon_knob_data["family"], - "publish": node.knob("publish").value() + "avalonKnob": avalon_knob_data, + "publish": publish }) self.log.info("collected instance: {}".format(instance.data)) instances.append(instance) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index db966fd84d..1f1d79fefe 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -3,6 +3,8 @@ import os import nuke import pyblish.api import logging +from avalon import io, api + log = logging.getLogger(__name__) @@ -15,6 +17,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): hosts = ["nuke", "nukeassist"] def process(self, context): + asset_data = io.find_one({"type": "asset", + "name": api.Session["AVALON_ASSET"]}) + self.log.debug("asset_data: {}".format(asset_data["data"])) for instance in context.data["instances"]: self.log.debug("checking instance: {}".format(instance)) node = instance[0] @@ -63,9 +68,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): else: # dealing with local/farm rendering if node["render_farm"].value(): - families = "{}.farm".format(instance.data["families"][0]) + families = "{}.farm".format(instance.data["avalonKnob"]["families"][0]) else: - families = "{}.local".format(instance.data["families"][0]) + families = "{}.local".format(instance.data["avalonKnob"]["families"][0]) self.log.debug("checking for error: {}".format(label)) instance.data.update({ @@ -73,12 +78,16 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "outputDir": output_dir, "ext": ext, "label": label, + "family": instance.data["avalonKnob"]["family"], "families": [families], - "firstFrame": first_frame, - "lastFrame": last_frame, + "startFrame": first_frame, + "endFrame": last_frame, "outputType": output_type, "stagingDir": output_dir, - + "colorspace": node["colorspace"].value(), + "handles": int(asset_data["data"].get("handles", 0)), + "step": 1, + "fps": int(nuke.root()['fps'].value()) }) self.log.debug("instance.data: {}".format(instance.data)) diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index f482a48cda..8b7df93d1b 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -1,6 +1,7 @@ import os import logging import shutil +import clique import errno import pyblish.api @@ -110,9 +111,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): locations=[LOCATION], data=version_data) + self.log.debug("version: {}".format(version)) self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id + version_id = io.insert_one(version).inserted_id + self.log.debug("version_id: {}".format(version_id)) # Write to disk # _ # | | @@ -130,11 +133,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # "asset": ASSET, # "subset": subset["name"], # "version": version["name"]} - hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] + hierarchy = io.find_one({"type": 'asset', 
"name": ASSET})['data']['parents'] if hierarchy: # hierarchy = os.path.sep.join(hierarchy) hierarchy = os.path.join(*hierarchy) - + self.log.debug("hierarchy: {}".format(hierarchy)) template_data = {"root": root, "project": {"name": PROJECT, "code": "prjX"}, @@ -145,7 +148,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "VERSION": version["name"], "hierarchy": hierarchy} - template_publish = project["config"]["template"]["publish"] + # template_publish = project["config"]["template"]["publish"] anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -153,7 +156,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): representations = [] for files in instance.data["files"]: - # Collection # _______ # |______|\ @@ -206,7 +208,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) dst = anatomy_filled.publish.path - # if instance.data.get('transfer', True): # dst = src # instance.data["transfers"].append([src, dst]) @@ -222,17 +223,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Imprint shortcut to context # for performance reasons. "context": { - "root": root, - "project": PROJECT, - "projectcode": "prjX", - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] + "root": root, + "project": PROJECT, + "projectcode": "prjX", + 'task': api.Session["AVALON_TASK"], + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "version": version["name"], + "hierarchy": hierarchy, + "representation": ext[1:] } } representations.append(representation) @@ -353,9 +354,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "comment": context.data.get("comment")} # Include optional data if present in - optionals = ["startFrame", "endFrame", "step", "handles"] + optionals = ["startFrame", "endFrame", "step", + "handles", "colorspace", "fps", "outputDir"] + for key in optionals: if key in instance.data: - version_data[key] = instance.data[key] + version_data[key] = instance.data.get(key, None) return version_data diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index 55adedb9e5..eee67d1e40 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -29,8 +29,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): self.log.debug("instance collected: {}".format(instance.data)) - first_frame = instance.data.get("firstFrame", None) - last_frame = instance.data.get("lastFrame", None) + first_frame = instance.data.get("startFrame", None) + last_frame = instance.data.get("endFrame", None) node_subset_name = instance.data.get("name", None) self.log.info("Starting render") diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 4088272bc4..e8137d006c 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -35,8 +35,8 @@ class ValidateCollection(pyblish.api.InstancePlugin): collections, remainder = clique.assemble(*instance.data['files']) self.log.info('collections: {}'.format(str(collections))) - frame_length = instance.data["lastFrame"] \ - - instance.data["firstFrame"] + 1 + frame_length = instance.data["endFrame"] \ + - instance.data["startFrame"] + 1 if 
frame_length is not 1: assert len(collections) == 1, self.log.info( From 6e0fc1771d73004cd0b13333de2baa5b182b3c17 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 15:50:20 +0100 Subject: [PATCH 63/78] Top entity is not pushed to Avalon as asset and error message is full now --- pype/ftrack/actions/action_syncToAvalon.py | 21 +++++++++------------ pype/ftrack/ftrack_utils.py | 13 +++++++------ 2 files changed, 16 insertions(+), 18 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index 9b8176a088..343df2e702 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -233,10 +233,12 @@ class SyncToAvalon(BaseAction): ## ----- ASSETS ------ # Presets: - data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) - silo = entity['name'] - if len(data['parents']) > 0: + + # return if entity is silo + if len(data['parents']) == 0: + return + else: silo = data['parents'][0] os.environ['AVALON_SILO'] = silo @@ -263,16 +265,11 @@ class SyncToAvalon(BaseAction): # Raise error if it seems to be different ent. with same name else: - update = False aD = avalon_asset['data'] - check_attr = ['ftrackId', 'visualParent', 'parents'] - for attr in check_attr: - if attr not in aD: update = True - - if update is False: - for attr in check_attr: - if (avalon_asset['data'][attr] != data[attr]): - raise ValueError('In Avalon DB already exists entity with name <{}>!'.format(name)) + # check_attr = ['parents', 'ftrackId', 'visualParent'] + if (avalon_asset['data']['parents'] != data['parents'] or + avalon_asset['silo'] != silo): + raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name)) elif avalon_asset['name'] != entity['name']: raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please create new asset'.format(avalon_asset['name'], name)) diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index 10a1ee8ef3..66d739829b 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -49,16 +49,17 @@ def get_data(parent, entity, session, custom_attributes): # Get list of parents without project parents = [] - for i in range(1, len(entity['link'])-1): - tmp = session.get(entity['link'][i]['type'], entity['link'][i]['id']) - parents.append(tmp) - folderStruct = [] + for i in range(1, len(entity['link'])-1): + parEnt = session.get(entity['link'][i]['type'], entity['link'][i]['id']) + parName = parEnt['name'] + folderStruct.append(parName) + if i > 1: + parents.append(parEnt) + parentId = None for parent in parents: - parName = parent['name'] - folderStruct.append(parName) parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] if parent['parent'].entity_type != 'project' and parentId is None: parent.importToAvalon(parent) From 884ac36f25cf4c1e669da1dc98ccdce1aa9b9060 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 16:35:04 +0100 Subject: [PATCH 64/78] Creates Checkbox for auto-sync event in Project. 
Action update roles and group on existing attributes --- .../actions/action_createCustomAttributes.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/pype/ftrack/actions/action_createCustomAttributes.py b/pype/ftrack/actions/action_createCustomAttributes.py index ad1e1af5b9..d45a92a01d 100644 --- a/pype/ftrack/actions/action_createCustomAttributes.py +++ b/pype/ftrack/actions/action_createCustomAttributes.py @@ -53,11 +53,19 @@ class AvalonIdAttribute(BaseAction): }) session.commit() try: + # Checkbox for event sync + cbxSyncName = 'avalon_auto_sync' + cbxSyncLabel = 'Avalon auto-sync' + cbxSyncExist = False + # Attribute Name and Label custAttrName = 'avalon_mongo_id' custAttrLabel = 'Avalon/Mongo Id' + + attrs_update = set() # Types that don't need object_type_id base = {'show'} + # Don't create custom attribute on these entity types: exceptions = ['task', 'milestone'] exceptions.extend(base) @@ -77,6 +85,7 @@ class AvalonIdAttribute(BaseAction): # Get IDs of filtered object types all_obj_types_id = set() + for obj in all_obj_types: all_obj_types_id.add(obj['id']) @@ -84,11 +93,16 @@ class AvalonIdAttribute(BaseAction): current_cust_attr = session.query('CustomAttributeConfiguration').all() # Filter already existing AvalonMongoID attr. for attr in current_cust_attr: + if attr['key'] == cbxSyncName: + cbxSyncExist = True + cbxAttribute = attr if attr['key'] == custAttrName: if attr['entity_type'] in base: base.remove(attr['entity_type']) + attrs_update.add(attr) if attr['object_type_id'] in all_obj_types_id: all_obj_types_id.remove(attr['object_type_id']) + attrs_update.add(attr) # Set session back to begin("session.query" raises error on commit) session.rollback() @@ -116,6 +130,24 @@ class AvalonIdAttribute(BaseAction): else: group = groups[0] + # Checkbox for auto-sync event / Create or Update(roles + group) + if cbxSyncExist is False: + cbxType = session.query('CustomAttributeType where name is "boolean"').first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'show', + 'type': cbxType, + 'label': cbxSyncLabel, + 'key': cbxSyncName, + 'default': False, + 'write_security_roles': roles, + 'read_security_roles': roles, + 'group':group, + }) + else: + cbxAttribute['write_security_roles'] = roles + cbxAttribute['read_security_roles'] = roles + cbxAttribute['group'] = group + for entity_type in base: # Create a custom attribute configuration. 
session.create('CustomAttributeConfiguration', { @@ -145,11 +177,18 @@ class AvalonIdAttribute(BaseAction): 'config': json.dumps({'markdown': False}) }) + for attr in attrs_update: + attr['write_security_roles'] = roles + attr['read_security_roles'] = roles + attr['group'] = group + job['status'] = 'done' session.commit() except Exception as e: + session.rollback() job['status'] = 'failed' + session.commit() self.log.error("Creating custom attributes failed ({})".format(e)) return True From e1308d7f644453ac3bbb0e17993e432db688c553 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 16:55:36 +0100 Subject: [PATCH 65/78] Action shows only to users in roleList: Admin & Project Manager --- pype/ftrack/actions/action_syncToAvalon.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index 343df2e702..829ac3dedc 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -60,12 +60,20 @@ class SyncToAvalon(BaseAction): def discover(self, session, entities, event): ''' Validation ''' - + roleCheck = False discover = False - for entity in entities: - if entity.entity_type.lower() not in ['task', 'assetversion']: - discover = True - break + roleList = ['Administrator', 'Project Manager'] + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + + for role in user['user_security_roles']: + if role['security_role']['name'] in roleList: + roleCheck = True + if roleCheck is True: + for entity in entities: + if entity.entity_type.lower() not in ['task', 'assetversion']: + discover = True + break return discover From 5818350ba2df84f47782fc6eebd484d17cf6937c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 18:16:57 +0100 Subject: [PATCH 66/78] User's can't move entites --- pype/ftrack/actions/action_syncToAvalon.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index 829ac3dedc..c354f2332d 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -35,9 +35,8 @@ class SyncToAvalon(BaseAction): - run 'Create Custom Attributes' action or do it manually (Not recommended) If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID: - - names are checked -> shows error if names are not exact the same - - after sync is not allowed to change names! - - only way is to create new entity in ftrack with new name + - name, parents and silo are checked -> shows error if are not exact the same + - after sync it is not allowed to change names or move entities If ID in 'avalon_mongo_id' is empty string or is not found in DB: - tries to find entity by name @@ -281,6 +280,10 @@ class SyncToAvalon(BaseAction): elif avalon_asset['name'] != entity['name']: raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please create new asset'.format(avalon_asset['name'], name)) + elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: + old_path = "/".join(avalon_asset['data']['parents']) + new_path = "/".join(data['parents']) + raise ValueError('You can\'t move with entities. 
Entity "{}" was moved from "{}" to "{}" '.format(avalon_asset['name'], old_path, new_path)) # Update info io.update_many({'type': 'asset','name': name}, From bf30d1ef822cec56fca68cb394bf3496562d452b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 4 Dec 2018 18:51:27 +0100 Subject: [PATCH 67/78] insert 'v' before {version:0>3} --- pype/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/lib.py b/pype/lib.py index 0b9e066703..b0f427aeb6 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -355,7 +355,7 @@ def get_avalon_project_template(): """ template = Templates(type=["anatomy"]) proj_template = {} - proj_template['workfile'] = '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>' + proj_template['workfile'] = '{asset[name]}_{task[name]}_v{version:0>3}<_{comment}>' proj_template['work'] = '{root}/{project}/{hierarchy}/{asset}/work/{task}' proj_template['publish'] = '{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}' # TODO this down should work but it can't be in default.toml: From 33a59a52e85334f4dfbe9685954e6ad491246c15 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 5 Dec 2018 01:03:41 +0100 Subject: [PATCH 68/78] changing the file gather for ftrack publishing a bit. fixes gathering of image sequences --- .../ftrack/integrate_ftrack_instances.py | 21 +++++--- pype/plugins/global/publish/integrate.py | 15 +++--- pype/plugins/nuke/publish/collect_writes.py | 2 +- .../nuke/publish/integrate_rendered_frames.py | 52 ++++++++++--------- .../nuke/publish/validate_collection.py | 8 +++ 5 files changed, 58 insertions(+), 40 deletions(-) diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index 9a0a36a413..e853976b0f 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -1,5 +1,6 @@ import pyblish.api import os +import clique class IntegrateFtrackInstance(pyblish.api.InstancePlugin): @@ -20,7 +21,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'rig': 'rig', 'setdress': 'setdress', 'pointcache': 'cache', - 'review': 'mov'} + 'review': 'mov', + 'write': 'comp'} def process(self, instance): @@ -36,20 +38,25 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): componentList = [] - transfers = instance.data["transfers"] + dst_list = instance.data['destination_list'] ft_session = instance.context.data["ftrackSession"] location = ft_session.query( 'Location where name is "ftrack.unmanaged"').one() self.log.debug('location {}'.format(location)) - for src, dest in transfers: - filename, ext = os.path.splitext(src) - self.log.debug('source filename: ' + filename) - self.log.debug('source ext: ' + ext) + for file in instance.data['destination_list']: + self.log.debug('file {}'.format(file)) + + for file in dst_list: + filename, ext = os.path.splitext(file) + self.log.debug('dest ext: ' + ext) componentList.append({"assettype_data": { "short": asset_type, + }, + "asset_data": { + "name": instance.data["subset"], }, "assetversion_data": { "version": version_number, @@ -57,7 +64,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "component_data": { "name": ext[1:], # Default component name is "main". 
}, - "component_path": dest, + "component_path": file, 'component_location': location, "component_overwrite": False, } diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index e20f59133c..4a55cd9e22 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -136,12 +136,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # \|________| # root = api.registered_root() - # template_data = {"root": root, - # "project": PROJECT, - # "silo": asset['silo'], - # "asset": ASSET, - # "subset": subset["name"], - # "version": version["name"]} + hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] if hierarchy: # hierarchy = os.path.sep.join(hierarchy) @@ -163,6 +158,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Find the representations to transfer amongst the files # Each should be a single representation (as such, a single extension) representations = [] + destination_list = [] for files in instance.data["files"]: @@ -195,6 +191,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): dst = anatomy_filled.publish.path instance.data["transfers"].append([src, dst]) + template = anatomy.publish.path else: # Single file @@ -218,13 +215,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): dst = anatomy_filled.publish.path instance.data["transfers"].append([src, dst]) + template = anatomy.publish.path representation = { "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, "name": ext[1:], - "data": {'path': dst}, + "data": {'path': dst, 'template': template}, "dependencies": instance.data.get("dependencies", "").split(), # Imprint shortcut to context @@ -243,6 +241,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "representation": ext[1:] } } + + destination_list.append(dst) + instance.data['destination_list'] = destination_list representations.append(representation) self.log.info("Registering {} items".format(len(representations))) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index db966fd84d..3a6a354b4b 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -73,7 +73,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "outputDir": output_dir, "ext": ext, "label": label, - "families": [families], + "families": [families, 'ftrack'], "firstFrame": first_frame, "lastFrame": last_frame, "outputType": output_type, diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index f482a48cda..e9447c517a 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -1,6 +1,7 @@ import os import logging import shutil +import clique import errno import pyblish.api @@ -30,7 +31,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.register(instance) self.log.info("Integrating Asset in to the database ...") - # self.integrate(instance) + if instance.data.get('transfer', True): + self.integrate(instance) def register(self, instance): @@ -124,12 +126,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # \|________| # root = api.registered_root() - # template_data = {"root": root, - # "project": PROJECT, - # "silo": asset['silo'], - # "asset": ASSET, - # "subset": subset["name"], - # "version": version["name"]} hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] if hierarchy: 
# hierarchy = os.path.sep.join(hierarchy) @@ -139,6 +135,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "project": {"name": PROJECT, "code": "prjX"}, "silo": asset['silo'], + "task": api.Session["AVALON_TASK"], "asset": ASSET, "family": instance.data['family'], "subset": subset["name"], @@ -151,6 +148,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Find the representations to transfer amongst the files # Each should be a single representation (as such, a single extension) representations = [] + destination_list = [] for files in instance.data["files"]: @@ -164,26 +162,30 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # |_______| # if isinstance(files, list): + collection = files # Assert that each member has identical suffix - _, ext = os.path.splitext(collection[0]) - assert all(ext == os.path.splitext(name)[1] - for name in collection), ( - "Files had varying suffixes, this is a bug" - ) - - assert not any(os.path.isabs(name) for name in collection) - - template_data["representation"] = ext[1:] + dst_collection = [] for fname in collection: + filename, ext = os.path.splitext(fname) + _, frame = os.path.splitext(filename) + + template_data["representation"] = ext[1:] + template_data["frame"] = frame[1:] + src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled.publish.path + dst = anatomy_filled.render.path - # if instance.data.get('transfer', True): - # instance.data["transfers"].append([src, dst]) + dst_collection.append(dst) + instance.data["transfers"].append([src, dst]) + + template = anatomy.render.path + + collections, remainder = clique.assemble(dst_collection) + dst = collections[0].format('{head}{padding}{tail}') else: # Single file @@ -204,19 +206,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin): src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled.publish.path + dst = anatomy_filled.render.path + template = anatomy.render.path + instance.data["transfers"].append([src, dst]) - # if instance.data.get('transfer', True): - # dst = src - # instance.data["transfers"].append([src, dst]) - representation = { "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, "name": ext[1:], - "data": {'path': src}, + "data": {'path': dst, 'template': template}, "dependencies": instance.data.get("dependencies", "").split(), # Imprint shortcut to context @@ -235,6 +235,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "representation": ext[1:] } } + destination_list.append(dst) + instance.data['destination_list'] = destination_list representations.append(representation) self.log.info("Registering {} items".format(len(representations))) diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 4088272bc4..d3a2df4ead 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -45,6 +45,14 @@ class ValidateCollection(pyblish.api.InstancePlugin): assert remainder is not None, self.log.info("There are some extra files in folder") + basename, ext = os.path.splitext(list(collections[0])[0]) + assert all(ext == os.path.splitext(name)[1] + for name in collections[0]), self.log.info( + "Files had varying suffixes" + ) + + assert not any(os.path.isabs(name) for name in collections[0]), self.log.info("some file name are absolute") + self.log.info('frame_length: {}'.format(frame_length)) 
self.log.info('len(list(instance.data["files"])): {}'.format( len(list(instance.data["files"][0])))) From 23d7bc5f0c12fbfeef61aca56deaf1e6aa41ceb0 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 5 Dec 2018 01:39:14 +0100 Subject: [PATCH 69/78] add project code to publishes --- pype/plugins/global/publish/integrate.py | 8 +++----- pype/plugins/nuke/publish/integrate_rendered_frames.py | 7 +++---- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 4a55cd9e22..698eb907e9 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -82,8 +82,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.debug("Establishing staging directory @ %s" % stagingdir) - project = io.find_one({"type": "project"}, - projection={"config.template.publish": True}) + project = io.find_one({"type": "project"}) asset = io.find_one({"type": "asset", "name": ASSET, @@ -136,7 +135,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # \|________| # root = api.registered_root() - hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] if hierarchy: # hierarchy = os.path.sep.join(hierarchy) @@ -144,7 +142,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_data = {"root": root, "project": {"name": PROJECT, - "code": "prjX"}, + "code": project['data']['code']}, "silo": asset['silo'], "asset": ASSET, "family": instance.data['family'], @@ -230,7 +228,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "context": { "root": root, "project": PROJECT, - "projectcode": "prjX", + "projectcode": project['data']['code'], 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index e9447c517a..d02516b67f 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -72,8 +72,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.log.debug("Establishing staging directory @ %s" % stagingdir) - project = io.find_one({"type": "project"}, - projection={"config.template.publish": True}) + project = io.find_one({"type": "project"}) asset = io.find_one({"type": "asset", "name": ASSET, @@ -133,7 +132,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): template_data = {"root": root, "project": {"name": PROJECT, - "code": "prjX"}, + "code": project['data']['code']}, "silo": asset['silo'], "task": api.Session["AVALON_TASK"], "asset": ASSET, @@ -224,7 +223,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "context": { "root": root, "project": PROJECT, - "projectcode": "prjX", + "projectcode": project['data']['code'], 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, From 112f4ca48db573a78be2e8123fd6d319e4a9c842 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 5 Dec 2018 11:15:45 +0100 Subject: [PATCH 70/78] add img and render to ftrack assets --- pype/plugins/ftrack/integrate_ftrack_instances.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index e853976b0f..441dd6f88d 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -22,7 +22,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'setdress': 
'setdress', 'pointcache': 'cache', 'review': 'mov', - 'write': 'comp'} + 'write': 'img', + 'render': 'render'} def process(self, instance): From 0229c35a4d207a23100e312eab33000697100e65 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Dec 2018 13:47:13 +0100 Subject: [PATCH 71/78] wip on reset_format --- pype/nuke/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index b3ae82c609..173b3bf692 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -207,7 +207,6 @@ def reset_resolution(): width = asset["data"].get("resolution_width", 1920) height = asset["data"].get("resolution_height", 1080) pixel_aspect = asset["data"].get("pixel_aspect", 1) - bbox = asset["data"].get("crop", "0.0.1920.1080") try: @@ -236,6 +235,7 @@ def reset_resolution(): format_name = project["name"] + "_1" if used_formats: + check_format = used_formats[-1] format_name = "{}_{}".format( project["name"], int(used_formats[-1][-1])+1 @@ -246,6 +246,8 @@ def reset_resolution(): format_name) ) + # format_build + make_format( width=int(width), height=int(height), From f2796b12e04952260ff14e970bbbf8273b187e9f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 5 Dec 2018 15:14:46 +0100 Subject: [PATCH 72/78] if format alredy exists it will not create new --- pype/nuke/lib.py | 72 ++++++++++++++++++++++++++++++++---------------- 1 file changed, 48 insertions(+), 24 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 173b3bf692..4f251bb729 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -230,49 +230,73 @@ def reset_resolution(): used_formats = list() for f in nuke.formats(): if project["name"] in str(f.name()): - used_formats.append(f.name()) + used_formats.append(f) else: format_name = project["name"] + "_1" + crnt_fmt_str = "" if used_formats: check_format = used_formats[-1] format_name = "{}_{}".format( project["name"], - int(used_formats[-1][-1])+1 + int(used_formats[-1].name()[-1])+1 ) - log.info("Format exists: {}. " - "Will create new: {}...".format( - used_formats[-1], - format_name) - ) + log.info( + "Format exists: {}. 
" + "Will create new: {}...".format( + used_formats[-1].name(), + format_name) + ) + crnt_fmt_kargs = { + "width": (check_format.width()), + "height": (check_format.height()), + "x": int(check_format.x()), + "y": int(check_format.y()), + "r": int(check_format.r()), + "t": int(check_format.t()), + "pixel_aspect": float(check_format.pixelAspect()) + } + crnt_fmt_str = make_format_string(**crnt_fmt_kargs) + log.info("crnt_fmt_str: {}".format(crnt_fmt_str)) - # format_build + new_fmt_kargs = { + "width": int(width), + "height": int(height), + "x": int(x), + "y": int(y), + "r": int(r), + "t": int(t), + "pixel_aspect": float(pixel_aspect), + "project_name": format_name + } - make_format( - width=int(width), - height=int(height), - x=int(x), - y=int(y), - r=int(r), - t=int(t), - pixel_aspect=float(pixel_aspect), - project_name=format_name - ) - log.info("Format is set") + new_fmt_str = make_format_string(**new_fmt_kargs) + log.info("new_fmt_str: {}".format(new_fmt_str)) + + if new_fmt_str not in crnt_fmt_str: + make_format(frm_str=new_fmt_str, + project_name=new_fmt_kargs["project_name"]) + + log.info("Format is set") -def make_format(**args): - log.info("Format does't exist, will create: \n{}".format(args)) - nuke.addFormat( +def make_format_string(**args): + format_str = ( "{width} " "{height} " "{x} " "{y} " "{r} " "{t} " - "{pixel_aspect} " - "{project_name}".format(**args) + "{pixel_aspect:.2f}".format(**args) ) + return format_str + + +def make_format(**args): + log.info("Format does't exist, will create: \n{}".format(args)) + nuke.addFormat("{frm_str} " + "{project_name}".format(**args)) nuke.root()["format"].setValue("{project_name}".format(**args)) From d1c1c4f039c363c572769b7ae1a4ba9e4a9c98bc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Dec 2018 16:15:11 +0100 Subject: [PATCH 73/78] Sync to avalon event should work like action. Synchronize only projects that have checked Avalon auto-sync. --- pype/ftrack/actions/action_syncToAvalon.py | 13 +- pype/ftrack/events/event_sync_to_avalon.py | 182 ++++++++---------- .../events/{test_event.py => event_test.py} | 2 +- pype/ftrack/events/ftrack_event_handler.py | 72 ++----- pype/ftrack/ftrack_utils.py | 2 +- 5 files changed, 104 insertions(+), 167 deletions(-) rename pype/ftrack/events/{test_event.py => event_test.py} (90%) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index c354f2332d..cad43684c9 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -219,7 +219,7 @@ class SyncToAvalon(BaseAction): }) elif self.avalon_project['name'] != entity['full_name']: - raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(avalon_asset['name'], name)) + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], name)) data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) @@ -235,7 +235,7 @@ class SyncToAvalon(BaseAction): if self.ca_mongoid in entity['custom_attributes']: entity['custom_attributes'][self.ca_mongoid] = str(self.projectId) else: - self.log.error("Custom attribute for <{}> is not created.".format(entity['name'])) + self.log.error('Custom attribute for "{}" is not created.'.format(entity['name'])) return ## ----- ASSETS ------ @@ -271,15 +271,12 @@ class SyncToAvalon(BaseAction): self.log.debug("Asset {} - created".format(name)) # Raise error if it seems to be different ent. 
with same name - else: - aD = avalon_asset['data'] - # check_attr = ['parents', 'ftrackId', 'visualParent'] - if (avalon_asset['data']['parents'] != data['parents'] or - avalon_asset['silo'] != silo): + elif (avalon_asset['data']['parents'] != data['parents'] or + avalon_asset['silo'] != silo): raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name)) elif avalon_asset['name'] != entity['name']: - raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please create new asset'.format(avalon_asset['name'], name)) + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set name back'.format(avalon_asset['name'], name)) elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: old_path = "/".join(avalon_asset['data']['parents']) new_path = "/".join(data['parents']) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 4854a36f8c..0ed231c625 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -12,12 +12,17 @@ from pype.ftrack import ftrack_utils class Sync_to_Avalon(BaseEvent): def launch(self, session, entities, event): + self.ca_mongoid = 'avalon_mongo_id' + # If mongo_id textfield has changed: RETURN! + # - infinite loop for ent in event['data']['entities']: - if self.ca_mongoid in ent['keys']: - return False + if 'keys' in ent: + if self.ca_mongoid in ent['keys']: + return self.proj = None + # get project for entity in entities: try: base_proj = entity['link'][0] @@ -26,13 +31,24 @@ class Sync_to_Avalon(BaseEvent): self.proj = session.get(base_proj['type'], base_proj['id']) break - if self.proj is None: - return False + # check if project is set to auto-sync + if (self.proj is None or + 'avalon_auto_sync' not in self.proj['custom_attributes'] or + self.proj['custom_attributes']['avalon_auto_sync'] is False): + return - os.environ["AVALON_PROJECT"] = self.proj['full_name'] + # check if project have Custom Attribute 'avalon_mongo_id' + if self.ca_mongoid not in self.proj['custom_attributes']: + message = "Custom attribute '{}' for 'Project' is not created or don't have set permissions for API".format(self.ca_mongoid) + self.log.warning(message) + self.show_message(event, message, False) + return self.projectId = self.proj['custom_attributes'][self.ca_mongoid] + os.environ["AVALON_PROJECT"] = self.proj['full_name'] + + # get avalon project if possible io.install() try: self.avalon_project = io.find_one({"_id": ObjectId(self.projectId)}) @@ -40,13 +56,13 @@ class Sync_to_Avalon(BaseEvent): self.avalon_project = None importEntities = [] - if self.avalon_project is None: self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]}) if self.avalon_project is None: importEntities.append(self.proj) else: self.projectId = self.avalon_project['_id'] + io.uninstall() for entity in entities: @@ -56,7 +72,8 @@ class Sync_to_Avalon(BaseEvent): try: mongo_id = entity['custom_attributes'][self.ca_mongoid] except: - message = "Please run 'Create Attributes' action or create custom attribute 'avalon_mongo_id' manually for {}".format(entity.entity_type) + message = "Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity.entity_type) + self.log.warning(message) self.show_message(event, message, False) return @@ -69,60 +86,37 @@ class Sync_to_Avalon(BaseEvent): self.setAvalonAttributes() io.install() 
+ try: + for entity in importEntities: + self.importToAvalon(session, entity) + session.commit() - for entity in importEntities: - self.importToAvalon(entity) + except ValueError as ve: + message = str(ve) + self.show_message(event, message, False) + self.log.warning(message) + + except Exception as e: + message = str(e) + ftrack_message = "SyncToAvalon event ended with unexpected error please check log file for more information." + self.show_message(event, ftrack_message, False) + self.log.error(message) io.uninstall() - session.commit() + return - if message != "": - self.show_message(event, message, False) + def importToAvalon(self, session, entity): + if self.ca_mongoid not in entity['custom_attributes']: + raise ValueError("Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity['name'])) - return True - - def importToAvalon(self, entity): - data = {} + ftrack_utils.avalon_check_name(entity) entity_type = entity.entity_type - type = 'asset' - name = entity['name'] - silo = 'Film' if entity_type in ['Project']: type = 'project' name = entity['full_name'] - data['code'] = entity['name'] - elif entity_type in ['AssetBuild', 'Library']: - silo = 'Assets' - - os.environ["AVALON_ASSET"] = name - os.environ["AVALON_SILO"] = silo - - data['ftrackId'] = entity['id'] - data['entityType'] = entity_type - - for cust_attr in self.custom_attributes: - key = cust_attr['key'] - if cust_attr['entity_type'].lower() in ['asset']: - data[key] = entity['custom_attributes'][key] - - elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': - data[key] = entity['custom_attributes'][key] - - elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': - # Put space between capitals (e.g. 
'AssetBuild' -> 'Asset Build') - entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) - # Get object id of entity type - ent_obj_type_id = self.session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id'] - - if cust_attr['object_type_id'] == ent_obj_type_id: - data[key] = entity['custom_attributes'][key] - - mongo_id = entity['custom_attributes'][self.ca_mongoid] - - if entity_type in ['Project']: config = ftrack_utils.get_config(entity) template = lib.get_avalon_project_template_schema() @@ -130,8 +124,12 @@ class Sync_to_Avalon(BaseEvent): inventory.save(name, config, template) self.avalon_project = io.find_one({'type': 'project', 'name': name}) + elif self.avalon_project['name'] != name: + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], name)) + self.projectId = self.avalon_project['_id'] - data['code'] = entity['name'] + + data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) io.update_many( {"_id": ObjectId(self.projectId)}, @@ -140,48 +138,30 @@ class Sync_to_Avalon(BaseEvent): 'config':config, 'data':data, }}) - try: - entity['custom_attributes'][self.ca_mongoid] = str(self.projectId) - except Exception as e: - self.log.error(e) + + entity['custom_attributes'][self.ca_mongoid] = str(self.projectId) + return - if self.avalon_project is None: - self.importToAvalon(self.proj) + self.importToAvalon(session, self.proj) - tasks = [] - for child in entity['children']: - if child.entity_type in ['Task']: - tasks.append(child['name']) + data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) - folderStruct = [] - parentId = None + # return if entity is silo + if len(data['parents']) == 0: + return + else: + silo = data['parents'][0] - parents = [] - for i in range(1, len(entity['link'])-1): - tmp_type = entity['link'][i]['type'] - tmp_id = entity['link'][i]['id'] - tmp = self.session.get(tmp_type, tmp_id) - parents.append(tmp) + name = entity['name'] - for parent in parents: - parname = self.checkName(parent['name']) - folderStruct.append(parname) - avalonAarent = io.find_one({'type': 'asset', 'name': parname}) - if parent['parent'].entity_type != 'project' and avalonAarent is None: - self.importToAvalon(parent) - parentId = io.find_one({'type': 'asset', 'name': parname})['_id'] - - hierarchy = os.path.sep.join(folderStruct) - - data['tasks'] = tasks - if parentId is not None: - data['parents'] = folderStruct - data['visualParent'] = parentId - data['hierarchy'] = hierarchy + os.environ["AVALON_ASSET"] = name + os.environ['AVALON_SILO'] = silo avalon_asset = None + # existence of this custom attr is already checked + mongo_id = entity['custom_attributes'][self.ca_mongoid] if mongo_id is not "": avalon_asset = io.find_one({'_id': ObjectId(mongo_id)}) @@ -190,15 +170,17 @@ class Sync_to_Avalon(BaseEvent): avalon_asset = io.find_one({'type': 'asset', 'name': name}) if avalon_asset is None: mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId)) - else: - if name != avalon_asset['name']: - string = "'{}->{}'".format(name, avalon_asset['name']) - if entity_type in ['Shot','AssetBuild']: - self.nameShotAsset.append(string) - mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId)) - else: - self.nameChanged.append(string) - return + # Raise error if it seems to be different ent. 
with same name + elif (avalon_asset['data']['parents'] != data['parents'] or + avalon_asset['silo'] != silo): + raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name)) + elif avalon_asset['name'] != entity['name']: + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set name back'.format(avalon_asset['name'], name)) + elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: + old_path = "/".join(avalon_asset['data']['parents']) + new_path = "/".join(data['parents']) + raise ValueError('You can\'t move with entities. Entity "{}" was moved from "{}" to "{}" , avalon DB won\'t work properly'.format(avalon_asset['name'], old_path, new_path)) + io.update_many( {"_id": ObjectId(mongo_id)}, @@ -206,21 +188,9 @@ class Sync_to_Avalon(BaseEvent): 'name':name, 'silo':silo, 'data':data, - 'parent': self.projectId}}) + 'parent': ObjectId(self.projectId)}}) - try: - entity['custom_attributes'][self.ca_mongoid] = str(mongo_id) - except Exception as e: - self.log.error(e) - - - def checkName(self, input_name): - if input_name.find(" ") == -1: - name = input_name - else: - name = input_name.replace(" ", "-") - print("Name of {} was changed to {}".format(input_name, name)) - return name + entity['custom_attributes'][self.ca_mongoid] = str(mongo_id) def setAvalonAttributes(self): self.custom_attributes = [] diff --git a/pype/ftrack/events/test_event.py b/pype/ftrack/events/event_test.py similarity index 90% rename from pype/ftrack/events/test_event.py rename to pype/ftrack/events/event_test.py index c2586aa666..128e1ad197 100644 --- a/pype/ftrack/events/test_event.py +++ b/pype/ftrack/events/event_test.py @@ -11,7 +11,7 @@ class Test_Event(BaseEvent): '''just a testing event''' - # self.show_message(event,"test",True) + # self.log.info(event) return True diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py index d0ecd53192..a440d93fc4 100644 --- a/pype/ftrack/events/ftrack_event_handler.py +++ b/pype/ftrack/events/ftrack_event_handler.py @@ -17,18 +17,15 @@ from app.api import ( class BaseEvent(object): - '''Custom Action base class + '''Custom Event base class - `label` a descriptive string identifing your action. + BaseEvent is based on ftrack.update event + - get entities from event - `varaint` To group actions together, give them the same - label and specify a unique variant per action. + If want to use different event base + - override register and *optional _translate_event method - `identifier` a unique identifier for your action. 
- - `description` a verbose descriptive text for you action - - ''' + ''' def __init__(self, session): '''Expects a ftrack_api.Session instance''' @@ -46,7 +43,7 @@ class BaseEvent(object): '''Registers the event, subscribing the the discover and launch topics.''' self.session.event_hub.subscribe('topic=ftrack.update', self._launch) - self.log.info("----- event - <" + self.__class__.__name__ + "> - Has been registered -----") + self.log.info("Event '{}' - Registered successfully".format(self.__class__.__name__)) def _translate_event(self, session, event): '''Return *event* translated structure to be used with the API.''' @@ -91,25 +88,18 @@ class BaseEvent(object): ) def _launch(self, event): + + self.session.reset() + args = self._translate_event( self.session, event ) - # TODO REMOVE THIS - ONLY FOR TEST PROJECT - for a in args[0]: - try: - if (a['project']['name'] != 'eventproj'): - return True - except: - continue - - response = self.launch( + self.launch( self.session, *args ) - return self._handle_result( - self.session, response, *args - ) + return def launch(self, session, entities, event): '''Callback method for the custom action. @@ -133,6 +123,14 @@ class BaseEvent(object): raise NotImplementedError() def show_message(self, event, input_message, result = False): + """ + Shows message to user who triggered event + - event - just source of user id + - input_message - message that is shown to user + - result - changes color of message (based on ftrack settings) + - True = Violet + - False = Red + """ if not isinstance(result, bool): result = False @@ -147,38 +145,10 @@ class BaseEvent(object): topic='ftrack.action.trigger-user-interface', data=dict( type='message', - success=False, + success=result, message=message ), target='applicationId=ftrack.client.web and user.id="{0}"'.format(user_id) ), on_error='ignore' ) - - def _handle_result(self, session, result, entities, event): - '''Validate the returned result from the action callback''' - if isinstance(result, bool): - result = { - 'success': result, - 'message': ( - '{0} launched successfully.'.format( - self.__class__.__name__ - ) - ) - } - - elif isinstance(result, dict): - for key in ('success', 'message'): - if key in result: - continue - - raise KeyError( - 'Missing required key: {0}.'.format(key) - ) - - else: - self.log.error( - 'Invalid result type must be bool or dictionary!' 
- ) - - return result diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index 66d739829b..caaeb6c707 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -62,7 +62,7 @@ def get_data(parent, entity, session, custom_attributes): for parent in parents: parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] if parent['parent'].entity_type != 'project' and parentId is None: - parent.importToAvalon(parent) + parent.importToAvalon(session, parent) parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] hierarchy = os.path.sep.join(folderStruct) From a7ac81dcc10dc2b793f4acb5fa19480397621842 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Dec 2018 16:58:39 +0100 Subject: [PATCH 74/78] Prepared action for event server --- pype/ftrack/events/action_sync_to_avalon.py | 366 ++++++++++++++++++++ 1 file changed, 366 insertions(+) create mode 100644 pype/ftrack/events/action_sync_to_avalon.py diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py new file mode 100644 index 0000000000..e305b30739 --- /dev/null +++ b/pype/ftrack/events/action_sync_to_avalon.py @@ -0,0 +1,366 @@ +import sys +import argparse +import logging +import os +import ftrack_api +import json +import re +from pype import lib +from pype.ftrack.actions.ftrack_action_handler import BaseAction +from bson.objectid import ObjectId +from avalon import io, inventory + +from pype.ftrack import ftrack_utils + +class Sync_To_Avalon(BaseAction): + ''' + Synchronizing data action - from Ftrack to Avalon DB + + Stores all information about entity. + - Name(string) - Most important information = identifier of entity + - Parent(ObjectId) - Avalon Project Id, if entity is not project itself + - Silo(string) - Last parent except project + - Data(dictionary): + - VisualParent(ObjectId) - Avalon Id of parent asset + - Parents(array of string) - All parent names except project + - Tasks(array of string) - Tasks on asset + - FtrackId(string) + - entityType(string) - entity's type on Ftrack + * All Custom attributes in group 'Avalon' which name don't start with 'avalon_' + + * These information are stored also for all parents and children entities. + + Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'. + - action IS NOT creating this Custom attribute if doesn't exist + - run 'Create Custom Attributes' action or do it manually (Not recommended) + + If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID: + - name, parents and silo are checked -> shows error if are not exact the same + - after sync it is not allowed to change names or move entities + + If ID in 'avalon_mongo_id' is empty string or is not found in DB: + - tries to find entity by name + - found: + - raise error if ftrackId/visual parent/parents are not same + - not found: + - Creates asset/project + + ''' + + #: Action identifier. + identifier = 'sync.to.avalon' + #: Action label. + label = 'SyncToAvalon' + #: Action description. + description = 'Send data from Ftrack to Avalon' + #: Action icon. 
+ icon = 'https://cdn1.iconfinder.com/data/icons/hawcons/32/699650-icon-92-inbox-download-512.png' + + def register(self): + '''Registers the action, subscribing the the discover and launch topics.''' + self.session.event_hub.subscribe( + 'topic=ftrack.action.discover', + self._discover + ) + + self.session.event_hub.subscribe( + 'topic=ftrack.action.launch and data.actionIdentifier={0}'.format( + self.identifier + ), + self._launch + ) + + self.log.info("Action '{}' - Registered successfully".format(self.__class__.__name__)) + + def discover(self, session, entities, event): + ''' Validation ''' + roleCheck = False + discover = False + roleList = ['Administrator', 'Project Manager'] + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + + for role in user['user_security_roles']: + if role['security_role']['name'] in roleList: + roleCheck = True + if roleCheck is True: + for entity in entities: + if entity.entity_type.lower() not in ['task', 'assetversion']: + discover = True + break + + return discover + + + def launch(self, session, entities, event): + message = "" + + # JOB SETTINGS + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + + job = session.create('Job', { + 'user': user, + 'status': 'running', + 'data': json.dumps({ + 'description': 'Synch Ftrack to Avalon.' + }) + }) + + try: + self.log.info("Action <" + self.__class__.__name__ + "> is running") + self.ca_mongoid = 'avalon_mongo_id' + #TODO AVALON_PROJECTS, AVALON_ASSET, AVALON_SILO should be set up otherwise console log shows avalon debug + self.setAvalonAttributes() + self.importable = [] + + # get from top entity in hierarchy all parent entities + top_entity = entities[0]['link'] + if len(top_entity) > 1: + for e in top_entity: + parent_entity = session.get(e['type'], e['id']) + self.importable.append(parent_entity) + + # get all child entities separately/unique + for entity in entities: + self.getShotAsset(entity) + + # Check names: REGEX in schema/duplicates - raise error if found + all_names = [] + duplicates = [] + + for e in self.importable: + ftrack_utils.avalon_check_name(e) + if e['name'] in all_names: + duplicates.append("'{}'".format(e['name'])) + else: + all_names.append(e['name']) + + if len(duplicates) > 0: + raise ValueError("Entity name duplication: {}".format(", ".join(duplicates))) + + ## ----- PROJECT ------ + # store Ftrack project- self.importable[0] must be project entity!!! 
+ self.entityProj = self.importable[0] + # set AVALON_ env + os.environ["AVALON_PROJECT"] = self.entityProj["full_name"] + os.environ["AVALON_ASSET"] = self.entityProj["full_name"] + + self.avalon_project = None + + io.install() + + # Import all entities to Avalon DB + for e in self.importable: + self.importToAvalon(session, e) + + io.uninstall() + + job['status'] = 'done' + session.commit() + self.log.info('Synchronization to Avalon was successfull!') + + except ValueError as ve: + job['status'] = 'failed' + session.commit() + message = str(ve) + self.log.error('Error during syncToAvalon: {}'.format(message)) + + except Exception as e: + job['status'] = 'failed' + session.commit() + exc_type, exc_obj, exc_tb = sys.exc_info() + fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] + log_message = "{}/{}/Line: {}".format(exc_type, fname, exc_tb.tb_lineno) + self.log.error('Error during syncToAvalon: {}'.format(log_message)) + message = 'Unexpected Error - Please check Log for more information' + + if len(message) > 0: + message = "Unable to sync: {}".format(message) + return { + 'success': False, + 'message': message + } + + return { + 'success': True, + 'message': "Synchronization was successfull" + } + + def setAvalonAttributes(self): + self.custom_attributes = [] + all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one() + for cust_attr in all_avalon_attr['custom_attribute_configurations']: + if 'avalon_' not in cust_attr['key']: + self.custom_attributes.append(cust_attr) + + def getShotAsset(self, entity): + if not (entity.entity_type in ['Task']): + if entity not in self.importable: + self.importable.append(entity) + + if entity['children']: + childrens = entity['children'] + for child in childrens: + self.getShotAsset(child) + + def importToAvalon(self, session, entity): + # --- Begin: PUSH TO Avalon --- + + entity_type = entity.entity_type + + if entity_type.lower() in ['project']: + # Set project Config + config = ftrack_utils.get_config(entity) + # Set project template + template = lib.get_avalon_project_template_schema() + if self.ca_mongoid in entity['custom_attributes']: + try: + projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid]) + self.avalon_project = io.find_one({"_id": projectId}) + except: + self.log.debug("Entity {} don't have stored entity id in ftrack".format(entity['name'])) + + if self.avalon_project is None: + self.avalon_project = io.find_one({ + "type": "project", + "name": entity["full_name"] + }) + if self.avalon_project is None: + inventory.save(entity['full_name'], config, template) + self.avalon_project = io.find_one({ + "type": "project", + "name": entity["full_name"] + }) + + elif self.avalon_project['name'] != entity['full_name']: + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], name)) + + data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) + + # Store info about project (FtrackId) + io.update_many({ + 'type': 'project', + 'name': entity['full_name'] + }, { + '$set':{'data':data, 'config':config} + }) + + self.projectId = self.avalon_project["_id"] + if self.ca_mongoid in entity['custom_attributes']: + entity['custom_attributes'][self.ca_mongoid] = str(self.projectId) + else: + self.log.error('Custom attribute for "{}" is not created.'.format(entity['name'])) + return + + ## ----- ASSETS ------ + # Presets: + data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) + + # return if 
entity is silo + if len(data['parents']) == 0: + return + else: + silo = data['parents'][0] + + os.environ['AVALON_SILO'] = silo + + name = entity['name'] + os.environ['AVALON_ASSET'] = name + + + # Try to find asset in current database + avalon_asset = None + if self.ca_mongoid in entity['custom_attributes']: + try: + entityId = ObjectId(entity['custom_attributes'][self.ca_mongoid]) + avalon_asset = io.find_one({"_id": entityId}) + except: + self.log.debug("Entity {} don't have stored entity id in ftrack".format(entity['name'])) + + if avalon_asset is None: + avalon_asset = io.find_one({'type': 'asset', 'name': name}) + # Create if don't exists + if avalon_asset is None: + inventory.create_asset(name, silo, data, self.projectId) + self.log.debug("Asset {} - created".format(name)) + + # Raise error if it seems to be different ent. with same name + elif (avalon_asset['data']['parents'] != data['parents'] or + avalon_asset['silo'] != silo): + raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name)) + + elif avalon_asset['name'] != entity['name']: + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set name back'.format(avalon_asset['name'], name)) + elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: + old_path = "/".join(avalon_asset['data']['parents']) + new_path = "/".join(data['parents']) + raise ValueError('You can\'t move with entities. Entity "{}" was moved from "{}" to "{}" '.format(avalon_asset['name'], old_path, new_path)) + + # Update info + io.update_many({'type': 'asset','name': name}, + {'$set':{'data':data, 'silo': silo}}) + + self.log.debug("Asset {} - updated".format(name)) + + entityId = io.find_one({'type': 'asset', 'name': name})['_id'] + ## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK + # Set custom attribute to avalon/mongo id of entity (parentID is last) + if self.ca_mongoid in entity['custom_attributes']: + entity['custom_attributes'][self.ca_mongoid] = str(entityId) + else: + self.log.error("Custom attribute for <{}> is not created.".format(entity['name'])) + + session.commit() + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + + # Validate that session is an instance of ftrack_api.Session. If not, + # assume that register is being called from an old or incompatible API and + # return without doing anything. + if not isinstance(session, ftrack_api.session.Session): + return + + action_handler = Sync_To_Avalon(session) + action_handler.register() + + +def main(arguments=None): + '''Set up logging and register action.''' + if arguments is None: + arguments = [] + + parser = argparse.ArgumentParser() + # Allow setting of logging level from arguments. + loggingLevels = {} + for level in ( + logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, + logging.ERROR, logging.CRITICAL + ): + loggingLevels[logging.getLevelName(level).lower()] = level + + parser.add_argument( + '-v', '--verbosity', + help='Set the logging output verbosity.', + choices=loggingLevels.keys(), + default='info' + ) + namespace = parser.parse_args(arguments) + + # Set up basic logging + logging.basicConfig(level=loggingLevels[namespace.verbosity]) + + session = ftrack_api.Session() + register(session) + + # Wait for events + logging.info( + 'Registered actions and listening for events. Use Ctrl-C to abort.' 
+ ) + session.event_hub.wait() + + +if __name__ == '__main__': + raise SystemExit(main(sys.argv[1:])) From 2da8a47442ad381c6b503b71c9919ab290599f1b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Dec 2018 19:00:29 +0100 Subject: [PATCH 75/78] preparation for 'ftrack_resources' --- pype/ftrack/actions/action_Apps.py | 10 +++++++++- pype/ftrack/actions/ftrack_action_handler.py | 17 +++++------------ 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/pype/ftrack/actions/action_Apps.py b/pype/ftrack/actions/action_Apps.py index 3d1bf093de..084ffa9aec 100644 --- a/pype/ftrack/actions/action_Apps.py +++ b/pype/ftrack/actions/action_Apps.py @@ -31,11 +31,19 @@ def registerApp(app, session): label = apptoml['ftrack_label'] icon = None + ftrack_resources = "" # Path to resources here + if 'icon' in apptoml: icon = apptoml['icon'] + if '{ftrack_resources}' in icon: + icon = icon.format(ftrack_resources) + + description = None + if 'description' in apptoml: + description = apptoml['description'] # register action - AppAction(session, label, name, executable, variant, icon).register() + AppAction(session, label, name, executable, variant, icon, description).register() def register(session): diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index 15c57dbb1c..89fa669992 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -14,9 +14,6 @@ import acre from pype import api as pype -log = pype.Logger.getLogger(__name__, "ftrack") - -log.debug("pype.Anatomy: {}".format(pype.Anatomy)) class AppAction(object): @@ -231,13 +228,9 @@ class AppAction(object): entity, id = entities[0] entity = session.get(entity, id) - silo = "Film" - if entity.entity_type == "AssetBuild": - silo = "Asset" - # set environments for Avalon os.environ["AVALON_PROJECT"] = entity['project']['full_name'] - os.environ["AVALON_SILO"] = silo + os.environ["AVALON_SILO"] = entity['ancestors'][0]['name'] os.environ["AVALON_ASSET"] = entity['parent']['name'] os.environ["AVALON_TASK"] = entity['name'] os.environ["AVALON_APP"] = self.identifier @@ -262,7 +255,7 @@ class AppAction(object): try: anatomy = anatomy.format(data) except Exception as e: - log.error("{0} Error in anatomy.format: {1}".format(__name__, e)) + self.log.error("{0} Error in anatomy.format: {1}".format(__name__, e)) os.environ["AVALON_WORKDIR"] = os.path.join(anatomy.work.root, anatomy.work.folder) # TODO Add paths to avalon setup from tomls @@ -328,7 +321,7 @@ class AppAction(object): try: fp = open(execfile) except PermissionError as p: - log.error('Access denied on {0} - {1}'. + self.log.error('Access denied on {0} - {1}'. format(execfile, p)) return { 'success': False, @@ -338,7 +331,7 @@ class AppAction(object): fp.close() # check executable permission if not os.access(execfile, os.X_OK): - log.error('No executable permission on {}'. + self.log.error('No executable permission on {}'. format(execfile)) return { 'success': False, @@ -347,7 +340,7 @@ class AppAction(object): } pass else: - log.error('Launcher doesn\'t exist - {}'. + self.log.error('Launcher doesn\'t exist - {}'. format(execfile)) return { 'success': False, From 3fda5c32b3d9761ab3981d007b55bf53409e59cb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Dec 2018 10:13:47 +0100 Subject: [PATCH 76/78] Action syncToAvalon - changed identifier and label. 
Is visible only for role 'Pypeclub' --- pype/ftrack/actions/action_syncToAvalon.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index cad43684c9..04e9ed53a5 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -48,9 +48,9 @@ class SyncToAvalon(BaseAction): ''' #: Action identifier. - identifier = 'sync.to.avalon' + identifier = 'sync.to.avalon.local' #: Action label. - label = 'SyncToAvalon' + label = 'SyncToAvalon - Local' #: Action description. description = 'Send data from Ftrack to Avalon' #: Action icon. @@ -61,7 +61,7 @@ class SyncToAvalon(BaseAction): ''' Validation ''' roleCheck = False discover = False - roleList = ['Administrator', 'Project Manager'] + roleList = ['Pypeclub'] userId = event['source']['user']['id'] user = session.query('User where id is ' + userId).one() From f59bcf2e44eec0708649b4fdd9ce45e517829055 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Dec 2018 12:57:44 +0100 Subject: [PATCH 77/78] renamed filename --- .../{action_syncToAvalon.py => action_sync_to_avalon_local.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename pype/ftrack/actions/{action_syncToAvalon.py => action_sync_to_avalon_local.py} (100%) diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_sync_to_avalon_local.py similarity index 100% rename from pype/ftrack/actions/action_syncToAvalon.py rename to pype/ftrack/actions/action_sync_to_avalon_local.py From e19fbea3e9fb18c243a05ce034308e33eceb75ad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Dec 2018 15:20:05 +0100 Subject: [PATCH 78/78] Identifier is now full app name(with version), so same app won't launch twice if two versions are available. --- pype/ftrack/actions/action_Apps.py | 3 ++- pype/ftrack/actions/ftrack_action_handler.py | 4 +--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/actions/action_Apps.py b/pype/ftrack/actions/action_Apps.py index 3d1bf093de..76c6ba1e06 100644 --- a/pype/ftrack/actions/action_Apps.py +++ b/pype/ftrack/actions/action_Apps.py @@ -9,7 +9,7 @@ from app.api import Logger log = Logger.getLogger(__name__) def registerApp(app, session): - name = app['name'].split("_")[0] + name = app['name'].replace("_", ".") variant = "" try: variant = app['name'].split("_")[1] @@ -59,6 +59,7 @@ def register(session): appNames.append(app['name']) apps.append(app) + apps = sorted(apps, key=lambda x: x['name']) for app in apps: try: registerApp(app, session) diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index 15c57dbb1c..63561951d4 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -72,9 +72,7 @@ class AppAction(object): ), self._launch ) - self.log.info("Application '{}' - Registered successfully".format(self.label)) - - self.log.info("Application '{}' - Registered successfully".format(self.label)) + self.log.info("Application '{} {}' - Registered successfully".format(self.label,self.variant)) def _discover(self, event): args = self._translate_event(