Merged in feature/PYPE-387-nuke-to-deadline (pull request #182)

Feature/PYPE-387 nuke to deadline

Approved-by: Milan Kolar <milan@orbi.tools>
Ondřej Samohel 2019-06-23 20:09:43 +00:00 committed by Milan Kolar
commit 3e25cc3bfd
26 changed files with 450 additions and 403 deletions

View file

@@ -2,7 +2,6 @@ import os
from pyblish import api as pyblish
from avalon import api as avalon
from Qt import QtWidgets
import logging
log = logging.getLogger(__name__)
@@ -25,20 +24,6 @@ def install():
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
# pyblish-qml settings.
try:
__import__("pyblish_qml")
except ImportError as e:
log.error("Could not load pyblish-qml: %s " % e)
else:
from pyblish_qml import settings
app = QtWidgets.QApplication.instance()
screen_resolution = app.desktop().screenGeometry()
width, height = screen_resolution.width(), screen_resolution.height()
settings.WindowSize = (width / 3, height * 0.75)
settings.WindowPosition = (0, 0)
def uninstall():
log.info("Deregistering global plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)

View file

@@ -5,7 +5,8 @@ from .plugin import (
ValidatePipelineOrder,
ValidateContentsOrder,
ValidateSceneOrder,
ValidateMeshOrder
ValidateMeshOrder,
ValidationException
)
# temporary fix, might
@@ -62,6 +63,8 @@ __all__ = [
"Logger",
"ValidationException",
# contextual templates
# get data to preloaded templates
"load_data_from_templates",

View file

@@ -24,6 +24,7 @@ for path in sys.path:
log.info("_ removing from sys.path: `{}`".format(path))
sys.path.remove(path)
def onScriptLoad():
if nuke.env['LINUX']:
nuke.tcl('load ffmpegReader')
@@ -37,12 +38,12 @@ def checkInventoryVersions():
"""
Actual version identifier of Loaded containers
Any time this function is run it will check all nodes and filter only Loader nodes for its version. It will get all versions from database
and check if the node is having actual version. If not then it will color it to red.
Any time this function is run it will check all nodes and filter only
Loader nodes for their version. It will get all versions from the database
and check whether each node is at the current version. If not, it will
colour the node red.
"""
# get all Loader nodes by avalon attribute metadata
for each in nuke.allNodes():
if each.Class() == 'Read':
@@ -195,12 +196,17 @@ def create_write_node(name, data):
except Exception as e:
log.error("problem with resolving anatomy tepmlate: {}".format(e))
fpath = str(anatomy_filled["render"]["path"]).replace("\\", "/")
# build file path to workfiles
fpath = str(anatomy_filled["work"]["folder"]).replace("\\", "/")
fpath = '{work}/renders/v{version}/{subset}.{frame}.{ext}'.format(
work=fpath, version=data["version"], subset=data["subset"],
frame=data["frame"],
ext=data["nuke_dataflow_writes"]["file_type"])
# create directory
if not os.path.isdir(os.path.dirname(fpath)):
log.info("path does not exist")
os.makedirs(os.path.dirname(fpath), 0766)
os.makedirs(os.path.dirname(fpath), 0o766)
_data = OrderedDict({
"file": fpath

View file

@@ -69,3 +69,7 @@ def contextplugin_should_run(plugin, context):
return True
return False
class ValidationException(Exception):
pass
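
With ValidationException now exported from pype.api, validators can raise a typed error instead of bare asserts. A minimal sketch of a hypothetical validator using it:

import pyblish.api
from pype.api import ValidationException


class ValidateExample(pyblish.api.InstancePlugin):
    """Hypothetical validator showing the new exception in use."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Example"

    def process(self, instance):
        if not instance.data.get("representations"):
            msg = "No representations collected for {}".format(instance)
            self.log.error(msg)
            raise ValidationException(msg)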

View file

@@ -22,9 +22,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'rig': 'rig',
'setdress': 'setdress',
'pointcache': 'cache',
'write': 'img',
'render': 'render',
'nukescript': 'comp',
'write': 'render',
'review': 'mov',
'plate': 'img'
}

View file

@@ -1,148 +0,0 @@
import os
import pyblish.api
from avalon import io, api
class CollectAssumedDestination(pyblish.api.ContextPlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Collect Assumed Destination"
order = pyblish.api.CollectorOrder + 0.498
exclude_families = ["plate"]
def process(self, context):
for instance in context:
if [ef for ef in self.exclude_families
if ef in instance.data["family"]]:
self.log.info("Ignoring instance: {}".format(instance))
return
self.process_item(instance)
def process_item(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
anatomy = instance.context.data['anatomy']
# self.log.info(anatomy.anatomy())
self.log.info(anatomy.templates)
# template = anatomy.publish.path
anatomy_filled = anatomy.format(template_data)
self.log.info(anatomy_filled)
mock_template = anatomy_filled["publish"]["path"]
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
return
# get all the stuff from the database
subset_name = instance.data["subset"]
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
# FIXME: io is not initialized at this point for shell host
io.install()
project = io.find_one({"type": "project",
"name": project_name},
projection={"config": True, "data": True})
template = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
asset = io.find_one({"type": "asset",
"name": asset_name,
"parent": project["_id"]})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset['silo']
subset = io.find_one({"type": "subset",
"name": subset_name,
"parent": asset["_id"]})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one({"type": "version",
"parent": subset["_id"]},
sort=[("name", -1)])
# if there is a subset there ought to be version
if version is not None:
version_number += int(version["name"])
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["template"] = template
instance.data["assumedTemplateData"] = template_data
# We take the parent folder of representation 'filepath'
instance.data["assumedDestination"] = os.path.dirname(
(anatomy.format(template_data))["publish"]["path"]
)

View file

@@ -204,7 +204,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
'ext': '{}'.format(ext),
'files': list(collection),
"stagingDir": root,
"anatomy_template": "render"
"anatomy_template": "render",
"frameRate": fps
}
instance.data["representations"].append(representation)

View file

@@ -27,5 +27,5 @@ class CollectPresets(api.ContextPlugin):
context.data["presets"] = presets
self.log.info(context.data["presets"])
# self.log.info(context.data["presets"])
return

View file

@@ -70,6 +70,9 @@ class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
sub_proc = subprocess.Popen(subprocess_mov)
sub_proc.wait()
if not os.path.isfile(full_output_path):
raise("Quicktime wasn't created succesfully")
if "representations" not in instance.data:
instance.data["representations"] = []

View file

@@ -60,7 +60,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"nukescript",
"render",
"rendersetup",
"write",
"rig",
"plate",
"look"

View file

@@ -1,6 +1,8 @@
import os
import json
import re
from pprint import pprint
import logging
from avalon import api, io
from avalon.vendor import requests, clique
@@ -215,7 +217,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if not job:
# No deadline job. Try Muster: musterSubmissionJob
job = instance.data.get("musterSubmissionJob")
job = data.pop("musterSubmissionJob")
submission_type = "muster"
if not job:
raise RuntimeError("Can't continue without valid Deadline "
@@ -249,7 +251,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# This assumes the output files start with subset name and ends with
# a file extension. The "ext" key includes the dot with the extension.
if "ext" in instance.data:
ext = re.escape(instance.data["ext"])
ext = r"\." + re.escape(instance.data["ext"])
else:
ext = r"\.\D+"
@@ -362,7 +364,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
metadata["metadata"]["instance"]["endFrame"] = updated_end
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(output_dir, metadata_filename)
# convert log messages if they are `LogRecord` to their
# string format to allow serializing as JSON later on.
rendered_logs = []
for log in metadata["metadata"]["instance"].get("_log", []):
if isinstance(log, logging.LogRecord):
rendered_logs.append(log.getMessage())
else:
rendered_logs.append(log)
metadata["metadata"]["instance"]["_log"] = rendered_logs
with open(metadata_path, "w") as f:
json.dump(metadata, f, indent=4, sort_keys=True)
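
The conversion above is needed because logging.LogRecord instances are not JSON-serializable; a minimal reproduction of the failure it guards against:

import json
import logging

record = logging.LogRecord(
    "pype", logging.INFO, __file__, 0, "rendered %d frames", (10,), None)
try:
    json.dumps({"_log": [record]})
except TypeError:
    print(record.getMessage())  # "rendered 10 frames" is what gets stored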

View file

@@ -19,7 +19,7 @@ def subset_to_families(subset, family, families):
return "{}.{}".format(family, new_subset)
class CrateWriteRender(avalon.nuke.Creator):
class CreateWriteRender(avalon.nuke.Creator):
# change this to template preset
preset = "render"
@@ -31,7 +31,7 @@ class CrateWriteRender(avalon.nuke.Creator):
icon = "sign-out"
def __init__(self, *args, **kwargs):
super(CrateWriteRender, self).__init__(*args, **kwargs)
super(CreateWriteRender, self).__init__(*args, **kwargs)
data = OrderedDict()
@@ -62,7 +62,7 @@ class CrateWriteRender(avalon.nuke.Creator):
return
class CrateWritePrerender(avalon.nuke.Creator):
class CreateWritePrerender(avalon.nuke.Creator):
# change this to template preset
preset = "prerender"
@@ -74,7 +74,7 @@ class CrateWritePrerender(avalon.nuke.Creator):
icon = "sign-out"
def __init__(self, *args, **kwargs):
super(CrateWritePrerender, self).__init__(*args, **kwargs)
super(CreateWritePrerender, self).__init__(*args, **kwargs)
data = OrderedDict()
@@ -89,8 +89,6 @@ class CrateWritePrerender(avalon.nuke.Creator):
self.name = self.data["subset"]
instance = nuke.toNode(self.data["subset"])
family = self.family
node = 'write'
if not instance:
@@ -103,51 +101,53 @@ class CrateWritePrerender(avalon.nuke.Creator):
create_write_node(self.data["subset"], write_data)
return
#
#
# class CrateWriteStill(avalon.nuke.Creator):
# # change this to template preset
# preset = "still"
#
# name = "WriteStill"
# label = "Create Write Still"
# hosts = ["nuke"]
# family = "{}_write".format(preset)
# families = preset
# icon = "image"
#
# def __init__(self, *args, **kwargs):
# super(CrateWriteStill, self).__init__(*args, **kwargs)
#
# data = OrderedDict()
#
# data["family"] = self.family.split("_")[-1]
# data["families"] = self.families
#
# {data.update({k: v}) for k, v in self.data.items()
# if k not in data.keys()}
# self.data = data
#
# def process(self):
# self.name = self.data["subset"]
#
# node_name = self.data["subset"].replace(
# "_", "_f{}_".format(nuke.frame()))
# instance = nuke.toNode(self.data["subset"])
# self.data["subset"] = node_name
#
# family = self.family
# node = 'write'
#
# if not instance:
# write_data = {
# "frame_range": [nuke.frame(), nuke.frame()],
# "class": node,
# "preset": self.preset,
# "avalon": self.data
# }
#
# nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
# create_write_node(node_name, write_data)
#
# return
"""
class CrateWriteStill(avalon.nuke.Creator):
# change this to template preset
preset = "still"
name = "WriteStill"
label = "Create Write Still"
hosts = ["nuke"]
family = "{}_write".format(preset)
families = preset
icon = "image"
def __init__(self, *args, **kwargs):
super(CrateWriteStill, self).__init__(*args, **kwargs)
data = OrderedDict()
data["family"] = self.family.split("_")[-1]
data["families"] = self.families
{data.update({k: v}) for k, v in self.data.items()
if k not in data.keys()}
self.data = data
def process(self):
self.name = self.data["subset"]
node_name = self.data["subset"].replace(
"_", "_f{}_".format(nuke.frame()))
instance = nuke.toNode(self.data["subset"])
self.data["subset"] = node_name
family = self.family
node = 'write'
if not instance:
write_data = {
"frame_range": [nuke.frame(), nuke.frame()],
"class": node,
"preset": self.preset,
"avalon": self.data
}
nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
create_write_node(node_name, write_data)
return
"""

View file

@@ -0,0 +1,21 @@
import nuke
from avalon import api, io
import pyblish.api
class CollectAssetInfo(pyblish.api.ContextPlugin):
"""Collect framerate."""
order = pyblish.api.CollectorOrder
label = "Collect Asset Info"
hosts = [
"nuke",
"nukeassist"
]
def process(self, context):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
self.log.info("asset_data: {}".format(asset_data))
context.data['handles'] = int(asset_data["data"].get("handles", 0))

View file

@@ -1,46 +0,0 @@
import pyblish.api
@pyblish.api.log
class CollectInstanceFamilies(pyblish.api.InstancePlugin):
"""Collect families for all instances"""
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Families"
hosts = ["nuke", "nukeassist"]
families = ['write']
def process(self, instance):
node = instance[0]
self.log.info('processing {}'.format(node))
families = []
if instance.data.get('families'):
families += instance.data['families']
# set for ftrack to accept
# instance.data["families"] = ["ftrack"]
if node["render"].value():
# dealing with local/farm rendering
if node["render_farm"].value():
families.append("render.farm")
else:
families.append("render.local")
else:
families.append("render.frames")
# to ignore staging dir op in integrate
instance.data['transfer'] = False
families.append('ftrack')
instance.data["families"] = families
# Sort/grouped by family (preserving local index)
instance.context[:] = sorted(instance.context, key=self.sort_by_family)
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))

View file

@@ -15,3 +15,4 @@ class CollectFramerate(pyblish.api.ContextPlugin):
def process(self, context):
context.data["framerate"] = nuke.root()["fps"].getValue()
context.data["fps"] = nuke.root()["fps"].getValue()

View file

@@ -19,7 +19,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
"name": api.Session["AVALON_ASSET"]})
# add handles into context
context.data['handles'] = int(asset_data["data"].get("handles", 0))
context.data['handles'] = context.data['handles']
self.log.debug("asset_data: {}".format(asset_data["data"]))
instances = []
@@ -40,12 +40,23 @@
if avalon_knob_data["id"] != "pyblish.avalon.instance":
continue
subset = avalon_knob_data.get("subset", None) or node["name"].value()
subset = avalon_knob_data.get(
"subset", None) or node["name"].value()
# Create instance
instance = context.create_instance(subset)
instance.add(node)
family = avalon_knob_data["families"]
if node["render"].value():
self.log.info("flagged for render")
family = "render.local"
# dealing with local/farm rendering
if node["render_farm"].value():
self.log.info("adding render farm family")
family = "render.farm"
instance.data['transfer'] = False
instance.data.update({
"subset": subset,
"asset": os.environ["AVALON_ASSET"],
@@ -53,14 +64,14 @@
"name": node.name(),
"subset": subset,
"family": avalon_knob_data["family"],
"families": [family],
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish').value(),
"step": 1,
"fps": int(nuke.root()['fps'].value())
})
# if node.Class() == "Write":
# instance.data["families"] = [avalon_knob_data["families"]]
self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)

View file

@@ -9,21 +9,9 @@ class CollectReview(pyblish.api.InstancePlugin):
family = "review"
label = "Collect Review"
hosts = ["nuke"]
families = ["write"]
family_targets = [".local", ".frames"]
families = ["render", "render.local"]
def process(self, instance):
pass
families = [(f, search) for f in instance.data["families"]
for search in self.family_targets
if search in f][0]
if families:
root_families = families[0].replace(families[1], "")
# instance.data["families"].append(".".join([
# root_families,
# self.family
# ]))
instance.data["families"].append("render.review")
if instance.data["families"]:
instance.data["families"].append("review")
self.log.info("Review collected: `{}`".format(instance))

View file

@@ -16,10 +16,6 @@ class CollectScript(pyblish.api.ContextPlugin):
hosts = ['nuke']
def process(self, context):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
self.log.info("asset_data: {}".format(asset_data["data"]))
root = nuke.root()
add_avalon_tab_knob(root)
add_publish_knob(root)
@@ -57,7 +53,7 @@
"publish": root.knob('publish').value(),
"family": family,
"representation": "nk",
"handles": int(asset_data["data"].get("handles", 0)),
"handles": context.data['handles'],
"step": 1,
"fps": int(root['fps'].value()),
})

View file

@@ -4,65 +4,66 @@ import pyblish.api
import pype.api as pype
@pyblish.api.log
class CollectNukeWrites(pyblish.api.ContextPlugin):
class CollectNukeWrites(pyblish.api.InstancePlugin):
"""Collect all write nodes."""
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Writes"
hosts = ["nuke", "nukeassist"]
families = ["render.local", "render", "render.farm"]
def process(self, context):
for instance in context.data["instances"]:
def process(self, instance):
if not instance.data["publish"]:
continue
# if not instance.data["publish"]:
# continue
node = instance[0]
node = instance[0]
if node.Class() != "Write":
continue
if node.Class() != "Write":
return
self.log.debug("checking instance: {}".format(instance))
self.log.debug("checking instance: {}".format(instance))
# Determine defined file type
ext = node["file_type"].value()
# Determine defined file type
ext = node["file_type"].value()
# Determine output type
output_type = "img"
if ext == "mov":
output_type = "mov"
# Determine output type
output_type = "img"
if ext == "mov":
output_type = "mov"
# Get frame range
handles = instance.context.data.get('handles', 0)
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
# Get frame range
handles = instance.context.data.get('handles', 0)
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
if node["use_limit"].getValue():
handles = 0
first_frame = int(node["first"].getValue())
last_frame = int(node["last"].getValue())
if node["use_limit"].getValue():
handles = 0
first_frame = int(node["first"].getValue())
last_frame = int(node["last"].getValue())
# get path
path = nuke.filename(node)
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))
# get path
path = nuke.filename(node)
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))
# get version
version = pype.get_version_from_path(path)
instance.data['version'] = version
self.log.debug('Write Version: %s' % instance.data('version'))
# get version
version = pype.get_version_from_path(nuke.root().name())
instance.data['version'] = version
self.log.debug('Write Version: %s' % instance.data('version'))
# create label
name = node.name()
# Include start and end render frame in label
label = "{0} ({1}-{2})".format(
name,
int(first_frame),
int(last_frame)
)
# create label
name = node.name()
# Include start and end render frame in label
label = "{0} ({1}-{2})".format(
name,
int(first_frame),
int(last_frame)
)
if 'render' in instance.data['families']:
instance.data['families'].append('ftrack')
if "representations" not in instance.data:
instance.data["representations"] = list()
try:
@@ -80,23 +81,20 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
except Exception:
self.log.debug("couldn't collect frames: {}".format(label))
if 'render.local' in instance.data['families']:
instance.data['families'].append('ftrack')
instance.data.update({
"path": path,
"outputDir": output_dir,
"ext": ext,
"label": label,
"handles": handles,
"startFrame": first_frame,
"endFrame": last_frame,
"outputType": output_type,
"colorspace": node["colorspace"].value(),
})
# except Exception:
# self.log.debug("couldn't collect frames: {}".format(label))
instance.data.update({
"path": path,
"outputDir": output_dir,
"ext": ext,
"label": label,
"handles": handles,
"startFrame": first_frame,
"endFrame": last_frame,
"outputType": output_type,
"colorspace": node["colorspace"].value(),
})
self.log.debug("instance.data: {}".format(instance.data))
self.log.debug("context: {}".format(context))
self.log.debug("instance.data: {}".format(instance.data))

View file

@@ -8,14 +8,15 @@ class ExtractFramesToIntegrate(pyblish.api.InstancePlugin):
order = pyblish.api.ExtractorOrder
label = "Extract rendered frames"
hosts = ["nuke"]
families = ["render.frames", "prerender.frames", "still.frames"]
families = ["render"]
def process(self, instance):
def process(self, instance):
return
staging_dir = instance.data.get('stagingDir', None)
output_dir = instance.data.get('outputDir', None)
if not staging_dir:
staging_dir = output_dir
instance.data['stagingDir'] = staging_dir
instance.data['transfer'] = False
# staging_dir = instance.data.get('stagingDir', None)
# output_dir = instance.data.get('outputDir', None)
#
# if not staging_dir:
# staging_dir = output_dir
# instance.data['stagingDir'] = staging_dir
# # instance.data['transfer'] = False

View file

@@ -4,6 +4,7 @@ import datetime
import time
import clique
from pprint import pformat
import pyblish.api
@@ -23,35 +24,18 @@ class ExtractJSON(pyblish.api.ContextPlugin):
os.makedirs(workspace)
context_data = context.data.copy()
out_data = dict(self.serialize(context_data))
unwrapped_instance = []
for i in context_data["instances"]:
unwrapped_instance.append(i.data)
instances_data = []
for instance in context:
data = {}
for key, value in instance.data.items():
if isinstance(value, clique.Collection):
value = value.format()
try:
json.dumps(value)
data[key] = value
except KeyError:
msg = "\"{0}\"".format(value)
msg += " in instance.data[\"{0}\"]".format(key)
msg += " could not be serialized."
self.log.debug(msg)
instances_data.append(data)
out_data["instances"] = instances_data
context_data["instances"] = unwrapped_instance
timestamp = datetime.datetime.fromtimestamp(
time.time()).strftime("%Y%m%d-%H%M%S")
filename = timestamp + "_instances.json"
with open(os.path.join(workspace, filename), "w") as outfile:
outfile.write(json.dumps(out_data, indent=4, sort_keys=True))
outfile.write(pformat(context_data, depth=20))
def serialize(self, data):
"""

View file

@@ -69,6 +69,9 @@ class NukeRenderLocal(pype.api.Extractor):
temp_dir
))
instance.data['family'] = 'render'
instance.data['families'].append('render')
collections, remainder = clique.assemble(collected_frames)
self.log.info('collections: {}'.format(str(collections)))

View file

@@ -16,7 +16,7 @@ class ExtractDataForReview(pype.api.Extractor):
label = "Extract Review"
optional = True
families = ["render.review"]
families = ["review"]
hosts = ["nuke"]
def process(self, instance):
@@ -79,7 +79,7 @@ class ExtractDataForReview(pype.api.Extractor):
instance.data["representations"] = []
representation = {
'name': 'mov',
'name': 'review',
'ext': 'mov',
'files': file_name,
"stagingDir": stagingDir,
@@ -100,7 +100,7 @@ class ExtractDataForReview(pype.api.Extractor):
import nuke
temporary_nodes = []
stagingDir = instance.data["stagingDir"].replace("\\", "/")
stagingDir = instance.data['representations'][0]["stagingDir"].replace("\\", "/")
self.log.debug("StagingDir `{0}`...".format(stagingDir))
collection = instance.data.get("collection", None)

View file

@@ -0,0 +1,208 @@
import os
import json
import getpass
import nuke
from avalon import api
from avalon.vendor import requests
import re
import pyblish.api
class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit write to Deadline
Renders are submitted to a Deadline Web Service as
supplied via the environment variable DEADLINE_REST_URL
"""
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["nuke", "nukestudio"]
families = ["render.farm"]
optional = True
def process(self, instance):
# root = nuke.root()
# node_subset_name = instance.data.get("name", None)
node = instance[0]
DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
"http://localhost:8082")
assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
context = instance.context
workspace = os.path.dirname(context.data["currentFile"])
filepath = None
# get path
path = nuke.filename(node)
output_dir = instance.data['outputDir']
filepath = context.data["currentFile"]
self.log.debug(filepath)
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
dirname = os.path.join(workspace, "renders")
deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)
ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))
try:
# Ensure render folder exists
os.makedirs(dirname)
except OSError:
pass
# Documentation for keys available at:
# https://docs.thinkboxsoftware.com
# /products/deadline/8.0/1_User%20Manual/manual
# /manual-submission.html#job-info-file-options
payload = {
"JobInfo": {
# Top-level group name
"BatchName": filename,
# Job name, as seen in Monitor
"Name": jobname,
# Arbitrary username, for visualisation in Monitor
"UserName": deadline_user,
"Plugin": "Nuke",
"Frames": "{start}-{end}".format(
start=int(instance.data["startFrame"]),
end=int(instance.data["endFrame"])
),
"Comment": comment,
# Optional, enable double-click to preview rendered
# frames from Deadline Monitor
# "OutputFilename0": output_filename_0.replace("\\", "/"),
},
"PluginInfo": {
# Input
"SceneFile": filepath,
# Output directory and filename
"OutputFilePath": dirname.replace("\\", "/"),
# "OutputFilePrefix": render_variables["filename_prefix"],
# Mandatory for Deadline
"Version": ver.group(),
# Resolve relative references
"ProjectPath": workspace,
},
# Mandatory for Deadline, may be empty
"AuxFiles": []
}
# Include critical environment variables with submission
keys = [
# This will trigger `userSetup.py` on the slave
# such that proper initialisation happens the same
# way as it does on a local machine.
# TODO(marcus): This won't work if the slaves don't
# have access to these paths, such as if slaves are
# running Linux and the submitter is on Windows.
"PYTHONPATH",
"PATH",
"AVALON_SCHEMA",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"PYBLISHPLUGINPATH",
"NUKE_PATH",
"TOOL_ENV"
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
# self.log.debug("enviro: {}".format(pprint(environment)))
for path in os.environ:
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
try:
path = environment[key]
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
else:
for path in environment[key].split(os.pathsep):
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
except UnicodeDecodeError:
print('path contains non UTF characters')
if key == "PYTHONPATH":
clean_path = clean_path.replace('python2', 'python3')
clean_path = clean_path.replace(
os.path.normpath(
environment['PYPE_STUDIO_CORE_MOUNT']), # noqa
os.path.normpath(
environment['PYPE_STUDIO_CORE_PATH'])) # noqa
clean_environment[key] = clean_path
environment = clean_environment
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})
plugin = payload["JobInfo"]["Plugin"]
self.log.info("using render plugin : {}".format(plugin))
self.preflight_check(instance)
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(DEADLINE_REST_URL)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
# Store output dir for unified publisher (filesequence)
instance.data["deadlineSubmissionJob"] = response.json()
instance.data["publishJobState"] = "Active"
def preflight_check(self, instance):
"""Ensure the startFrame, endFrame and byFrameStep are integers"""
for key in ("startFrame", "endFrame"):
value = instance.data[key]
if int(value) == value:
continue
self.log.warning(
"%f=%d was rounded off to nearest integer"
% (value, int(value))
)
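
For reference, the EnvironmentKeyValue%d expansion used in the payload produces the numbered keys Deadline's job-info format expects; a toy illustration with made-up values:

environment = {
    "NUKE_PATH": "/opt/nuke/plugins",  # made-up values
    "FTRACK_SERVER": "https://example.ftrackapp.com",
}
job_info = {
    "EnvironmentKeyValue%d" % index: "{key}={value}".format(key=key,
                                                            value=value)
    for index, (key, value) in enumerate(sorted(environment.items()))
}
# -> {"EnvironmentKeyValue0": "FTRACK_SERVER=https://...",
#     "EnvironmentKeyValue1": "NUKE_PATH=/opt/nuke/plugins"}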

View file

@@ -1,5 +1,6 @@
import os
import pyblish.api
from pype.api import ValidationException
import clique
@@ -20,24 +21,29 @@ class RepairCollectionAction(pyblish.api.Action):
self.log.info("Rendering toggled ON")
class ValidatePrerenderedFrames(pyblish.api.InstancePlugin):
class ValidateRenderedFrames(pyblish.api.InstancePlugin):
""" Validates file output. """
order = pyblish.api.ValidatorOrder + 0.1
families = ["render.frames", "still.frames", "prerender.frames"]
families = ["render"]
label = "Validate prerendered frame"
hosts = ["nuke"]
label = "Validate rendered frame"
hosts = ["nuke", "nukestudio"]
actions = [RepairCollectionAction]
def process(self, instance):
for repre in instance.data.get('representations'):
assert repre.get('files'), "no frames were collected, you need to render them"
if not repre.get('files'):
msg = ("no frames were collected, "
"you need to render them")
self.log.error(msg)
raise ValidationException(msg)
collections, remainder = clique.assemble(repre["files"])
self.log.info('collections: {}'.format(str(collections)))
self.log.info('remainder: {}'.format(str(remainder)))
collection = collections[0]
@@ -45,10 +51,20 @@ class ValidatePrerenderedFrames(pyblish.api.InstancePlugin):
- instance.data["startFrame"] + 1
if frame_length != 1:
assert len(collections) == 1, "There are multiple collections in the folder"
assert collection.is_contiguous(), "Some frames appear to be missing"
if len(collections) != 1:
msg = "There are multiple collections in the folder"
self.log.error(msg)
raise ValidationException(msg)
assert remainder is not None, "There are some extra files in folder"
if not collection.is_contiguous():
msg = "Some frames appear to be missing"
self.log.error(msg)
raise ValidationException(msg)
# if len(remainder) != 0:
# msg = "There are some extra files in folder"
# self.log.error(msg)
# raise ValidationException(msg)
self.log.info('frame_length: {}'.format(frame_length))
self.log.info('len(collection.indexes): {}'.format(
@@ -56,7 +72,7 @@ class ValidatePrerenderedFrames(pyblish.api.InstancePlugin):
assert len(
collection.indexes
) is frame_length, "{} missing frames. Use "
"repair to render all frames".format(__name__)
) is frame_length, ("{} missing frames. Use "
"repair to render all frames").format(__name__)
instance.data['collection'] = collection
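
The parenthesisation fix above matters because, without it, the second string literal is a separate expression statement: the assert message ends after "Use " and the .format() call is evaluated and discarded. Illustration of the corrected message construction:

msg = ("{} missing frames. Use "
       "repair to render all frames").format(__name__)
# adjacent literals concatenate before .format() is applied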

View file

@@ -11,7 +11,7 @@ from pypeapp import Logger
log = Logger().get_logger(__name__, "nuke")
nuke.addOnScriptSave(writes_version_sync)
# nuke.addOnScriptSave(writes_version_sync)
nuke.addOnScriptSave(onScriptLoad)
nuke.addOnScriptSave(checkInventoryVersions)