adding plugins from maya and old repository for wip

Jakub Jezek 2018-11-15 22:55:20 +01:00
parent bd128b25de
commit 539a010ce8
5 changed files with 712 additions and 0 deletions


@@ -0,0 +1,198 @@
import os

import nuke
import pyblish.api
import clique

import ft_utils
reload(ft_utils)

global pre_name
pre_name = ft_utils.get_paths_from_template(['shot.vfx.prerender'],
                                            False)[0].split('_')[0]


class CollectNukeWrites(pyblish.api.ContextPlugin):
    """Collect all write nodes."""

    order = pyblish.api.CollectorOrder
    label = "Writes"
    hosts = ["nuke", "nukeassist"]
    # targets = ["default", "process"]

    def process(self, context):
        instances = []

        # creating instances per write node
        for node in nuke.allNodes():
            if node.Class() != "Write":
                continue

            # Determine output type
            output_type = "img"
            if node["file_type"].value() == "mov":
                output_type = "mov"

            # Create instance
            instance = pyblish.api.Instance(node.name())
            instance.data["family"] = output_type
            instance.add(node)

            instance.data["label"] = node.name()
            instance.data["publish"] = False

            # Get frame range
            start_frame = int(nuke.root()["first_frame"].getValue())
            end_frame = int(nuke.root()["last_frame"].getValue())
            if node["use_limit"].getValue():
                start_frame = int(node["first"].getValue())
                end_frame = int(node["last"].getValue())

            print "writeNode collected: {}".format(node.name())

            # Add collection
            collection = None
            try:
                path = ""
                if pre_name in node.name():
                    path = ft_utils.convert_hashes_in_file_name(
                        node['prerender_path'].getText())
                else:
                    path = nuke.filename(node)

                path += " [{0}-{1}]".format(start_frame, end_frame)
                collection = clique.parse(path)
                ###################################################
                '''possible place to start create mov publish write collection'''
                ###################################################
            except ValueError:
                # Ignore the exception when the path does not match the
                # collection.
                pass

            instance.data["collection"] = collection

            instances.append(instance)

        context.data["write_instances"] = instances

        context.data["instances"] = (
            context.data.get("instances", []) + instances)


class CollectNukeWritesProcess(pyblish.api.ContextPlugin):
    """Collect all local processing write instances."""

    order = CollectNukeWrites.order + 0.01
    label = "Writes Local"
    hosts = ["nuke"]
    # targets = ["process.local"]

    def process(self, context):
        for item in context.data["write_instances"]:
            instance = context.create_instance(item.data["name"])
            for key, value in item.data.iteritems():
                instance.data[key] = value

            if pre_name not in item.data["name"]:
                instance.data["label"] += " - write - local"
                instance.data["families"] = ["write", "local"]
            else:
                instance.data["label"] += " - prerender - local"
                instance.data["families"] = ["prerender", "local"]

            for node in item:
                instance.add(node)

            # Adding/Checking publish attribute
            if "process_local" not in node.knobs():
                knob = nuke.Boolean_Knob("process_local", "Process Local")
                knob.setValue(False)
                node.addKnob(knob)

            value = bool(node["process_local"].getValue())

            # Compare against selection
            selection = instance.context.data.get("selection", [])
            if selection:
                if list(set(instance) & set(selection)):
                    value = True
                else:
                    value = False

            instance.data["publish"] = value

            def instanceToggled(instance, value):
                instance[0]["process_local"].setValue(value)

            instance.data["instanceToggled"] = instanceToggled


class CollectNukeWritesPublish(pyblish.api.ContextPlugin):
    """Collect all write instances for publishing."""

    order = CollectNukeWrites.order + 0.01
    label = "Writes"
    hosts = ["nuke", "nukeassist"]
    # targets = ["default"]

    def process(self, context):
        for item in context.data["write_instances"]:

            # If the collection was not generated.
            if not item.data["collection"]:
                continue

            missing_files = []
            for f in item.data["collection"]:
                # print f
                if not os.path.exists(f):
                    missing_files.append(f)

            for f in missing_files:
                item.data["collection"].remove(f)

            if not list(item.data["collection"]):
                continue

            instance = context.create_instance(item.data["name"])
            for key, value in item.data.iteritems():
                # print key, value
                instance.data[key] = value

            instance.data["families"] = ["output"]
            instance.data["label"] += (
                " - " + os.path.basename(instance.data["collection"].format()))

            for node in item:
                instance.add(node)

            # Adding/Checking publish attribute
            if "publish" not in node.knobs():
                knob = nuke.Boolean_Knob("publish", "Publish")
                knob.setValue(False)
                node.addKnob(knob)

            value = bool(node["publish"].getValue())

            # Compare against selection
            selection = instance.context.data.get("selection", [])
            if selection:
                if list(set(instance) & set(selection)):
                    value = True
                else:
                    value = False

            instance.data["publish"] = value

            def instanceToggled(instance, value):
                # Removing and adding the knob to support NukeAssist, where
                # you can't modify the knob value directly.
                instance[0].removeKnob(instance[0]["publish"])
                knob = nuke.Boolean_Knob("publish", "Publish")
                knob.setValue(value)
                instance[0].addKnob(knob)

            instance.data["instanceToggled"] = instanceToggled


@@ -0,0 +1,107 @@
import os
import tempfile
import shutil

import nuke
import pyblish.api


class ExtractNukeBakedColorspace(pyblish.api.InstancePlugin):
    """Extracts movie with baked-in LUTs.

    Example ffmpeg re-encode of the resulting baked movie:

    V:\Remote Apps\ffmpeg\bin>ffmpeg -y -i
    V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02._baked.mov
    -pix_fmt yuv420p
    -crf 18
    -timecode 00:00:00:01
    V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02..mov
    """

    order = pyblish.api.ExtractorOrder
    label = "Baked Colorspace"
    optional = True
    families = ["review"]
    hosts = ["nuke"]

    def process(self, instance):

        if "collection" not in instance.data.keys():
            return

        # Store selection
        selection = [i for i in nuke.allNodes() if i["selected"].getValue()]

        # Deselect all nodes to prevent external connections
        [i["selected"].setValue(False) for i in nuke.allNodes()]

        temporary_nodes = []

        # Create nodes
        first_frame = min(instance.data["collection"].indexes)
        last_frame = max(instance.data["collection"].indexes)

        temp_dir = tempfile.mkdtemp()
        for f in instance.data["collection"]:
            shutil.copy(f, os.path.join(temp_dir, os.path.basename(f)))

        node = previous_node = nuke.createNode("Read")
        node["file"].setValue(
            os.path.join(temp_dir,
                         os.path.basename(instance.data["collection"].format(
                             "{head}{padding}{tail}"))).replace("\\", "/"))
        node["first"].setValue(first_frame)
        node["origfirst"].setValue(first_frame)
        node["last"].setValue(last_frame)
        node["origlast"].setValue(last_frame)
        temporary_nodes.append(node)

        reformat_node = nuke.createNode("Reformat")
        reformat_node["format"].setValue("HD_1080")
        reformat_node["resize"].setValue("fit")
        reformat_node["filter"].setValue("Lanczos6")
        reformat_node["black_outside"].setValue(True)
        reformat_node.setInput(0, previous_node)
        previous_node = reformat_node
        temporary_nodes.append(reformat_node)

        viewer_process_node = nuke.ViewerProcess.node()
        dag_node = None
        if viewer_process_node:
            dag_node = nuke.createNode(viewer_process_node.Class())
            dag_node.setInput(0, previous_node)
            previous_node = dag_node
            temporary_nodes.append(dag_node)

            # Copy viewer process values
            excludedKnobs = ["name", "xpos", "ypos"]
            for item in viewer_process_node.knobs().keys():
                if item not in excludedKnobs and item in dag_node.knobs():
                    x1 = viewer_process_node[item]
                    x2 = dag_node[item]
                    x2.fromScript(x1.toScript(False))
        else:
            self.log.warning("No viewer node found.")

        write_node = nuke.createNode("Write")
        path = instance.data["collection"].format("{head}_baked.mov")
        instance.data["baked_colorspace_movie"] = path
        write_node["file"].setValue(path.replace("\\", "/"))
        write_node["file_type"].setValue("mov")
        write_node["raw"].setValue(1)
        write_node.setInput(0, previous_node)
        temporary_nodes.append(write_node)

        # Render frames
        nuke.execute(write_node.name(), int(first_frame), int(last_frame))

        # Clean up
        for node in temporary_nodes:
            nuke.delete(node)

        shutil.rmtree(temp_dir)

        # Restore selection
        [i["selected"].setValue(False) for i in nuke.allNodes()]
        [i["selected"].setValue(True) for i in selection]


@@ -0,0 +1,116 @@
import os

import nuke
import pyblish.api


class Extract(pyblish.api.InstancePlugin):
    """Super class for write and writegeo extractors."""

    order = pyblish.api.ExtractorOrder
    optional = True
    label = "Extract Nuke [super]"
    hosts = ["nuke"]
    match = pyblish.api.Subset
    # targets = ["process.local"]

    def execute(self, instance):
        # Get frame range
        node = instance[0]
        first_frame = nuke.root()["first_frame"].value()
        last_frame = nuke.root()["last_frame"].value()
        if node["use_limit"].value():
            first_frame = node["first"].value()
            last_frame = node["last"].value()

        # Render frames
        nuke.execute(node.name(), int(first_frame), int(last_frame))


class ExtractNukeWrite(Extract):
    """Extract output from write nodes."""

    families = ["write", "local"]
    label = "Extract Write"

    def process(self, instance):
        self.execute(instance)

        # Validate output
        for filename in list(instance.data["collection"]):
            if not os.path.exists(filename):
                instance.data["collection"].remove(filename)
                self.log.warning("\"{0}\" didn't render.".format(filename))


class ExtractNukeCache(Extract):

    label = "Cache"
    families = ["cache", "local"]

    def process(self, instance):
        self.execute(instance)

        # Validate output
        msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
        assert os.path.exists(instance.data["output_path"]), msg


class ExtractNukeCamera(Extract):

    label = "Camera"
    families = ["camera", "local"]

    def process(self, instance):
        node = instance[0]

        node["writeGeometries"].setValue(False)
        node["writePointClouds"].setValue(False)
        node["writeAxes"].setValue(False)

        file_path = node["file"].getValue()
        node["file"].setValue(instance.data["output_path"])

        self.execute(instance)

        node["writeGeometries"].setValue(True)
        node["writePointClouds"].setValue(True)
        node["writeAxes"].setValue(True)

        node["file"].setValue(file_path)

        # Validate output
        msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
        assert os.path.exists(instance.data["output_path"]), msg


class ExtractNukeGeometry(Extract):

    label = "Geometry"
    families = ["geometry", "local"]

    def process(self, instance):
        node = instance[0]

        node["writeCameras"].setValue(False)
        node["writePointClouds"].setValue(False)
        node["writeAxes"].setValue(False)

        file_path = node["file"].getValue()
        node["file"].setValue(instance.data["output_path"])

        self.execute(instance)

        node["writeCameras"].setValue(True)
        node["writePointClouds"].setValue(True)
        node["writeAxes"].setValue(True)

        node["file"].setValue(file_path)

        # Validate output
        msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
        assert os.path.exists(instance.data["output_path"]), msg
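
The Extract super class keeps new extractors small: a subclass only declares its families and wraps execute() with whatever node state it needs. A purely hypothetical example following the same pattern (the deepwrite family and the output_path check are assumptions, not in this commit):

class ExtractNukeDeepWrite(Extract):
    """Illustration only: hypothetical extractor following the same pattern."""

    label = "Deep Write"
    families = ["deepwrite", "local"]

    def process(self, instance):
        self.execute(instance)

        # Validate output the same way the cache/camera extractors do.
        msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
        assert os.path.exists(instance.data["output_path"]), msg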


@@ -0,0 +1,264 @@
import os
import json
import getpass

from maya import cmds

from avalon import api
from avalon.vendor import requests

import pyblish.api
import pype.maya.lib as lib


def get_renderer_variables(renderlayer=None):
    """Retrieve the extension which has been set in the VRay settings.

    Will return None if the current renderer is not VRay.
    For Maya 2016.5 and up the renderSetup creates renderSetupLayer nodes which
    start with `rs`. Use the actual node name, do NOT use the `nice name`.

    Args:
        renderlayer (str): the node name of the renderlayer.

    Returns:
        dict
    """
    renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
    render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])

    padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
                                          render_attrs["padding"]))

    filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]

    if renderer == "vray":
        # Maya's renderSettings function does not return V-Ray file extension
        # so we get the extension from vraySettings
        extension = cmds.getAttr("vraySettings.imageFormatStr")

        # When V-Ray image format has not been switched once from default .png
        # the getAttr command above returns None. As such we explicitly set
        # it to `.png`
        if extension is None:
            extension = "png"

        filename_prefix = "<Scene>/<Scene>_<Layer>/<Layer>"
    else:
        # Get the extension, getAttr defaultRenderGlobals.imageFormat
        # returns an index number.
        filename_base = os.path.basename(filename_0)
        extension = os.path.splitext(filename_base)[-1].strip(".")
        filename_prefix = "<Scene>/<Scene>_<RenderLayer>/<RenderLayer>"

    return {"ext": extension,
            "filename_prefix": filename_prefix,
            "padding": padding,
            "filename_0": filename_0}


def preview_fname(folder, scene, layer, padding, ext):
    """Return output file path with #### for padding.

    Deadline requires the path to be formatted with # in place of numbers.
    For example `/path/to/render.####.png`

    Args:
        folder (str): The root output folder (image path)
        scene (str): The scene name
        layer (str): The layer name to be rendered
        padding (int): The padding length
        ext (str): The output file extension

    Returns:
        str
    """
    # Following hardcoded "<Scene>/<Scene>_<Layer>/<Layer>"
    output = "{scene}/{scene}_{layer}/{layer}.{number}.{ext}".format(
        scene=scene,
        layer=layer,
        number="#" * padding,
        ext=ext
    )

    return os.path.join(folder, output)
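
# Illustrative call only (made-up values):
#   preview_fname("/proj/work/maya/renders", "shot010_v02", "beauty", 4, "exr")
# returns roughly
#   "/proj/work/maya/renders/shot010_v02/shot010_v02_beauty/beauty.####.exr",
# which is what ends up in the Deadline OutputFilename0 key below.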


class MayaSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit available render layers to Deadline.

    Renders are submitted to a Deadline Web Service as
    supplied via the environment variable AVALON_DEADLINE.
    """

    label = "Submit to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["maya"]
    families = ["renderlayer"]

    def process(self, instance):

        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
                                          "http://localhost:8082")
        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

        context = instance.context
        workspace = context.data["workspaceDir"]
        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        scene = os.path.splitext(filename)[0]
        dirname = os.path.join(workspace, "renders")
        renderlayer = instance.data['setMembers']          # rs_beauty
        renderlayer_name = instance.data['subset']         # beauty
        renderlayer_globals = instance.data["renderGlobals"]
        legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
        deadline_user = context.data.get("deadlineUser", getpass.getuser())
        jobname = "%s - %s" % (filename, instance.name)

        # Get the variables depending on the renderer
        render_variables = get_renderer_variables(renderlayer)
        output_filename_0 = preview_fname(folder=dirname,
                                          scene=scene,
                                          layer=renderlayer_name,
                                          padding=render_variables["padding"],
                                          ext=render_variables["ext"])

        try:
            # Ensure render folder exists
            os.makedirs(dirname)
        except OSError:
            pass

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": filename,

                # Job name, as seen in Monitor
                "Name": jobname,

                # Arbitrary username, for visualisation in Monitor
                "UserName": deadline_user,

                "Plugin": instance.data.get("mayaRenderPlugin", "MayaBatch"),
                "Frames": "{start}-{end}x{step}".format(
                    start=int(instance.data["startFrame"]),
                    end=int(instance.data["endFrame"]),
                    step=int(instance.data["byFrameStep"]),
                ),

                "Comment": comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0": output_filename_0.replace("\\", "/"),
            },
            "PluginInfo": {
                # Input
                "SceneFile": filepath,

                # Output directory and filename
                "OutputFilePath": dirname.replace("\\", "/"),
                "OutputFilePrefix": render_variables["filename_prefix"],

                # Mandatory for Deadline
                "Version": cmds.about(version=True),

                # Only render layers are considered renderable in this pipeline
                "UsingRenderLayers": True,

                # Use legacy Render Layer system
                "UseLegacyRenderLayers": legacy_layers,

                # Render only this layer
                "RenderLayer": renderlayer,

                # Determine which renderer to use from the file itself
                "Renderer": instance.data["renderer"],

                # Resolve relative references
                "ProjectPath": workspace,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Include critical environment variables with submission
        keys = [
            # This will trigger `userSetup.py` on the slave
            # such that proper initialisation happens the same
            # way as it does on a local machine.
            # TODO(marcus): This won't work if the slaves don't
            # have access to these paths, such as if slaves are
            # running Linux and the submitter is on Windows.
            "PYTHONPATH",

            # todo: This is a temporary fix for yeti variables
            "PEREGRINEL_LICENSE",

            "REDSHIFT_MAYAEXTENSIONSPATH",
            "REDSHIFT_DISABLEOUTPUTLOCKFILES",
            "VRAY_FOR_MAYA2018_PLUGINS_X64",
            "VRAY_PLUGINS_X64",
            "VRAY_USE_THREAD_AFFINITY",
            "MAYA_MODULE_PATH"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)
        PATHS = os.environ["PATH"].split(";")
        environment["PATH"] = ";".join([p for p in PATHS
                                        if p.startswith("P:")])
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})
# Include optional render globals
render_globals = instance.data.get("renderGlobals", {})
payload["JobInfo"].update(render_globals)
plugin = payload["JobInfo"]["Plugin"]
self.log.info("using render plugin : {}".format(plugin))
self.preflight_check(instance)
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(AVALON_DEADLINE)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
# Store output dir for unified publisher (filesequence)
instance.data["outputDir"] = os.path.dirname(output_filename_0)
instance.data["deadlineSubmissionJob"] = response.json()

    def preflight_check(self, instance):
        """Ensure the startFrame, endFrame and byFrameStep are integers."""

        for key in ("startFrame", "endFrame", "byFrameStep"):
            value = instance.data[key]

            if int(value) == value:
                continue

            self.log.warning(
                "%f=%d was rounded off to nearest integer"
                % (value, int(value))
            )
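
To make the wire format concrete, a stripped-down sketch of the same submission outside pyblish and Maya; every value is a placeholder and plain requests is used instead of avalon.vendor.requests. Only the endpoint and the JobInfo/PluginInfo/AuxFiles layout are taken from the plugin above.

import requests

AVALON_DEADLINE = "http://localhost:8082"  # same default as the plugin above

payload = {
    "JobInfo": {
        "BatchName": "myscene_v001.ma",       # placeholder scene file name
        "Name": "myscene_v001.ma - beauty",   # placeholder job name
        "UserName": "artist",                 # placeholder user
        "Plugin": "MayaBatch",
        "Frames": "1001-1100x1",
        "OutputFilename0": "renders/myscene_v001/myscene_v001_beauty/beauty.####.exr",
    },
    "PluginInfo": {
        "SceneFile": "/proj/work/maya/scenes/myscene_v001.ma",  # placeholder
        "OutputFilePath": "/proj/work/maya/renders",            # placeholder
        "OutputFilePrefix": "<Scene>/<Scene>_<Layer>/<Layer>",
        "Version": "2018",
        "UsingRenderLayers": True,
        "RenderLayer": "rs_beauty",           # placeholder layer node
        "Renderer": "vray",                   # placeholder renderer
        "ProjectPath": "/proj/work/maya",     # placeholder workspace
    },
    "AuxFiles": [],
}

response = requests.post("{}/api/jobs".format(AVALON_DEADLINE), json=payload)
if not response.ok:
    raise Exception(response.text)
print response.json()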


@@ -0,0 +1,27 @@
import pyblish.api

import avalon.api as api
from avalon.vendor import requests


class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
    """Validate Deadline Web Service is running."""

    label = "Validate Deadline Web Service"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    families = ["renderlayer"]

    def process(self, instance):
        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
                                          "http://localhost:8082")
        assert AVALON_DEADLINE is not None, "Requires AVALON_DEADLINE"

        # Check response
        response = requests.get(AVALON_DEADLINE)
        assert response.ok, "Response must be ok"
        assert response.text.startswith("Deadline Web Service "), (
            "Web service did not respond with 'Deadline Web Service'"
        )
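
The same health check is easy to run outside Maya when debugging a farm setup; a minimal sketch assuming plain requests is installed and the service listens on the default address used above.

import requests

AVALON_DEADLINE = "http://localhost:8082"  # default used by the plugins above

response = requests.get(AVALON_DEADLINE)
assert response.ok, "Deadline Web Service did not return HTTP 200"
assert response.text.startswith("Deadline Web Service "), (
    "Unexpected response: {0}".format(response.text[:80]))
print "Deadline Web Service reachable at {0}".format(AVALON_DEADLINE)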