model family name instead of geo

This commit is contained in:
karimmozlia 2021-10-13 11:41:56 +02:00
parent a896a3a9e9
commit a6c1037de2
5 changed files with 334 additions and 41 deletions

View file

@ -3,17 +3,17 @@ from openpype.hosts.nuke.api import plugin
import nuke
class CreateGeo(plugin.PypeCreator):
"""Add Publishable Geometry"""
class CreateModel(plugin.PypeCreator):
"""Add Publishable Modelmetry"""
name = "geo"
label = "Create 3d Geo"
name = "model"
label = "Create 3d Model"
family = "model"
icon = "cube"
defaults = ["Main"]
def __init__(self, *args, **kwargs):
super(CreateGeo, self).__init__(*args, **kwargs)
super(CreateModel, self).__init__(*args, **kwargs)
self.nodes = nuke.selectedNodes()
self.node_color = "0xff3200ff"
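# node colour kept as a hex string; it is converted with int(value, 16)
# before being written to the node's tile_color knob below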
return
@ -46,8 +46,8 @@ class CreateGeo(plugin.PypeCreator):
return
else:
# if selected is off then create one node
geo_node = nuke.createNode("Geo2")
geo_node["tile_color"].setValue(int(self.node_color, 16))
model_node = nuke.createNode("Model2")
model_node["tile_color"].setValue(int(self.node_color, 16))
# add avalon knobs
instance = anlib.set_avalon_knob_data(geo_node, self.data)
instance = anlib.set_avalon_knob_data(model_node, self.data)
return instance

View file

@ -4,16 +4,16 @@ from avalon.nuke import containerise, update_container
import nuke
class AlembicGeoLoader(api.Loader):
class AlembicModelLoader(api.Loader):
"""
This will load alembic geo into script.
This will load an Alembic model into the script.
"""
families = ["geo"]
families = ["model"]
representations = ["abc"]
label = "Load Alembic Geo"
icon = "geo"
label = "Load Alembic Model"
icon = "model"
color = "orange"
node_color = "0xff3200ff"
@ -44,30 +44,30 @@ class AlembicGeoLoader(api.Loader):
file = self.fname.replace("\\", "/")
with anlib.maintained_selection():
geo_node = nuke.createNode(
"Geo2",
model_node = nuke.createNode(
"Model2",
"name {} file {} read_from_file True".format(
object_name, file),
inpanel=False
)
geo_node.forceValidate()
geo_node["frame_rate"].setValue(float(fps))
model_node.forceValidate()
model_node["frame_rate"].setValue(float(fps))
# workaround for a Nuke bug that does not
# add animation keys properly
xpos = geo_node.xpos()
ypos = geo_node.ypos()
xpos = model_node.xpos()
ypos = model_node.ypos()
nuke.nodeCopy("%clipboard%")
nuke.delete(geo_node)
nuke.delete(model_node)
nuke.nodePaste("%clipboard%")
geo_node = nuke.toNode(object_name)
geo_node.setXYpos(xpos, ypos)
model_node = nuke.toNode(object_name)
model_node.setXYpos(xpos, ypos)
# color the node according to the loaded version
self.node_version_color(version, geo_node)
self.node_version_color(version, model_node)
return containerise(
node=geo_node,
node=model_node,
name=name,
namespace=namespace,
context=context,
@ -97,7 +97,7 @@ class AlembicGeoLoader(api.Loader):
})
object_name = container['objectName']
# get corresponding node
geo_node = nuke.toNode(object_name)
model_node = nuke.toNode(object_name)
# get main variables
version_data = version.get("data", {})
@ -123,42 +123,42 @@ class AlembicGeoLoader(api.Loader):
file = api.get_representation_path(representation).replace("\\", "/")
with anlib.maintained_selection():
geo_node = nuke.toNode(object_name)
geo_node['selected'].setValue(True)
model_node = nuke.toNode(object_name)
model_node['selected'].setValue(True)
# collect input output dependencies
dependencies = geo_node.dependencies()
dependent = geo_node.dependent()
dependencies = model_node.dependencies()
dependent = model_node.dependent()
geo_node["frame_rate"].setValue(float(fps))
geo_node["file"].setValue(file)
model_node["frame_rate"].setValue(float(fps))
model_node["file"].setValue(file)
# workaround for a Nuke bug that does
# not add animation keys properly
xpos = geo_node.xpos()
ypos = geo_node.ypos()
xpos = model_node.xpos()
ypos = model_node.ypos()
nuke.nodeCopy("%clipboard%")
nuke.delete(geo_node)
nuke.delete(model_node)
nuke.nodePaste("%clipboard%")
geo_node = nuke.toNode(object_name)
geo_node.setXYpos(xpos, ypos)
model_node = nuke.toNode(object_name)
model_node.setXYpos(xpos, ypos)
# link to original input nodes
for i, input in enumerate(dependencies):
geo_node.setInput(i, input)
model_node.setInput(i, input)
# link to original output nodes
for d in dependent:
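# find which input of the downstream node was pointing at the model
# node, so the freshly pasted node can be re-linked to the same input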
index = next((i for i, dpcy in enumerate(
d.dependencies())
if geo_node is dpcy), 0)
d.setInput(index, geo_node)
if model_node is dpcy), 0)
d.setInput(index, model_node)
# color the node according to the loaded version
self.node_version_color(version, geo_node)
self.node_version_color(version, model_node)
self.log.info("updated to version: {}".format(version.get("name")))
return update_container(geo_node, data_imprint)
return update_container(model_node, data_imprint)
def node_version_color(self, version, node):
""" Coloring a node by correct color by actual version

View file

@ -0,0 +1,50 @@
import pyblish.api
import nuke
@pyblish.api.log
class CollectModel(pyblish.api.InstancePlugin):
"""Collect Model (group) node instance and its content
"""
order = pyblish.api.CollectorOrder + 0.22
label = "Collect Model (Group)"
hosts = ["nuke"]
families = ["model"]
def process(self, instance):
grpn = instance[0]
# add family to families
instance.data["families"].insert(0, instance.data["family"])
# make label nicer
instance.data["label"] = "{0} ({1} nodes)".format(
grpn.name(), len(instance) - 1)
# Get frame range
handle_start = instance.context.data["handleStart"]
handle_end = instance.context.data["handleEnd"]
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
# Add version data to instance
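# NOTE: the script's root first/last frame is assumed to already include
# the handles, so they are added/subtracted below to get the frame range
# without handles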
version_data = {
"handles": handle_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"colorspace": nuke.root().knob('workingSpaceLUT').value(),
"families": [instance.data["family"]] + instance.data["families"],
"subset": instance.data["subset"],
"fps": instance.context.data["fps"]
}
instance.data.update({
"versionData": version_data,
"frameStart": first_frame,
"frameEnd": last_frame
})
self.log.info("Model content collected: `{}`".format(instance[:]))
self.log.info("Model instance collected: `{}`".format(instance))

View file

@ -0,0 +1,185 @@
import nuke
import os
import math
import pyblish.api
import openpype.api
from avalon.nuke import lib as anlib
from pprint import pformat
class ExtractModel(openpype.api.Extractor):
""" 3D model exctractor
"""
label = 'Extract Model'
order = pyblish.api.ExtractorOrder
families = ["model"]
hosts = ["nuke"]
# presets
write_geo_knobs = [
("file_type", "abc"),
("storageFormat", "Ogawa"),
("writeGeometries", False),
("writePointClouds", False),
("writeAxes", False)
]
def process(self, instance):
handle_start = instance.context.data["handleStart"]
handle_end = instance.context.data["handleEnd"]
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
step = 1
output_range = str(nuke.FrameRange(first_frame, last_frame, step))
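# string form of the frame range; bakeModelWithAxeses below rebuilds a
# nuke.FrameRange from it and iterates it frame by frame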
self.log.info("instance.data: `{}`".format(
pformat(instance.data)))
rm_nodes = list()
self.log.info("Creating additional nodes")
subset = instance.data["subset"]
staging_dir = self.staging_dir(instance)
# get extension from preset
extension = next((k[1] for k in self.write_geo_knobs
if k[0] == "file_type"), None)
if not extension:
raise RuntimeError(
"Bad config for extension in presets. "
"Talk to your supervisor or pipeline admin")
# create file name and path
filename = subset + ".{}".format(extension)
file_path = os.path.join(staging_dir, filename).replace("\\", "/")
with anlib.maintained_selection():
# bake model with axes onto world coordinate XYZ
rm_n = bakeModelWithAxeses(
nuke.toNode(instance.data["name"]), output_range)
rm_nodes.append(rm_n)
# create scene node
rm_n = nuke.createNode("Scene")
rm_nodes.append(rm_n)
# create write geo node
wg_n = nuke.createNode("WriteGeo")
wg_n["file"].setValue(file_path)
# apply preset knob values
for k, v in self.write_geo_knobs:
wg_n[k].setValue(v)
rm_nodes.append(wg_n)
# write out model
nuke.execute(
wg_n,
int(first_frame),
int(last_frame)
)
# erase additional nodes
for n in rm_nodes:
nuke.delete(n)
self.log.info(file_path)
# create representation data
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': extension,
'ext': extension,
'files': filename,
"stagingDir": staging_dir,
"frameStart": first_frame,
"frameEnd": last_frame
}
instance.data["representations"].append(representation)
instance.data.update({
"path": file_path,
"outputDir": staging_dir,
"ext": extension,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"frameStartHandle": first_frame,
"frameEndHandle": last_frame,
})
self.log.info("Extracted instance '{0}' to: {1}".format(
instance.name, file_path))
def bakeModelWithAxeses(model_node, output_range):
""" Baking all perent hiearchy of axeses into model
with transposition onto word XYZ coordinance
"""
bakeFocal = False
bakeHaperture = False
bakeVaperture = False
model_matrix = model_node['world_matrix']
new_cam_n = nuke.createNode("Model2")
new_cam_n.setInput(0, None)
new_cam_n['rotate'].setAnimated()
new_cam_n['translate'].setAnimated()
old_focal = model_node['focal']
if old_focal.isAnimated() and not (old_focal.animation(0).constant()):
new_cam_n['focal'].setAnimated()
bakeFocal = True
else:
new_cam_n['focal'].setValue(old_focal.value())
old_haperture = model_node['haperture']
if old_haperture.isAnimated() and not (
old_haperture.animation(0).constant()):
new_cam_n['haperture'].setAnimated()
bakeHaperture = True
else:
new_cam_n['haperture'].setValue(old_haperture.value())
old_vaperture = model_node['vaperture']
if old_vaperture.isAnimated() and not (
old_vaperture.animation(0).constant()):
new_cam_n['vaperture'].setAnimated()
bakeVaperture = True
else:
new_cam_n['vaperture'].setValue(old_vaperture.value())
new_cam_n['win_translate'].setValue(model_node['win_translate'].value())
new_cam_n['win_scale'].setValue(model_node['win_scale'].value())
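# per frame: sample the 4x4 world_matrix knob into a nuke.math.Matrix4
# (the index arithmetic below transposes the knob values, presumably to
# match Matrix4's ordering), extract ZXY rotations in degrees and take
# the translation from matrix elements 3, 7 and 11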
for x in nuke.FrameRange(output_range):
math_matrix = nuke.math.Matrix4()
for y in range(model_matrix.height()):
for z in range(model_matrix.width()):
matrix_pointer = z + (y * model_matrix.width())
math_matrix[matrix_pointer] = model_matrix.getValueAt(
x, (y + (z * model_matrix.width())))
rot_matrix = nuke.math.Matrix4(math_matrix)
rot_matrix.rotationOnly()
rot = rot_matrix.rotationsZXY()
new_cam_n['rotate'].setValueAt(math.degrees(rot[0]), x, 0)
new_cam_n['rotate'].setValueAt(math.degrees(rot[1]), x, 1)
new_cam_n['rotate'].setValueAt(math.degrees(rot[2]), x, 2)
new_cam_n['translate'].setValueAt(
model_matrix.getValueAt(x, 3), x, 0)
new_cam_n['translate'].setValueAt(
model_matrix.getValueAt(x, 7), x, 1)
new_cam_n['translate'].setValueAt(
model_matrix.getValueAt(x, 11), x, 2)
if bakeFocal:
new_cam_n['focal'].setValueAt(old_focal.getValueAt(x), x)
if bakeHaperture:
new_cam_n['haperture'].setValueAt(old_haperture.getValueAt(x), x)
if bakeVaperture:
new_cam_n['vaperture'].setValueAt(old_vaperture.getValueAt(x), x)
return new_cam_n

View file

@ -0,0 +1,58 @@
import pyblish
from avalon.nuke import lib as anlib
import nuke
class OpenFailedGroupNode(pyblish.api.Action):
"""
Center the failed instance node in the node graph
"""
label = "Open Model in Node Graph"
icon = "wrench"
on = "failed"
def process(self, context, plugin):
# Get the errored instances
failed = []
for result in context.data["results"]:
if (result["error"] is not None and result["instance"] is not None
and result["instance"] not in failed):
failed.append(result["instance"])
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
# maintain selection
with anlib.maintained_selection():
# open each failed group node in the node graph
for instance in instances:
grpn = instance[0]
nuke.showDag(grpn)
@pyblish.api.log
class ValidateModel(pyblish.api.InstancePlugin):
"""Validate amount of output nodes in model (group) node"""
order = pyblish.api.ValidatorOrder
optional = True
families = ["model"]
label = "Validate Model (Group)"
hosts = ["nuke"]
actions = [OpenFailedGroupNode]
def process(self, instance):
grpn = instance[0]
with grpn:
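# entering the group node via "with" makes nuke.allNodes() return only
# the nodes inside this group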
connections_out = nuke.allNodes('Output')
msg_multiple_outputs = (
"Only one outgoing connection from "
"\"{}\" is allowed".format(instance.data["name"]))
assert len(connections_out) <= 1, msg_multiple_outputs
connections_in = nuke.allNodes('Input')
msg_missing_inputs = (
"At least one Input node has to be used in: "
"\"{}\"".format(instance.data["name"]))
assert len(connections_in) >= 1, msg_missing_inputs